diff --git a/Dockerfile b/Dockerfile index 40a94f4..bc085b1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,9 @@ -FROM alpine:3.3 +FROM alpine:3.7 RUN apk --no-cache add \ py-pip \ - python &&\ + python \ + curl &&\ pip install --upgrade \ pip \ awscli diff --git a/README.md b/README.md index 7f5980a..8213699 100644 --- a/README.md +++ b/README.md @@ -1,26 +1,24 @@ -[![](https://images.microbadger.com/badges/image/futurevision/aws-s3-sync.svg)](https://microbadger.com/images/futurevision/aws-s3-sync "Get your own image badge on microbadger.com") -[![](https://images.microbadger.com/badges/version/futurevision/aws-s3-sync.svg)](https://microbadger.com/images/futurevision/aws-s3-sync "Get your own version badge on microbadger.com") - -# futurevision/aws-s3-sync +# docker-aws-s3-sync Docker container that periodically syncs a folder to Amazon S3 using the [AWS Command Line Interface tool](https://aws.amazon.com/cli/) and cron. +Can be used with AWS Credentials and ECS Container Role + ## Usage docker run -d [OPTIONS] futurevision/aws-s3-sync - ### Required Parameters: -* `-e KEY=`: User Access Key -* `-e SECRET=`: User Access Secret -* `-e REGION=`: Region of your bucket -* `-e BUCKET=`: The name of your bucket +* `-e BUCKET=`: The name of your bucket, ex. dev-efs-8xabdop9fqb1 * `-v /path/to/backup:/data:ro`: mount target local folder to container's data folder. Content of this folder will be synced with S3 bucket. ### Optional parameters: +* `-e KEY=`: User Access Key, if not using a container role +* `-e SECRET=`: User Access Secret, if not using a container role +* `-e REGION=`: Region of your bucket, if not using a container role * `-e PARAMS=`: parameters to pass to the sync command ([full list here](http://docs.aws.amazon.com/cli/latest/reference/s3/sync.html)). 
* `-e BUCKET_PATH=`: The path of your s3 bucket where the files should be synced to (must start with a slash), defaults to "/" to sync to bucket root * `-e CRON_SCHEDULE="0 1 * * *"`: specifies when cron job starts ([details](http://en.wikipedia.org/wiki/Cron)), defaults to `0 1 * * *` (runs every night at 1:00). @@ -28,30 +26,75 @@ Docker container that periodically syncs a folder to Amazon S3 using the [AWS Co ## Examples: +### Once an hour Sync every hour with cron schedule (container keeps running): docker run -d \ - -e KEY=mykey \ - -e SECRET=mysecret \ - -e REGION=region \ - -e BUCKET=mybucket \ - -e CRON_SCHEDULE="0 * * * *" \ - -e BUCKET_PATH=/path \ - -v /home/user/data:/data:ro \ - futurevision/aws-s3-sync - + -e KEY=mykey \ + -e SECRET=mysecret \ + -e REGION=eu-central-1 \ + -e BUCKET=dev-efs-8xabdop9fqb1 \ + -e CRON_SCHEDULE="0 * * * *" \ + -e BUCKET_PATH=/path \ + -v /home/user/data:/data:ro \ + futurevision/aws-s3-sync + +### Only once Sync just once (container is deleted afterwards): docker run --rm \ - -e KEY=mykey \ - -e SECRET=mysecret \ - -e REGION=region \ - -e BUCKET=mybucket \ - -v /home/user/data:/data:ro \ - futurevision/aws-s3-sync no-cron + -e KEY=mykey \ + -e SECRET=mysecret \ + -e REGION=eu-central-1 \ + -e BUCKET=dev-efs-8xabdop9fqb1 \ + -v /home/user/data:/data:ro \ + futurevision/aws-s3-sync no-cron + +### AWS Role + + docker run -d \ + -e BUCKET=dev-efs-8xabdop9fqb1 \ + -e CRON_SCHEDULE="0 * * * *" \ + -v /home/user/data:/data:ro \ + futurevision/aws-s3-sync + +If using an AWS Role, make sure to set Network Mode to host. 
+ +#### AWS Policy role + + { + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "s3:ListBucket", + "s3:GetBucketLocation" + ], + "Resource": "arn:aws:s3:::dev-efs-8xabdop9fqb1", + "Effect": "Allow" + }, + { + "Action": [ + "s3:PutObject", + "s3:PutObjectAcl", + "s3:PutObjectTagging", + "s3:Get*", + "s3:DeleteObject", + "s3:DeleteObjectTagging" + ], + "Resource": "arn:aws:s3:::dev-efs-8xabdop9fqb1/*", + "Effect": "Allow" + } + ] + } + +More information about TaskDefinition roles can be seen in the [AWS Developer guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/create-task-definition.html) under Using the EC2 launch type compatibility template. ## Credits This container is heavily inspired by [istepanov/backup-to-s3](https://github.com/istepanov/docker-backup-to-s3/blob/master/README.md). -The main difference is that this container is using Alpine Linux instead of Debian to be more light weight. It also uses a different method of using the AWS CLI tool. +Key differences are: +- The container is using Alpine Linux instead of Debian to be more light weight. +- It uses different methods of the AWS CLI tool. +- Supports AWS Role rather than AWS Credentials. 
diff --git a/start.sh b/start.sh
index 115733f..b3ce29c 100644
--- a/start.sh
+++ b/start.sh
@@ -2,9 +2,15 @@
 
 set -e
 
-export AWS_ACCESS_KEY_ID=$KEY
-export AWS_SECRET_ACCESS_KEY=$SECRET
-export AWS_DEFAULT_REGION=$REGION
+if [[ -n "$KEY" ]] && [[ -n "$SECRET" ]] && [[ -n "$REGION" ]]; then
+  echo "Set AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_DEFAULT_REGION"
+
+  export AWS_ACCESS_KEY_ID=$KEY
+  export AWS_SECRET_ACCESS_KEY=$SECRET
+  export AWS_DEFAULT_REGION=$REGION
+else
+  echo "Missing one or more ENV variables, using container role"
+fi
 
 if [[ "$1" == 'now' ]]; then
   exec /sync.sh
diff --git a/sync.sh b/sync.sh
index 12bee6d..3a5290f 100644
--- a/sync.sh
+++ b/sync.sh
@@ -6,4 +6,4 @@
 echo "$(date) - Start"
 
 aws s3 sync /data s3://$BUCKET$BUCKET_PATH $PARAMS
-echo "$(date) End"
+echo "$(date) - End"