Merge pull request #4 from 18F/pass_options

Accept options for S3 sync
This commit is contained in:
Aidan Feldman 2016-03-23 12:21:35 -04:00
commit 252f337121
5 changed files with 50 additions and 29 deletions

View file

@ -1,6 +1,6 @@
# Simple S3 Resource for [Concourse CI](http://concourse.ci) # Simple S3 Resource for [Concourse CI](http://concourse.ci)
Resource to upload files to S3. Unlike [the official S3 Resource](https://github.com/concourse/s3-resource), this Resource doesn't care about files being versioned. Resource to upload files to S3. Unlike [the official S3 Resource](https://github.com/concourse/s3-resource), this Resource can upload or download multiple files.
## Usage ## Usage
@ -19,6 +19,7 @@ resources:
access_key_id: {{aws-access-key}} access_key_id: {{aws-access-key}}
secret_access_key: {{aws-secret-key}} secret_access_key: {{aws-secret-key}}
bucket: {{aws-bucket}} bucket: {{aws-bucket}}
options: [<optional, see note below>]
jobs: jobs:
- name: <job name> - name: <job name>
plan: plan:
@ -26,14 +27,44 @@ jobs:
- put: <resource name> - put: <resource name>
``` ```
See [the instructions for getting your AWS credentials](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-set-up.html#cli-signup), then pass them and the bucket name in as [parameters](http://concourse.ci/fly-set-pipeline.html#section_parameters). ### Options
The `options` parameter corresponds to the options that the `aws` CLI accepts for `sync`. Please see [S3 Sync Options](http://docs.aws.amazon.com/cli/latest/reference/s3/sync.html#options) and pay special attention to the [Use of Exclude and Include Filters](http://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters).
Given the following directory `test` we can upload _only_ the `results` subdirectory by using the `options` JSON below.
```
test
├── results
│   ├── 1.json
│   └── 2.json
└── scripts
└── bad.sh
```
```
options:
- "--exclude '*'"
- "--include 'results/*'"
```
## Development ## Development
Requires [Docker](https://www.docker.com/). Requires [Docker](https://www.docker.com/).
### Building, Uploading, and Using the Docker Image
1. Download [Docker Toolbox](https://www.docker.com/products/docker-toolbox).
1. Get a [Docker Hub](https://hub.docker.com) account
1. Launch the Docker Terminal and `cd` to this directory.
1. `docker login -e <email> -p <password> -u <username>`
1. `docker build -t <username>/s3-resource-simple .`
1. verify with `docker images`
1. `docker push <username>/s3-resource-simple`
1. Now you can test your local Concourse pipelines using `<username>/s3-resource-simple`.
### Tests
1. Run `cp config.example.json config.json`. 1. Run `cp config.example.json config.json`.
1. Modify `config.json`. 1. Modify `config.json`.
* See [the instructions for getting your AWS credentials](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-set-up.html#cli-signup). * See [the instructions for getting your AWS credentials](http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-set-up.html#cli-signup).
* Exclude the `s3://` prefix/protocol for the `bucket`. * Exclude the `s3://` prefix/protocol for `bucket`.
1. Run `./test/out </full/path/to/dir/or/file>`.
1. Run `./test/out </full/path/to/dir>`.

View file

@ -1,8 +1,6 @@
#!/bin/sh #!/bin/sh
# http://concourse.ci/implementing-resources.html#in:-fetch-a-given-resource. # Resource Impl: http://concourse.ci/implementing-resources.html#in:-fetch-a-given-resource
# https://github.com/concourse/git-resource/blob/6fcfbd4/assets/out#L4-L16
set -e set -e
exec 3>&1 # make stdout available as fd 3 for the result exec 3>&1 # make stdout available as fd 3 for the result
@ -19,17 +17,14 @@ fi
# parse incoming config data # parse incoming config data
payload=`cat` payload=`cat`
bucket=$(echo "$payload" | jq -r '.source.bucket') bucket=$(echo "$payload" | jq -r '.source.bucket')
options=$(echo "$payload" | jq -r '.source.options // [] | join(" ")')
# export for `aws` cli
echo "Downloading from S3..."
# http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#cli-environment
export AWS_ACCESS_KEY_ID=$(echo "$payload" | jq -r '.source.access_key_id') export AWS_ACCESS_KEY_ID=$(echo "$payload" | jq -r '.source.access_key_id')
export AWS_SECRET_ACCESS_KEY=$(echo "$payload" | jq -r '.source.secret_access_key') export AWS_SECRET_ACCESS_KEY=$(echo "$payload" | jq -r '.source.secret_access_key')
aws s3 sync "s3://$bucket" $dest echo "Downloading from S3..."
eval aws s3 sync "s3://$bucket" $dest $options
echo "...done." echo "...done."
source "$(dirname $0)/emit.sh" >&3 source "$(dirname $0)/emit.sh" >&3

View file

@ -1,8 +1,6 @@
#!/bin/sh #!/bin/sh
# http://concourse.ci/implementing-resources.html#out:-update-a-resource. # Resource Impl: http://concourse.ci/implementing-resources.html#out:-update-a-resource.
# https://github.com/concourse/git-resource/blob/6fcfbd4/assets/out#L4-L16
set -e set -e
exec 3>&1 # make stdout available as fd 3 for the result exec 3>&1 # make stdout available as fd 3 for the result
@ -11,26 +9,22 @@ exec 1>&2 # redirect all output to stderr for logging
source=$1 source=$1
if [ -z "$source" ]; then if [ -z "$source" ]; then
echo "usage: $0 <path/to/volume>" echo "usage: $0 </full/path/to/dir>"
exit 1 exit 1
fi fi
####################################### #######################################
# parse incoming config data # parse incoming config data
payload=`cat` payload=`cat`
bucket=$(echo "$payload" | jq -r '.source.bucket') bucket=$(echo "$payload" | jq -r '.source.bucket')
options=$(echo "$payload" | jq -r '.source.options // [] | join(" ")')
# export for `aws` cli
echo "Uploading to S3..."
# http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#cli-environment
export AWS_ACCESS_KEY_ID=$(echo "$payload" | jq -r '.source.access_key_id') export AWS_ACCESS_KEY_ID=$(echo "$payload" | jq -r '.source.access_key_id')
export AWS_SECRET_ACCESS_KEY=$(echo "$payload" | jq -r '.source.secret_access_key') export AWS_SECRET_ACCESS_KEY=$(echo "$payload" | jq -r '.source.secret_access_key')
aws s3 sync $source "s3://$bucket" echo "Uploading to S3..."
eval aws s3 sync $source "s3://$bucket" $options
echo "...done." echo "...done."
source "$(dirname $0)/emit.sh" >&3 source "$(dirname $0)/emit.sh" >&3

View file

@ -2,6 +2,7 @@
"source": { "source": {
"access_key_id": "", "access_key_id": "",
"secret_access_key": "", "secret_access_key": "",
"bucket": "" "bucket": "",
"options": []
} }
} }

View file

@ -5,7 +5,7 @@ set -e
source=$1 source=$1
if [ -z "$source" ]; then if [ -z "$source" ]; then
echo "usage: $0 </full/path/to/dir/or/file>" echo "usage: $0 </full/path/to/dir>"
exit 1 exit 1
fi fi