Commit 81d316f: "Let's get started" (initial commit, 0 parents)

11 files changed: +240 −0 lines

.buildkite/pipeline.yml

Lines changed: 15 additions & 0 deletions

```yml
steps:
  - label: ":shell: Lint (Shellcheck)"
    plugins:
      shellcheck#v1.1.2:
        files: hooks/**

  - label: ":sparkles: Lint (Buildkite Plugin Linter)"
    plugins:
      plugin-linter#v2.0.0:
        id: automattic/bash-cache

  - label: ":docker: Test"
    plugins:
      docker-compose#v3.7.0:
        run: tests
```

README.md

Lines changed: 52 additions & 0 deletions

# Bash Cache Buildkite Plugin

A caching plugin that can be invoked from your build script.

## Example

For a directory structure that looks like:

```
my-project/
├── node_modules/
├── package.json
├── package-lock.json
```

Add the following to your `pipeline.yml`:

```yml
steps:
  - command: |
      # To persist the cache
      save_cache node_modules/ $(hash_file package-lock.json)

      # To restore the cache, if present
      restore_cache $(hash_file package-lock.json)
    plugins:
      - automattic/bash-cache#v1.0.0
```

## Configuration

There are no configuration options for this plugin.

## Developing

To run the linter and tests:

```shell
docker-compose run --rm lint
docker-compose run --rm tests
```

## Contributing

1. Fork the repo
2. Make the changes
3. Run the tests
4. Commit and push your changes
5. Send a pull request

bin/hash_directory

Lines changed: 16 additions & 0 deletions

```bash
#!/bin/bash

set -e

DIRECTORY_PATH=$1

if [ -z "$1" ]; then
  echo "You must pass a directory name to hash"
  exit 1
fi

# - Find all files in the given directory
# - Run `shasum -a 256` on the files found – terminating `-exec` with `+` passes many files per invocation for a big speed boost
# - Sort the output by filename for deterministic hashing
# - Take the hash of all of the output hashes (and file paths)
find "$DIRECTORY_PATH" -type f -exec shasum -a 256 "{}" \+ | sort -k 2 | shasum -a 256 | cut -f1 -d " "
```

bin/hash_file

Lines changed: 10 additions & 0 deletions

```bash
#!/bin/bash

set -e

if [ -z "$1" ]; then
  echo "You must pass a filename to hash"
  exit 1
fi

shasum -a 256 "$1" | cut -f1 -d " "
```

bin/restore_cache

Lines changed: 25 additions & 0 deletions

```bash
#!/bin/bash

set -e

CACHE_KEY=$1

if [ -z "$CACHE_BUCKET_NAME" ]; then
  echo "⛔ Unable to restore file from cache – no \$CACHE_BUCKET_NAME is set"
  exit 1
fi

if aws s3api head-object --bucket "$CACHE_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then
  echo "Restoring cache entry $CACHE_KEY"

  echo "  Downloading"
  aws s3 cp "s3://$CACHE_BUCKET_NAME/$CACHE_KEY" "$CACHE_KEY" --quiet

  echo "  Decompressing"
  tar -xf "$CACHE_KEY"

  echo "  Cleaning Up"
  rm "$CACHE_KEY"
else
  echo "No cache entry found"
fi
```
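In practice this script is called from a build command after the environment hook has put `bin/` on `PATH`. A sketch of that call, with a hypothetical bucket name:

```bash
# Hypothetical build-step usage; the bucket name is illustrative.
export CACHE_BUCKET_NAME="my-ci-cache-bucket"

# Restore node_modules/ (if a matching entry exists) before installing dependencies.
restore_cache "$(hash_file package-lock.json)"
```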

bin/save_cache

Lines changed: 53 additions & 0 deletions

```bash
#!/bin/bash

set -e

CACHE_FILE=$1
CACHE_KEY=$2

if [ -z "$CACHE_FILE" ]; then
  echo "You must pass the file or directory you want to be cached"
  exit 1
fi

# We can automatically derive a cache key if one isn't provided
if [ -z "$CACHE_KEY" ]; then
  echo "No cache key provided – automatically deriving one:"

  # if the $CACHE_FILE is a directory, derive the key from the hash of all files within it
  if [[ -d $CACHE_FILE ]]; then
    CACHE_KEY=$(hash_directory "$CACHE_FILE")
    echo "  '$CACHE_FILE' is a directory with the hash $CACHE_KEY"

  # if the $CACHE_FILE is a regular file, derive the key from the file's hash
  elif [[ -f $CACHE_FILE ]]; then
    CACHE_KEY=$(hash_file "$CACHE_FILE")
    echo "  '$CACHE_FILE' is a file with the hash $CACHE_KEY"
  fi
fi

if [ -z "$CACHE_BUCKET_NAME" ]; then
  if [ -n "$BUILDKITE_PLUGIN_BASH_CACHE_BUCKET" ]; then
    echo "Reading bucket name from 'BUILDKITE_PLUGIN_BASH_CACHE_BUCKET'"
    CACHE_BUCKET_NAME="$BUILDKITE_PLUGIN_BASH_CACHE_BUCKET"
  else
    echo "⛔ Unable to save file to cache – no \$CACHE_BUCKET_NAME is set"
    exit 1
  fi
fi

if ! aws s3api head-object --bucket "$CACHE_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then
  echo "No existing cache entry for $CACHE_KEY – storing in cache"

  echo "  Compressing"
  tar -czf "$CACHE_KEY" "$CACHE_FILE"

  echo "  Uploading"
  aws s3 cp "$CACHE_KEY" "s3://$CACHE_BUCKET_NAME/$CACHE_KEY" --quiet

  echo "  Cleaning Up"
  rm "$CACHE_KEY"
else
  echo "This file is already cached – skipping upload"
fi
```
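A usage sketch covering both invocation styles, matching the README's example (the bucket name and paths are illustrative):

```bash
# Hypothetical build-step usage; the bucket name is illustrative.
export CACHE_BUCKET_NAME="my-ci-cache-bucket"

# Explicit key: tie the node_modules/ archive to the lockfile's hash.
save_cache node_modules/ "$(hash_file package-lock.json)"

# No key given: save_cache derives one itself via hash_directory / hash_file.
save_cache node_modules/
```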

docker-compose.yml

Lines changed: 10 additions & 0 deletions

```yml
services:
  tests:
    image: buildkite/plugin-tester
    volumes:
      - ".:/plugin:ro"
  lint:
    image: buildkite/plugin-linter
    command: ['--id', 'automattic/bash-cache']
    volumes:
      - ".:/plugin:ro"
```

hooks/environment

Lines changed: 16 additions & 0 deletions

```bash
#!/bin/bash

# The `environment` hook runs at the start of the job, before your build command runs

# Note that as the script is sourced, not run directly, the shebang line will be ignored
# See https://buildkite.com/docs/agent/v3/hooks#creating-hook-scripts

set -e

HOOKS_ROOT=$( dirname "${BASH_SOURCE[0]}" )
PLUGIN_ROOT=$( dirname "$HOOKS_ROOT" )
PLUGIN_BIN="$PLUGIN_ROOT/bin"

export PATH="$PATH:$PLUGIN_BIN"

echo "~~~ :file_cabinet: Loaded Caching Plugin"
```

plugin.yml

Lines changed: 9 additions & 0 deletions

```yml
name: Bash Cache
description: Caching operations inside your build script
author: https://github.com/automattic
requirements: ['awscli']
configuration:
  properties:
    bucket:
      type: string
  additionalProperties: false
```
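The `bucket` property is what `bin/save_cache` reads back as `BUILDKITE_PLUGIN_BASH_CACHE_BUCKET`. A sketch of that mapping, assuming Buildkite's usual convention of exposing plugin properties as upper-cased environment variables (the bucket name is made up):

```bash
# If a pipeline step configures the plugin with `bucket: my-ci-cache-bucket`,
# the agent exposes that property to hooks and commands roughly as:
export BUILDKITE_PLUGIN_BASH_CACHE_BUCKET="my-ci-cache-bucket"

# bin/save_cache falls back to this variable when CACHE_BUCKET_NAME is unset.
```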

test.sh

Lines changed: 7 additions & 0 deletions

```bash
#!/bin/bash

docker-compose run --rm lint
docker-compose run --rm tests

# Test hooks
shellcheck hooks/*
```
