6 files changed, +73 −2 lines changed

#!/bin/bash

set -e

# Follow the typical pod installation process...
install_cocoapods

# ...then overwrite the cache for future jobs to use
save_cache ~/.cocoapods "$BUILDKITE_PIPELINE_SLUG-specs-repos" --force
save_cache ~/Library/Caches/CocoaPods/ "$BUILDKITE_PIPELINE_SLUG-global-pod-cache" --force

#!/bin/bash

set -e

AWS_BUCKET=$1

DATE=$(date +"%Y-%m-%d")

mkdir -p "/tmp/$DATE"

# Download the repo as a bare mirror clone
git clone --mirror "$BUILDKITE_REPO" "/tmp/$DATE/$BUILDKITE_PIPELINE_SLUG.git"

# Create the tarball
TAR_NAME="$DATE.git.tar"
tar -C "/tmp/$DATE" -cvf "$TAR_NAME" "$BUILDKITE_PIPELINE_SLUG.git"

# Copy the file to S3
aws s3 cp "$TAR_NAME" "s3://$AWS_BUCKET/$BUILDKITE_ORGANIZATION_SLUG/$BUILDKITE_PIPELINE_SLUG/$TAR_NAME"
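
For reference, restoring one of these backups is roughly the reverse of the script above. This is a hedged sketch rather than part of the change itself; the DATE argument and the restored-checkout directory name are placeholders.

#!/bin/bash
# Hypothetical restore counterpart to the backup script above (assumes the same bucket layout)
set -e

AWS_BUCKET=$1
DATE=$2   # which day's backup to restore, matching the tarball prefix used above

# Download and unpack the mirrored repository
aws s3 cp "s3://$AWS_BUCKET/$BUILDKITE_ORGANIZATION_SLUG/$BUILDKITE_PIPELINE_SLUG/$DATE.git.tar" .
tar -xf "$DATE.git.tar"

# A mirror clone works as a regular remote, so it can be cloned into a working copy
git clone "$BUILDKITE_PIPELINE_SLUG.git" restored-checkout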

#!/bin/bash

set -e

# Start by restoring the specs repo and pod cache:
# - The specs repo cache holds all of the Podspec files. This avoids having to download them all from the CDN.
# - The pod cache holds the downloaded Pod source files. This avoids having to check them out again.
restore_cache "$BUILDKITE_PIPELINE_SLUG-specs-repos"
restore_cache "$BUILDKITE_PIPELINE_SLUG-global-pod-cache"

# Restore the local `Pods` directory based on the `Podfile.lock` contents
restore_cache "$(hash_file Podfile.lock)"

# If the `pod check` plugin is installed, use it to determine whether or not to install Pods at all.
# If it's not installed (or if it fails), we'll try to install Pods.
# If that fails, it may be due to an out-of-date repo. We can use `--repo-update` to try to resolve this automatically.
if bundle exec pod plugins installed | grep -q check; then
  bundle exec pod check || bundle exec pod install || bundle exec pod install --repo-update --verbose
else
  bundle exec pod install || bundle exec pod install --repo-update --verbose
fi

# If this is the first time we've seen this particular hash of `Podfile.lock`, create a cache entry for future use
save_cache Pods "$(hash_file Podfile.lock)"
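
The hash_file helper isn't defined in this change, so it's presumably part of the shared CI tooling. A minimal sketch of what such a helper might look like, assuming it simply turns a lockfile into a stable cache key:

# Hypothetical sketch of the hash_file helper used above; the real shared helper may differ.
hash_file() {
  # Derive a cache key component from the file name plus a digest of its contents
  echo "$(basename "$1")-$(shasum -a 256 "$1" | awk '{ print $1 }')"
}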

#!/bin/bash

set -e

ARCHITECTURE=$(uname -m)
CACHEKEY="$BUILDKITE_PIPELINE_SLUG-$ARCHITECTURE-ruby$(cat .ruby-version)-$(hash_file Gemfile.lock)"

restore_cache "$CACHEKEY"
bundle install

# If this is the first time we've seen this particular cache key, save it for the future
save_cache vendor/bundle "$CACHEKEY"
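
Caching vendor/bundle only pays off if Bundler actually installs gems into that path. If the repository doesn't already configure this (an assumption; it may be set via .bundle/config or BUNDLE_PATH), a line like the following before bundle install would point Bundler at the cached directory:

# Assumption: only needed if a local Bundler path isn't already configured for this repo
bundle config set --local path 'vendor/bundle'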

@@ -6,7 +6,7 @@ CACHE_KEY=$1
 
 if [ -z "$CACHE_BUCKET_NAME" ]; then
   echo "⛔ Unable to save file to cache – no \$CACHE_BUCKET_NAME is set"
-  return 1
+  exit 0
 fi
 
 if aws s3api head-object --bucket "$CACHE_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then
@@ -21,5 +21,5 @@ if aws s3api head-object --bucket "$CACHE_BUCKET_NAME" --key "$CACHE_KEY" > /dev
   echo "Cleaning Up"
   rm "$CACHE_KEY"
 else
-  echo "No cache entry found"
+  echo "No cache entry found for '$CACHE_KEY'"
 fi

@@ -37,6 +37,12 @@ if [ -z "$CACHE_BUCKET_NAME" ]; then
   fi
 fi
 
+# Use with caution – in general it's not a good idea to overwrite a cache entry
+if [[ $3 == '--force' ]]; then
+  echo "Deleting the existing cache key"
+  aws s3 rm "s3://a8c-ci-cache/$CACHE_KEY"
+fi
+
 if ! aws s3api head-object --bucket "$CACHE_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then
   echo "No existing cache entry for $CACHE_KEY – storing in cache"
 