build.yml
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Build
on:
  workflow_call:
    inputs:
      app_name:
        required: true
        description: "Name of the app, unique for the repo, kebab-cased"
        type: string
      bucket_name:
        required: true
        description: "Name of the S3 registry bucket"
        type: string
      custom_hash:
        required: false
        description: "Custom hash used to cache the action on successful build"
        type: string
      build_dir:
        required: true
        description: "Location of the deploy bundle after running the build command"
        type: string
      build_cmd:
        required: true
        description: "Command for building the deploy bundle"
        type: string
      pnpm_version:
        required: false
        default: 0
        description: "pnpm version to install. If omitted, the project needs to have the version specified in the 'packageManager' field in its package.json"
        type: number
      registry_scope:
        required: false
        default: "@pleo-io"
        description: "Org scope for the GitHub Package Registry"
        type: string
      runner:
        required: false
        default: ubuntu-latest-8-cores
        description: "Use a specific size of job runner to run this workflow"
        type: string
      turbo_cache:
        required: false
        description: "Use Turborepo Remote Cache"
        type: boolean
      turbo_force:
        required: false
        description: "Skip reading from the Turborepo Remote Cache when running the build task (task output is still written to the cache)"
        type: boolean
    outputs:
      build_hash:
        description: "Hash of the code built"
        value: ${{ jobs.build.outputs.build_hash }}
      bundle_uri:
        description: "S3 URI of the bundle in the registry bucket"
        value: ${{ jobs.build.outputs.bundle_uri }}
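
# Example of calling this reusable workflow from another workflow. Illustrative
# sketch only: the repository path, ref, and input values below are assumptions,
# not taken from this file. `secrets: inherit` is likely needed, since this
# workflow reads organisation secrets (AWS keys, registry token) directly.
#
#   jobs:
#     build:
#       uses: pleo-io/<repo>/.github/workflows/build.yml@<ref>
#       secrets: inherit
#       with:
#         app_name: my-app
#         bucket_name: my-registry-bucket
#         build_dir: dist
#         build_cmd: pnpm run build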
jobs:
  build:
    name: Build & Upload
    runs-on: ${{ inputs.runner }}
    outputs:
      build_hash: ${{ steps.s3-cache.outputs.hash }}
      bundle_uri: ${{ steps.bundle-uri.outputs.uri }}
    steps:
      - uses: actions/[email protected]
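      # The cache check exposes a `hash` and a `processed` output. The install,
      # build, and upload steps below run only when `processed == 'false'`,
      # presumably meaning no bundle for this hash has been uploaded yet.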
      - name: Check S3 Cache
        uses: pleo-io/[email protected]
        id: s3-cache
        with:
          bucket-name: ${{ inputs.bucket_name }}
          key-prefix: build/${{ inputs.app_name }}
          custom-hash: ${{ inputs.custom_hash }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_FRONTEND_REGISTRY }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_FRONTEND_REGISTRY }}
          aws-region: eu-west-1
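      # When `pnpm_version` is left at its default of 0, `null` is passed so the
      # pnpm setup falls back to the `packageManager` field in package.json
      # (see the input description above).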
      - uses: pnpm/[email protected]
        if: steps.s3-cache.outputs.processed == 'false'
        with:
          version: ${{ inputs.pnpm_version != 0 && inputs.pnpm_version || null }}
      - uses: actions/[email protected]
        if: steps.s3-cache.outputs.processed == 'false'
        with:
          node-version: "20"
          registry-url: "https://npm.pkg.github.com"
          scope: ${{ inputs.registry_scope }}
      - name: Cache node_modules/.pnpm
        if: steps.s3-cache.outputs.processed == 'false'
        uses: actions/[email protected]
        with:
          path: node_modules/.pnpm
          key: pnpm-node-modules-${{ hashFiles('pnpm-lock.yaml') }}
          restore-keys: |
            pnpm-node-modules-
      - name: Install Dependencies
        if: steps.s3-cache.outputs.processed == 'false'
        run: pnpm install --frozen-lockfile --ignore-scripts
        env:
          NODE_AUTH_TOKEN: ${{ secrets.GH_REGISTRY_NPM_TOKEN }}
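      # TURBO_TOKEN is only exposed when `turbo_cache` is enabled; TURBO_FORCE
      # makes the build skip reading from the remote cache while still writing
      # task output to it, per the `turbo_force` input description.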
      - name: Build
        if: steps.s3-cache.outputs.processed == 'false'
        run: ${{ inputs.build_cmd }}
        env:
          NOTION_API_KEY: ${{ secrets.NOTION_API_KEY }}
          TURBO_TOKEN: ${{ inputs.turbo_cache && secrets.TURBO_TOKEN || null }}
          TURBO_FORCE: ${{ inputs.turbo_force }}
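      # The bundle address follows the layout
      # s3://<bucket_name>/bundles/<owner>/<repo>/<app_name>/<hash>.tar.gz and is
      # exposed as the `bundle_uri` workflow output. This step has no `if` guard,
      # so the URI is produced even on a cache hit.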
      - name: Get Bundle S3 URI
        id: bundle-uri
        run: |
          echo "uri=s3://${{ inputs.bucket_name }}/bundles/${{ github.repository }}/${{ inputs.app_name }}/${{ steps.s3-cache.outputs.hash }}.tar.gz" >> $GITHUB_OUTPUT
      - uses: aws-actions/[email protected]
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_FRONTEND_REGISTRY }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_FRONTEND_REGISTRY }}
          aws-region: eu-west-1
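      # The build output directory is packed into a single tarball and uploaded to
      # the URI computed above. On a cache hit this step is skipped, since a bundle
      # for this hash should already be in the bucket.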
      - name: Compress & Upload Bundle
        if: steps.s3-cache.outputs.processed == 'false'
        run: |
          tar -zcvf bundle.tar.gz -C ${{ inputs.build_dir }} .
          aws s3 cp bundle.tar.gz ${{ steps.bundle-uri.outputs.uri }}
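
# A downstream job in the caller workflow could consume the outputs roughly like
# this (illustrative sketch; the job and step names are assumptions, and AWS
# credentials are assumed to be configured already):
#
#   deploy:
#     needs: build
#     runs-on: ubuntu-latest
#     steps:
#       - name: Download Bundle
#         run: aws s3 cp ${{ needs.build.outputs.bundle_uri }} bundle.tar.gz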