Merge pull request #257 from datacoves/release/20251205 #24
name: "30: 🚀 Deploy to Production"

on: # yamllint disable-line rule:truthy
  push:
    branches:
      - main
    paths:
      - .github/workflows/**/*
      - automate/**/*
      - transform/**/*

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  # Perform the deployment to Prod
  build:
    name: Deployment Script
    runs-on: ubuntu-latest

    # Set environment variables in
    # https://github.com/<your org>/<your repo>/settings/variables/actions
    #
    # Alternatively, you can define multiple environments for different workflows:
    # https://github.com/<org>/<repo>/settings/environments
    # environment: PR_ENV

    container: datacoves/ci-basic-dbt-snowflake:4.1
    defaults:
      run:
        working-directory: /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/transform
    env:
      DBT_PROFILES_DIR: /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/automate/dbt
      DATACOVES__DBT_HOME: /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/transform
      DATACOVES__MAIN__ACCOUNT: ${{ vars.DATACOVES__MAIN__ACCOUNT }}
      DATACOVES__MAIN__DATABASE: ${{ vars.DATACOVES__MAIN__DATABASE }}
      DATACOVES__MAIN__SCHEMA: ${{ vars.DATACOVES__MAIN__SCHEMA }}
      DATACOVES__MAIN__ROLE: ${{ vars.DATACOVES__MAIN__ROLE }}
      DATACOVES__MAIN__WAREHOUSE: ${{ vars.DATACOVES__MAIN__WAREHOUSE }}
      DATACOVES__MAIN__USER: ${{ vars.DATACOVES__MAIN__USER }}
      DATACOVES__MAIN__PRIVATE_KEY: ${{ secrets.DATACOVES__MAIN__PRIVATE_KEY }}
      DBT_ARTIFACTS_BUCKET: "convexa-local"
      # Used by Datacoves to drop the staging database for blue/green deployments.
      # You most likely don't want to set this; we use it for demos.
      DATACOVES__DROP_DB_ON_FAIL: ${{ vars.DATACOVES__DROP_DB_ON_FAIL }}
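    # NOTE: the dbt profile under automate/dbt (see DBT_PROFILES_DIR above) presumably
    # reads the DATACOVES__MAIN__* variables via env_var(), so no connection details
    # need to live in the repository itself.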
    steps:
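      # Check out the repo with full history (fetch-depth: 0), presumably so the
      # version-bump and tag-push steps at the end can see existing tags.
      # NOTE: github.event.push is not part of the push event payload, so the ref
      # expression below resolves to an empty string and actions/checkout falls back
      # to the commit that triggered the run.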
      - name: Checkout branch
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.push.head.sha }}
          fetch-depth: 0
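      # The workspace is mounted into the container under a different owner than the
      # container user, so git must be told the directory is safe before later git
      # commands (version bump, tag push) will operate on it.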
      - name: Set Secure Directory
        run: git config --global --add safe.directory /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}
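      # dbt deps installs the project's package dependencies into dbt_packages/
      # inside the working directory (transform/).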
      - name: Install dbt packages
        run: "dbt deps"
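      # get_artifacts.sh presumably downloads the manifest from the previous production
      # run and exposes a manifest_found output, which the blue/green steps below use
      # to decide whether a deferred run is possible.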
      - name: Get prod manifest
        id: prod_manifest
        run: "../automate/dbt/get_artifacts.sh"
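      # To force a full refresh, include "[deploy:full-refresh]" anywhere in the merge
      # commit message. Writing KEY=value to $GITHUB_OUTPUT exposes the value as
      # steps.check-full-refresh.outputs.KEY; the single quotes around '--full-refresh'
      # are stripped by the shell when the output is interpolated into the dbt commands
      # below, so dbt receives a bare --full-refresh flag.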
      - name: Check for full-refresh marker in merge commit message
        id: check-full-refresh
        run: |
          # Check the latest commit message for the deployment marker
          if git log -1 --pretty=%B | grep -q "\[deploy:full-refresh\]"; then
            echo "FULL_REFRESH_FLAG='--full-refresh'" >> $GITHUB_OUTPUT
            echo "Found '[deploy:full-refresh]' marker in merge commit message, will run with --full-refresh"
          else
            # Don't set FULL_REFRESH_FLAG
            echo "No full-refresh deployment marker found in merge commit message, running normal deployment"
          fi
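      # The selector below is a union: all models materialized as dynamic tables,
      # the stg_test_failures model, and every seed are rebuilt ahead of the
      # blue/green run.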
      # This is needed because our sample dynamic table is not getting refreshed
      - name: Refresh dynamic tables before blue-green
        run: "dbt build -s config.materialized:dynamic_table stg_test_failures resource_type:seed ${{ steps.check-full-refresh.outputs.FULL_REFRESH_FLAG }}"
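      # Blue/green deployment: dbt-coves blue-green roughly builds the project into a
      # staging database and swaps it with production only when the build succeeds, so
      # consumers never see a half-built state.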
      # Do Blue-Green run
      - name: Run blue / green deployment
        id: run-blue-green
        if: ${{ steps.prod_manifest.outputs.manifest_found == 'false' }}
        run: "dbt-coves blue-green ${{ steps.check-full-refresh.outputs.FULL_REFRESH_FLAG }}"
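      # When a production manifest was found, --defer presumably passes dbt's state
      # deferral through, so unbuilt upstream models resolve against the downloaded
      # production manifest instead of being rebuilt from scratch.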
      - name: Run blue / green deployment with defer
        id: run-blue-green-defer
        if: ${{ steps.prod_manifest.outputs.manifest_found == 'true' }}
        run: "dbt-coves blue-green --defer ${{ steps.check-full-refresh.outputs.FULL_REFRESH_FLAG }}"
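      # drop_orphaned_relations is presumably a macro in this project (or one of its
      # packages) that removes tables and views that exist in the warehouse but are no
      # longer defined in dbt; dry_run: false makes it actually drop them.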
      - name: Drop orphaned relations in db that are no longer in dbt
        run: "dbt run-operation drop_orphaned_relations --args '{\"dry_run\": false}'"
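      # dbt docs generate writes the documentation site (index.html, catalog.json,
      # manifest.json) to transform/target, which the next step publishes to the
      # dbt-docs branch for hosting.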
      - name: Generate dbt documentation
        run: "dbt docs generate"
      - name: Deploy docs 🚀
        uses: JamesIves/github-pages-deploy-action@v4 # action version was redacted in the source; v4 assumed
        with:
          branch: dbt-docs
          folder: transform/target
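      # deploy_streamlit_app is presumably a project macro that pushes the Streamlit
      # sources under visualize/streamlit/loans-example into Snowflake as a Streamlit
      # app; app_path and app_file are its arguments.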
      - name: Deploy Streamlit App
        run: "dbt run-operation deploy_streamlit_app --args \"{app_path: '../visualize/streamlit/loans-example', app_file: loans.py}\""
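      # upload_artifacts is presumably another project macro that uploads the dbt run
      # artifacts (e.g., to the warehouse or a stage) for downstream use.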
      - name: Upload dbt artifacts
        run: "dbt run-operation upload_artifacts"
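      # The AWS credentials configured here are only needed for the S3 upload below;
      # DataHub ingests the dbt artifacts (sources, manifest, catalog, run results)
      # from that bucket.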
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Upload artifacts to S3 for Datahub
        run: |
          # Check and upload each file individually
          for file in target/sources.json target/manifest.json target/catalog.json target/run_results.json; do
            if [ -f "$file" ]; then
              echo "Uploading $file to S3..."
              aws s3 cp "$file" "s3://${DBT_ARTIFACTS_BUCKET}/dbt_artifacts/$(basename $file)"
            else
              echo "File $file does not exist, skipping..."
            fi
          done
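      # setup-git-user configures git user.name / user.email for the CI bot so the
      # version-bump commit and tag can be created. bump_dbt_project.sh presumably
      # bumps the version in dbt_project.yml, commits it, and tags the release, which
      # github-push-action then pushes back (tags: true includes the new tag).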
      - uses: fregante/setup-git-user@v2
      - name: Bump dbt project and git project version
        run: "../automate/dbt/bump_dbt_project.sh"
      - name: Push version changes
        uses: ad-m/github-push-action@master # action ref was redacted in the source; master assumed
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: ${{ github.ref_name }}
          tags: true
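      # Cleanup: if either blue/green step failed, drop the staging database so a
      # half-built copy is not left behind. DATACOVES__STAGING__DATABASE is not defined
      # in the env block above, so it is presumably exported during the blue/green run
      # (e.g., via GITHUB_ENV).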
      - name: Drop staging db on Failure
        if: always() && (steps.run-blue-green.outcome == 'failure' || steps.run-blue-green-defer.outcome == 'failure')
        run: "dbt --no-write-json run-operation drop_staging_db --args 'db_name: ${{ env.DATACOVES__STAGING__DATABASE }}'" # yamllint disable-line rule:line-length