Data Processing Pipeline #1482
name: Data Processing Pipeline

on:
  schedule:
    # Run every 6 hours (at 00:00, 06:00, 12:00, 18:00 UTC)
    - cron: '0 */6 * * *'
  workflow_dispatch:
    inputs:
      run_bridge_data:
        description: 'Run bridge data processing'
        required: false
        default: true
        type: boolean
      run_mezo_users:
        description: 'Run Mezo users fetching'
        required: false
        default: true
        type: boolean
      run_musd_data:
        description: 'Run MUSD data processing'
        required: false
        default: true
        type: boolean
      run_market_data:
        description: 'Run market data processing'
        required: false
        default: true
        type: boolean
      run_swaps_data:
        description: 'Run swaps data processing'
        required: false
        default: true
        type: boolean
      run_pools_data:
        description: 'Run pools data processing'
        required: false
        default: true
        type: boolean
      run_dapp_data:
        description: 'Run DApp data processing'
        required: false
        default: true
        type: boolean
      run_vaults_data:
        description: 'Run vaults data processing'
        required: false
        default: true
        type: boolean
      run_vote_data:
        description: 'Run vote data processing'
        required: false
        default: true
        type: boolean
      run_token_registrations:
        description: 'Run token registrations processing'
        required: false
        default: true
        type: boolean

env:
  PYTHON_VERSION: '3.13'

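# Each processing job below follows the same pattern: it always runs on the cron
# schedule, and on a manual dispatch it runs only when its matching run_* input is true.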
jobs:
  process-bridge-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_bridge_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Bridge Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting bridge data processing..."
          python scripts/process_bridge_data.py
          echo "Bridge data processing completed"

  fetch-mezo-users:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_mezo_users == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Fetch Mezo Users
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting Mezo users fetch..."
          python scripts/fetch_mezo_users.py
          echo "Mezo users fetch completed"

  process-musd-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_musd_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process MUSD Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting MUSD data processing..."
          python scripts/process_musd_data.py
          echo "MUSD data processing completed"

  process-market-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_market_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Market Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting market data processing..."
          python scripts/process_market_data.py
          echo "Market data processing completed"

  process-swaps-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_swaps_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Swaps Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting swaps data processing..."
          python scripts/process_swaps_data.py
          echo "Swaps data processing completed"

  process-pools-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_pools_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Pools Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting pools data processing..."
          python scripts/process_pools_data.py
          echo "Pools data processing completed"

  process-dapp-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_dapp_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process DApp Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting DApp data processing..."
          python scripts/process_dapp_data.py
          echo "DApp data processing completed"

  process-vaults-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_vaults_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Vaults Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting vaults data processing..."
          python scripts/process_vaults_data.py
          echo "Vaults data processing completed"

  process-vote-data:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_vote_data == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Vote Data
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting vote data processing..."
          python scripts/process_vote_data.py
          echo "Vote data processing completed"

  process-token-registrations:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_token_registrations == 'true')
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Process Token Registrations
        env:
          SUPABASE_URL_PROD: ${{ secrets.SUPABASE_URL_PROD }}
          SUPABASE_KEY_PROD: ${{ secrets.SUPABASE_KEY_PROD }}
          SUPABASE_DATA_URL: ${{ secrets.SUPABASE_DATA_URL }}
          SUPABASE_DATA_KEY: ${{ secrets.SUPABASE_DATA_KEY }}
          COINGECKO_KEY: ${{ secrets.COINGECKO_KEY }}
          GOOGLE_CLOUD_KEY: ${{ secrets.GOOGLE_CLOUD_KEY }}
        run: |
          echo "Starting token registrations processing..."
          python scripts/process_token_registrations.py
          echo "Token registrations processing completed"
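
  # Fan-in job: waits on every processing job above and fires only when at least
  # one of them reports 'failure', then posts a summary to the Discord webhook.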
  notify-on-failure:
    runs-on: ubuntu-latest
    if: always() && contains(needs.*.result, 'failure')
    needs: [process-bridge-data, fetch-mezo-users, process-musd-data, process-market-data, process-swaps-data, process-pools-data, process-dapp-data, process-vaults-data, process-vote-data, process-token-registrations]
    steps:
      - name: Discord notification on failure
        run: |
          # Determine which jobs failed
          FAILED_JOBS=""
          if [[ "${{ needs.process-bridge-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Bridge Data Processing\n"
          fi
          if [[ "${{ needs.fetch-mezo-users.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Mezo Users Fetch\n"
          fi
          if [[ "${{ needs.process-musd-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• MUSD Data Processing\n"
          fi
          if [[ "${{ needs.process-market-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Market Data Processing\n"
          fi
          if [[ "${{ needs.process-swaps-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Swaps Data Processing\n"
          fi
          if [[ "${{ needs.process-pools-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Pools Data Processing\n"
          fi
          if [[ "${{ needs.process-dapp-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• DApp Data Processing\n"
          fi
          if [[ "${{ needs.process-vaults-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Vaults Data Processing\n"
          fi
          if [[ "${{ needs.process-vote-data.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Vote Data Processing\n"
          fi
          if [[ "${{ needs.process-token-registrations.result }}" == "failure" ]]; then
            FAILED_JOBS="${FAILED_JOBS}• Token Registrations Processing\n"
          fi
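          # Post the embed to the Discord webhook; FAILED_JOBS is spliced into the
          # single-quoted JSON payload by briefly closing and reopening the quotes.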
          curl -X POST -H "Content-Type: application/json" \
            -d '{
              "embeds": [{
                "title": "Data Processing Pipeline Failed",
                "description": "One or more jobs failed in `${{ github.repository }}`",
                "color": 15158332,
                "fields": [
                  {
                    "name": "Failed Jobs",
                    "value": "'"${FAILED_JOBS}"'",
                    "inline": false
                  },
                  {
                    "name": "Workflow",
                    "value": "${{ github.workflow }}",
                    "inline": true
                  },
                  {
                    "name": "Branch",
                    "value": "${{ github.ref_name }}",
                    "inline": true
                  },
                  {
                    "name": "Triggered by",
                    "value": "${{ github.event_name }}",
                    "inline": true
                  },
                  {
                    "name": "View Logs",
                    "value": "[Run #${{ github.run_id }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})",
                    "inline": false
                  }
                ],
                "timestamp": "'$(date -u +%Y-%m-%dT%H:%M:%SZ)'"
              }]
            }' \
            ${{ secrets.DISCORD_WEBHOOK }}