Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
184 changes: 184 additions & 0 deletions .github/workflows/firedrill.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,184 @@
# CI "fire drill" for the SNEWS stack: build the docker compose services,
# wait for each component to initialize, publish test observation messages,
# and verify that a coincidence alert is produced and archived end to end.
name: Firedrill Workflow

on:
  push:
    branches:
      - main
      - test-framework
  pull_request:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        # v3 runs on the deprecated node16 runtime; v4 is the supported release.
        uses: actions/checkout@v4

      - name: Build and run Docker Compose
        run: |
          # hop/SCiMMA credentials are injected as build args so each image can
          # generate its own credential file at build time.
          docker compose build --build-arg HOP_USERNAME=${{ secrets.CI_FIREDRILL_USERNAME }} --build-arg HOP_PASSWORD=${{ secrets.CI_FIREDRILL_PASSWORD }}
          docker compose up -d --force-recreate
          echo "Waiting for services to start..."
          sleep 3

      - name: Wait for postgres to initialize
        run: |
          echo "Waiting for snews_pg to initialize ..."
          ready=false
          # Poll the container logs for the init-complete marker (up to ~60s).
          for i in {1..30}; do
            if docker logs snews_pg 2>&1 | grep -q "PostgreSQL init process complete; ready for start up."; then
              echo "Postgres is ready."
              ready=true
              break
            fi
            echo "Still waiting for Postgres to initialize..."
            sleep 2
          done
          # Fail the job here instead of silently continuing when postgres
          # never came up (later steps would fail with misleading errors).
          if [ "$ready" != true ]; then
            echo "Postgres did not initialize in time."
            docker logs snews_pg
            exit 1
          fi

      - name: Wait for db_pipeline to set up tables and initialize listener
        run: |
          echo "Waiting for postgres tables to be set by db_pipeline and listener to initialize..."
          max_retries=5
          for attempt in $(seq 1 $max_retries); do
            # Both markers must be present: table creation AND listener start-up.
            if docker logs db_pipeline 2>&1 | grep -q "CREATE TABLE time_tier_archive" && \
               docker logs db_pipeline 2>&1 | grep -q "(re)Initializing Database Listener System for kafka://kafka.scimma.org/snews.experiments-github"; then
              echo "Postgres and Database Listener are ready."
              break
            fi
            echo "Attempt $attempt failed. Retrying in 2 seconds..."
            sleep 2
            if [ $attempt -eq $max_retries ]; then
              echo "Postgres and Database Listener did not initialize after $max_retries attempts."
              exit 1
            fi
          done

      - name: confirm tables are present in postgres
        run: |
          echo "Confirming tables are present in postgres..."
          max_retries=5
          for attempt in $(seq 1 $max_retries); do
            # Dump the table listing and check every expected table name appears.
            docker exec snews_pg psql -U user -d snews_pg -c "\dt" > tables.txt
            tables=("all_mgs" "cached_heartbeats" "coincidence_tier_archive" "retraction_tier_archive" "sig_tier_archive" "time_tier_archive")
            all_tables_present=true
            for table in "${tables[@]}"; do
              if ! grep -q "$table" tables.txt; then
                echo "Table $table is not present in postgres."
                all_tables_present=false
              else
                echo "Table $table is present in postgres."
              fi
            done
            if [ "$all_tables_present" = true ]; then
              echo "All tables are present! Moving on..."
              break
            fi
            echo "Attempt $attempt failed. Retrying in 2 seconds..."
            sleep 2
            if [ $attempt -eq $max_retries ]; then
              echo "Some tables are still missing after $max_retries attempts."
              cat tables.txt
              exit 1
            fi
          done

      - name: Wait for coincidence_system to initialize
        run: |
          echo "Waiting for coincidence_system to initialize (confirming observation on snews.experiments-github) ..."
          ready=false
          for i in {1..30}; do
            if docker logs coincidence_system 2>&1 | grep -q "(re)Initializing Coincidence System for kafka://kafka.scimma.org/snews.experiments-github"; then
              echo "Coincidence System is ready."
              ready=true
              break
            fi
            echo "Still waiting for Coincidence System to initialize..."
            sleep 2
          done
          # Fail fast rather than publishing messages to a system that never started.
          if [ "$ready" != true ]; then
            echo "Coincidence System did not initialize in time."
            docker logs coincidence_system
            exit 1
          fi

      - name: Publish messages
        run: |
          echo "Publishing messages..."
          # Two detector messages close enough in time to trigger a coincidence.
          docker exec publishing_tools_publisher snews_pt publish /app/snews_pt/test/firedrill_combined_message.json --firedrill
          docker exec publishing_tools_publisher snews_pt publish /app/snews_pt/test/firedrill_combined_message2.json --firedrill
          # Give the coincidence system time to consume and evaluate the messages.
          sleep 10

      - name: Verify alert publishing from coincidence_system
        run: |
          echo "Checking logs of coincidence_system..."
          docker logs coincidence_system > coincidence_logs.txt
          # The coincidence system prints this marker when it fires an alert.
          if ! grep -q "PUBLISHING AN ALERT!!!" coincidence_logs.txt; then
            echo "Published messages did not run or produce the expected output."
            cat coincidence_logs.txt
            exit 1
          fi
          echo "Coincidence_system ran successfully and produced the expected output."
          echo "current coincidence docker logs:"
          docker logs coincidence_system

      - name: Verify custom script output
        run: |
          echo "Waiting to see the right broker (snews.alert-github) in publishing_tools_subscriber logs..."
          for i in {1..30}; do
            if docker logs publishing_tools_subscriber 2>&1 | grep -q "Broker:kafka://kafka.scimma.org/snews.alert-github"; then
              echo "Subscribing to firedrill broker. Proceeding to check for alert log..."
              break
            fi
            echo "Still waiting for broker log..."
            sleep 2
          done
          echo "current docker logs:"
          docker logs publishing_tools_subscriber

          echo "Checking logs of publishing_tools_subscriber for alert..."
          # The subscriber's custom script prints the received alert dictionary;
          # seeing that line proves the alert made it through the broker.
          for i in {1..30}; do
            if docker logs publishing_tools_subscriber 2>&1 | grep -q "Here is the alert dictionary I received"; then
              echo "Custom script ran successfully and produced the expected output."
              exit 0
            fi
            echo "Still waiting to see the alert in the log..."
            sleep 2
          done
          echo "Custom script did not run or produce the expected output."
          docker logs publishing_tools_subscriber
          exit 1

      - name: Confirm records written to coincidence_tier_archive table in snews_pg
        run: |
          echo "Confirming specific records are written to coincidence_tier_archive table in postgres..."
          max_retries=5
          for attempt in $(seq 1 $max_retries); do
            # -t (tuples only) -A (unaligned) -F"," gives plain CSV rows, so
            # wc -l counts data rows directly.
            docker exec snews_pg psql -U user -d snews_pg -c "SELECT * FROM coincidence_tier_archive" -t -A -F"," > records.csv
            record_count=$(wc -l < records.csv)
            if [ "$record_count" -eq 2 ]; then
              echo "Found 2 records in coincidence_tier_archive table. Verifying content..."
              # One archived record per publishing detector is expected.
              if grep -q "XENONnT" records.csv && grep -q "JUNO" records.csv; then
                echo "Records match the expected format and content."
                break
              else
                echo "Records do not match the expected format or content."
                cat records.csv
                exit 1
              fi
            else
              echo "Expected 2 records, but found $record_count. Retrying..."
            fi
            echo "Attempt $attempt failed. Retrying in 2 seconds..."
            sleep 2
            if [ $attempt -eq $max_retries ]; then
              echo "Failed to find 2 valid records after $max_retries attempts."
              cat records.csv
              exit 1
            fi
          done

      - name: ensure db_pipeline is still running and there is no "Listener Stopped" or "DONE" in logs
        run: |
          echo "Checking logs of db_pipeline..."
          docker logs db_pipeline > db_pipeline_logs.txt
          # Either marker means the listener terminated, which is a failure for
          # a long-running pipeline.
          if grep -q "Listener Stopped" db_pipeline_logs.txt || grep -q "DONE" db_pipeline_logs.txt; then
            echo "db_pipeline has stopped or completed. Exiting with error."
            cat db_pipeline_logs.txt
            exit 1
          fi
          echo "db_pipeline is still running and has not stopped or completed."
          echo "current db_pipeline docker logs:"
          docker logs db_pipeline
47 changes: 47 additions & 0 deletions Dockerfile.coincidence_system
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Image for the SNEWS coincidence-system service used in the CI fire drill.
# Clones the snews_cs repository, points its firedrill topics at the GitHub CI
# Kafka topics, and runs the coincidence system under poetry.
FROM python:3.11-bullseye

# Set working directory
WORKDIR /app

# Install system dependencies: git for the clone; build-essential/libpq-dev for
# Python packages with native extensions; gnupg/ca-certificates for the
# HTTPS apt mirror switch below.
# NOTE(review): apt-key is deprecated on bullseye — confirm the keyserver step
# is still required before touching this layer.
RUN apt-get update && \
    apt-get install -y --no-install-recommends gnupg apt-transport-https ca-certificates && \
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 112695A0E562B32A && \
    sed -i 's|http://deb.debian.org|https://deb.debian.org|g' /etc/apt/sources.list && \
    apt-get update --allow-releaseinfo-change && \
    apt-get install -y --no-install-recommends git build-essential libpq-dev && \
    rm -rf /var/lib/apt/lists/*

# Accept build arguments (hop/SCiMMA credentials passed in by the CI workflow)
ARG HOP_USERNAME
ARG HOP_PASSWORD

# Set environment variables so generate_firedrill_creds.sh and the running
# service can read them.
# NOTE(review): ENV bakes these values into the image layers — anyone with the
# image can read them. Acceptable only for throwaway CI credentials; use
# `docker build --secret` if this is ever reused with real secrets.
ENV HOP_USERNAME=${HOP_USERNAME}
ENV HOP_PASSWORD=${HOP_PASSWORD}

# Clone the repository (URL and branch are supplied as build args)
ARG REPO_URL
ARG BRANCH
RUN git clone --branch ${BRANCH} ${REPO_URL} .

# Point the firedrill observation/alert topics at the GitHub CI Kafka topics.
RUN sed -i 's/^FIREDRILL_OBSERVATION_TOPIC=.*/FIREDRILL_OBSERVATION_TOPIC=kafka:\/\/$\{HOP_BROKER\}\/snews\.experiments-github/' /app/snews_cs/etc/test-config.env
RUN sed -i 's/^FIREDRILL_ALERT_TOPIC=.*/FIREDRILL_ALERT_TOPIC=kafka:\/\/$\{HOP_BROKER\}\/snews\.alert-github/' /app/snews_cs/etc/test-config.env

# Generate the hop credential file (hop_creds.csv) from HOP_USERNAME/HOP_PASSWORD
COPY generate_firedrill_creds.sh /app/generate_firedrill_creds.sh
RUN chmod +x /app/generate_firedrill_creds.sh
RUN /app/generate_firedrill_creds.sh

# Install Poetry
RUN pip install --no-cache-dir poetry

# Install dependencies using Poetry
# NOTE(review): `poetry lock` regenerates the lockfile at build time, so builds
# are not pinned/reproducible — confirm this is intentional.
RUN poetry lock
RUN poetry install

## Register the generated hop credentials with hop's auth store
RUN poetry run hop auth add hop_creds.csv

## Command to run the coincidence system
CMD ["poetry", "run", "snews_cs", "run-coincidence", "--firedrill"]
50 changes: 50 additions & 0 deletions Dockerfile.publishing_tools
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Image for the SNEWS publishing tools (snews_pt) used in the CI fire drill.
# The container idles (see CMD) so the workflow can `docker exec` publish and
# subscribe commands into it.
FROM python:3.11-bullseye

WORKDIR /app

# Use bash for subsequent RUN instructions
SHELL ["/bin/bash", "-c"]

# Install system dependencies: git for the clone; build-essential/libpq-dev for
# native-extension Python packages; gnupg/ca-certificates for the HTTPS mirror.
# NOTE(review): apt-key is deprecated on bullseye — confirm the keyserver step
# is still required before touching this layer.
RUN apt-get update && \
    apt-get install -y --no-install-recommends gnupg apt-transport-https ca-certificates && \
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 112695A0E562B32A && \
    sed -i 's|http://deb.debian.org|https://deb.debian.org|g' /etc/apt/sources.list && \
    apt-get update --allow-releaseinfo-change && \
    apt-get install -y --no-install-recommends git build-essential libpq-dev && \
    rm -rf /var/lib/apt/lists/*

# Accept build arguments (hop/SCiMMA credentials passed in by the CI workflow)
ARG HOP_USERNAME
ARG HOP_PASSWORD

# Set environment variables for generate_firedrill_creds.sh and runtime use.
# NOTE(review): ENV bakes these values into the image layers; acceptable only
# for throwaway CI credentials.
ENV HOP_USERNAME=${HOP_USERNAME}
ENV HOP_PASSWORD=${HOP_PASSWORD}

# Clone the repository (URL and branch are supplied as build args)
ARG REPO_URL
ARG BRANCH
RUN git clone --branch ${BRANCH} ${REPO_URL} .


# Set observation and alert topics for github CI firedrill
RUN sed -i 's/^FIREDRILL_OBSERVATION_TOPIC=.*/FIREDRILL_OBSERVATION_TOPIC=kafka:\/\/$\{HOP_BROKER\}\/snews\.experiments-github/' /app/snews_pt/auxiliary/test-config.env
RUN sed -i 's/^FIREDRILL_ALERT_TOPIC=.*/FIREDRILL_ALERT_TOPIC=kafka:\/\/$\{HOP_BROKER\}\/snews\.alert-github/' /app/snews_pt/auxiliary/test-config.env

# Upgrade pip
RUN pip install --upgrade pip

# Install Python dependencies
RUN pip install -r requirements.txt

# Generate the hop credential file (hop_creds.csv) from HOP_USERNAME/HOP_PASSWORD
COPY generate_firedrill_creds.sh /app/generate_firedrill_creds.sh
RUN chmod +x /app/generate_firedrill_creds.sh
RUN /app/generate_firedrill_creds.sh

# Install the project and register credentials with hop's auth store
RUN pip install .
RUN hop auth add hop_creds.csv
RUN mkdir -p /app/output
# Bake the detector identity used when publishing messages in CI
RUN snews_pt set-name -n JUNO

# Command to keep the container running (workflow steps exec into it)
CMD ["tail", "-f", "/dev/null"]
46 changes: 46 additions & 0 deletions Dockerfile.snews_db_pipeline
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# Image for the SNEWS database pipeline used in the CI fire drill. Unlike the
# other images, this one copies the local build context (not a git clone),
# rewires its test config at the firedrill CI topics and the compose postgres
# service, then runs the detector listener under poetry.
FROM python:3.11-bullseye

# Set working directory
WORKDIR /app

# Accept build arguments (hop/SCiMMA and postgres credentials from the workflow)
ARG HOP_USERNAME
ARG HOP_PASSWORD
ARG POSTGRES_USER
ARG POSTGRES_PASSWORD
ARG POSTGRES_DB

# Set environment variables for generate_firedrill_creds.sh and runtime use.
# NOTE(review): ENV bakes these values into the image layers; acceptable only
# for throwaway CI credentials.
ENV HOP_USERNAME=${HOP_USERNAME}
ENV HOP_PASSWORD=${HOP_PASSWORD}
ENV POSTGRES_USER=${POSTGRES_USER}
ENV POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
ENV POSTGRES_DB=${POSTGRES_DB}

# Copy the local source tree; use bash for subsequent RUN instructions
COPY . /app
SHELL ["/bin/bash", "-c"]
RUN apt-get update && apt-get install -y --no-install-recommends git build-essential libpq-dev && rm -rf /var/lib/apt/lists/*


# Point the firedrill observation topic at the GitHub CI Kafka topic and the
# database URL at the compose postgres service (CI-local throwaway credentials).
RUN sed -i 's/^FIREDRILL_OBSERVATION_TOPIC=.*/FIREDRILL_OBSERVATION_TOPIC="kafka:\/\/$\{HOP_BROKER\}\/snews\.experiments-github"/' /app/snews_db/tests/etc/test-config.env
RUN sed -i 's|^DATABASE_URL=.*|DATABASE_URL="postgresql://user:password@snews_pg:5432/snews_pg"|' /app/snews_db/tests/etc/test-config.env


# Generate the hop credential file (hop_creds.csv) from HOP_USERNAME/HOP_PASSWORD.
# NOTE(review): `COPY . /app` above already includes this script; the explicit
# COPY is redundant but harmless.
COPY generate_firedrill_creds.sh /app/generate_firedrill_creds.sh
RUN chmod +x /app/generate_firedrill_creds.sh
RUN /app/generate_firedrill_creds.sh


# Install Poetry
RUN pip install --no-cache-dir poetry

# Install dependencies using Poetry
# NOTE(review): `poetry lock` regenerates the lockfile at build time, so builds
# are not pinned/reproducible — confirm this is intentional.
RUN poetry lock
RUN poetry install

## Register the generated hop credentials with hop's auth store
RUN poetry run hop auth add hop_creds.csv

## Command to run the database pipeline listener against the firedrill config
CMD ["poetry", "run", "snews_db", "--env", "/app/snews_db/tests/etc/test-config.env", "listen-to-detectors", "--firedrill"]
Loading
Loading