Skip to content

Merge pull request #139 from khaledk2/fix_clean_quries #774

Merge pull request #139 from khaledk2/fix_clean_quries

Merge pull request #139 from khaledk2/fix_clean_quries #774

Workflow file for this run

name: Build
on:
  push:
    # NOTE(review): empty tag/branch filters — presumably intended to match
    # every pushed tag and branch; confirm this is deliberate.
    tags:
    branches:
  pull_request:
    branches: [ main ]
jobs:
  build:
    runs-on: ubuntu-24.04
    services:
      # PostgreSQL service container used by the indexing tests below.
      postgres:
        image: postgres:17
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
          # NOTE(review): "postgress" looks like a typo for "postgres"; it is
          # harmless here because the job creates its own databases (omero,
          # omero_train) later — confirm before renaming.
          POSTGRES_DB: postgress
          # NOTE(review): the official postgres image does not read
          # max_wal_size from the environment, so this is likely ineffective
          # as written — confirm intent (it would need a command-line flag).
          max_wal_size: 4GB
        ports:
          # Expose on a random host port; resolved via job.services.postgres.ports[5432].
          - 5432/tcp
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
elasticsearch:
image: elastic/elasticsearch:9.1.1
ports:
- 9200/tcp
env:
es_api_basic_auth_username: "elastic"
ELASTIC_PASSWORD: "elasticsearch_user_password"
options: -e="discovery.type=single-node" --health-cmd="curl -k -u elastic:elasticsearch_user_password https://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.12
uses: actions/setup-python@v4
with:
python-version: 3.12
#test the app before bulding the image
- name: Install dependencies and test
run: |
python3 -m pip install --upgrade pip
pip3 install -r requirements.txt
export FLASK_APP=commands.py
# Configure database url
# rename data source
flask rename_data_source -c idr -n omero1
flask set_database_configuration -u localhost -s ${{ job.services.postgres.ports[5432] }} -n postgres -p password -w omero1 -d omero -b omero.pgdump
export PGPASSWORD=password;
echo $PGPASSWORD
psql -h localhost -U postgres -p ${{ job.services.postgres.ports[5432] }} -c 'CREATE DATABASE omero'
# configure elasticsearch
flask set_elasticsearch_configuration -e https://localhost:${{ job.services.elasticsearch.ports[9200] }}
# download and extract the database backup file
wget https://downloads.openmicroscopy.org/public-resources/omero_db_searchengine.zip -P app_data
unzip app_data/omero_db_searchengine.zip -d app_data/
# Restore OMERO database
flask restore_postgresql_database
# run indexing indexing
flask get_index_data_from_database -b False -t True
# Set up the second database
rm app_data/omero_db_searchengine.zip
rm app_data/omero.pgdump
wget https://downloads.openmicroscopy.org/public-resources/omero_train.zip -P app_data
unzip app_data/omero_train.zip -d app_data/
rm app_data/omero_train.zip
flask set_database_configuration -u localhost -s ${{ job.services.postgres.ports[5432] }} -n postgres -p password -w omero_train -d omero_train -b omero_train.pgdump
echo $PGPASSWORD
psql -h localhost -U postgres -p ${{ job.services.postgres.ports[5432] }} -c 'CREATE DATABASE omero_train'
flask restore_postgresql_database -s omero_train
flask get_index_data_from_database -b False -d omero_train -t True
rm app_data/omero_train.pgdump
# set csv data source
flask set_data_source_files -n test_csv
# download the images CSV file
wget https://downloads.openmicroscopy.org/public-resources/test_images.zip -P app_data
unzip app_data/test_images.zip -d app_data/
rm app_data/test_images.zip
flask get_index_data_from_csv_files -d test_csv -f app_data/test_images.csv -r image -u True
# run tests
python3 -m unittest discover -s unit_tests/indexing_tests
python3 -m unittest discover -s unit_tests/queries_tests
python3 -m unittest discover -s unit_tests/datasource_tests
upload:
needs: build
if: startsWith(github.ref, 'refs/tags')
runs-on: ubuntu-latest
env:
name: openmicroscopy/omero-searchengine
steps:
- name: Get prefix
id: getprefix
run: |
if [ ! -z ${{ env.name }} ]; then
echo "prefix=${{ env.name }}:" >> $GITHUB_OUTPUT
else
echo "prefix=${{ github.repository }}:" >> $GITHUB_OUTPUT
fi
- name: Get other tags
id: gettags
uses: jupyterhub/action-major-minor-tag-calculator@v1.1.0
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
prefix: "${{ steps.getprefix.outputs.prefix }}"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- uses: actions/checkout@v2
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: display docker account
run: echo ${{ secrets.DOCKER_LOGIN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: deployment/docker/rockylinux/Dockerfile
push: true
tags: ${{ join(fromJson(steps.gettags.outputs.tags)) }}