# Workflow file captured from PR #5518 — "[wip]feat(backend): postgres integration".
# (GitHub web-UI chrome from the original capture removed; workflow definition follows.)

name: KFP E2E Pipeline tests

# Workflow-level constants shared by every job below.
env:
  E2E_TESTS_DIR: "./backend/test/end2end"
  # Default ginkgo parallelism; overridden by workflow_dispatch input when provided.
  NUMBER_OF_PARALLEL_NODES: 10
  CLUSTER_NAME: "kfp"
  NAMESPACE: "kubeflow"
  PYTHON_VERSION: "3.9"
  USER_NAMESPACE: "kubeflow-user-example-com"
  # Populated at runtime (via $GITHUB_ENV) only for TLS-enabled matrix entries.
  CA_CERT_PATH: ""
on:
  push:
    branches: [master]
  pull_request:
    # Only run when files relevant to backend E2E behavior change.
    paths:
      - '.github/workflows/e2e-test.yml'
      - '.github/actions/create-cluster/**'
      - '.github/resources/**'
      - 'api/**'
      - 'go.mod'
      - 'go.sum'
      - 'backend/**'
      - 'proxy/**'
      - 'manifests/kustomize/**'
      - 'test_data/sdk_compiled_pipelines/**'
      # Documentation-only changes never need an E2E run.
      - '!**/*.md'
      - '!**/OWNERS'
  workflow_dispatch:
    inputs:
      test_label:
        description: "Test label that you want to filter on and run"
        default: 'ApiServerTests'
        required: true
        type: string
      number_of_parallel_tests:
        description: "Number of ginkgo nodes that you want run in parallel, it essentially is equivalent to number of parallel tests with some caveats"
        default: 10
        required: true
        type: number
      namespace:
        description: "Namespace where you want to create your pipelines in"
        default: "kubeflow"
        required: true
        type: string
# One in-flight run per PR (or per ref for pushes); newer runs cancel older ones.
concurrency:
  group: e2e-tests-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  # Build the KFP images once; both E2E jobs consume the image outputs.
  build:
    uses: ./.github/workflows/image-builds-with-cache.yml

  end-to-end-scenario-tests:
    runs-on: ubuntu-latest
    needs: build
    strategy:
      matrix:
        k8s_version: ["v1.34.0"]
        cache_enabled: ["true", "false"]
        argo_version: ["v3.7.3", "v3.6.7", "v3.5.14"]
        storage: ["seaweedfs", "minio"]
        proxy: ["false"]
        test_label: ["E2ECritical"]
        pod_to_pod_tls_enabled: ["false"]
        # Extra one-off combinations beyond the cartesian product above.
        # NOTE(review): keys omitted here (e.g. argo_version, storage) are unset
        # for the added combination — presumably the deploy action supplies
        # defaults; confirm against ./.github/actions/deploy.
        include:
          - k8s_version: "v1.29.2"
            cache_enabled: "false"
            argo_version: "v3.5.14"
            test_label: "E2ECritical"
          - k8s_version: "v1.34.0"
            cache_enabled: "false"
            proxy: "true"
            test_label: "E2EProxy"
          - k8s_version: "v1.34.0"
            cache_enabled: "false"
            test_label: "E2EEssential"
          - k8s_version: "v1.34.0"
            cache_enabled: "false"
            test_label: "E2EFailure"
          - k8s_version: "v1.34.0"
            cache_enabled: "true"
            pod_to_pod_tls_enabled: "true"
            test_label: "E2ECritical"
      fail-fast: false
    name: End to End ${{ matrix.test_label }} Tests - K8s ${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} argoVersion=${{ matrix.argo_version }} proxy=${{ matrix.proxy }} storage=${{ matrix.storage }} pod_to_pod_tls_enabled=${{ matrix.pod_to_pod_tls_enabled }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v5
      - name: Create cluster
        uses: ./.github/actions/create-cluster
        id: create-cluster
        with:
          k8s_version: ${{ matrix.k8s_version }}
          cluster_name: ${{ env.CLUSTER_NAME }}
      - name: Deploy KFP
        uses: ./.github/actions/deploy
        if: ${{ steps.create-cluster.outcome == 'success' }}
        id: deploy
        with:
          cache_enabled: ${{ matrix.cache_enabled }}
          argo_version: ${{ matrix.argo_version }}
          storage_backend: ${{ matrix.storage }}
          pod_to_pod_tls_enabled: ${{ matrix.pod_to_pod_tls_enabled }}
          image_path: ${{ needs.build.outputs.IMAGE_PATH }}
          image_tag: ${{ needs.build.outputs.IMAGE_TAG }}
          image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }}
      # Extract the cluster CA so TLS-enabled test runs can verify the API server cert.
      - name: Configure Cluster CA Cert for TLS-Enabled Tests
        shell: bash
        if: ${{ matrix.pod_to_pod_tls_enabled == 'true' }}
        run: |
          kubectl get secret kfp-api-tls-cert -n kubeflow -o jsonpath='{.data.ca\.crt}' | base64 -d > "${{ github.workspace }}/ca.crt"
          echo "CA_CERT_PATH=${{ github.workspace }}/ca.crt" >> "$GITHUB_ENV"
      # Resolve effective parallelism/label/namespace: workflow_dispatch inputs win
      # over the workflow-level env defaults.
      - name: Configure Input Variables
        shell: bash
        id: configure
        if: ${{ steps.deploy.outcome == 'success' }}
        run: |
          NUMBER_OF_NODES=${{ env.NUMBER_OF_PARALLEL_NODES }}
          TEST_LABEL=${{ matrix.test_label }}
          NAMESPACE=${{ env.NAMESPACE }}
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            NUMBER_OF_NODES=${{ inputs.number_of_parallel_tests }}
            TEST_LABEL=${{ inputs.test_label }}
            NAMESPACE=${{ inputs.namespace }}
          fi
          {
            echo "NUMBER_OF_NODES=$NUMBER_OF_NODES"
            echo "TEST_LABEL=$TEST_LABEL"
            echo "NAMESPACE=$NAMESPACE"
          } >> "$GITHUB_OUTPUT"
      # Best-effort: modelcar tests are skipped gracefully if this image is absent.
      - name: Build and upload the sample Modelcar image to Kind
        id: build-sample-modelcar-image
        if: ${{ steps.deploy.outcome == 'success' }}
        run: |
          docker build -f ./test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile -t registry.domain.local/modelcar:test .
          kind --name ${{ env.CLUSTER_NAME }} load docker-image registry.domain.local/modelcar:test
        continue-on-error: true
      - name: Run Tests
        uses: ./.github/actions/test-and-report
        id: test-run
        if: ${{ steps.configure.outcome == 'success' }}
        with:
          cache_enabled: ${{ matrix.cache_enabled }}
          test_directory: ${{ env.E2E_TESTS_DIR }}
          test_label: ${{ steps.configure.outputs.TEST_LABEL }}
          num_parallel_nodes: ${{ steps.configure.outputs.NUMBER_OF_NODES }}
          default_namespace: ${{ steps.configure.outputs.NAMESPACE }}
          python_version: ${{ env.PYTHON_VERSION }}
          report_name: "${{ matrix.test_label }}Tests_K8s=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_argoVersion=${{ matrix.argo_version }}_proxy=${{ matrix.proxy }}_storage=${{ matrix.storage }}"
          tls_enabled: ${{ matrix.pod_to_pod_tls_enabled }}
          ca_cert_path: ${{ env.CA_CERT_PATH }}
      - name: Notify test reports
        shell: bash
        if: ${{ steps.test-run.outcome == 'success' }}
        run: |
          # Workflow-command form is "::notice title=T::message" — the "::" before
          # the message is required for GitHub to render the annotation.
          echo "::notice title=Test Summary::Test Summary and HTML Report is now available in the Summary Tab"

  end-to-end-critical-scenario-multi-user-tests:
    runs-on: ubuntu-latest
    needs: build
    strategy:
      matrix:
        k8s_version: ["v1.34.0"]
        cache_enabled: ["true", "false"]
        storage: ["seaweedfs", "minio"]
        multi_user: ["true"]
        test_label: ["E2ECritical"]
        # Single extra combination exercising the artifact proxy path.
        include:
          - multi_user: "true"
            artifact_proxy: "true"
            storage: "seaweedfs"
            k8s_version: "v1.34.0"
            cache_enabled: "true"
            test_label: "E2ECritical"
      fail-fast: false
    name: End to End Critical Scenario Multi User Tests - K8s ${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} multiUser=${{ matrix.multi_user }} storage=${{ matrix.storage }} artifactProxy=${{ matrix.artifact_proxy }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v5
      - name: Create cluster
        uses: ./.github/actions/create-cluster
        id: create-cluster
        with:
          k8s_version: ${{ matrix.k8s_version }}
          cluster_name: ${{ env.CLUSTER_NAME }}
      - name: Deploy KFP
        uses: ./.github/actions/deploy
        if: ${{ steps.create-cluster.outcome == 'success' }}
        id: deploy
        with:
          cache_enabled: ${{ matrix.cache_enabled }}
          multi_user: "true"
          storage_backend: ${{ matrix.storage }}
          artifact_proxy: ${{ matrix.artifact_proxy }}
          image_path: ${{ needs.build.outputs.IMAGE_PATH }}
          image_tag: ${{ needs.build.outputs.IMAGE_TAG }}
          image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }}
      # Resolve effective parallelism/label/namespace: workflow_dispatch inputs win
      # over the workflow-level env defaults.
      - name: Configure Input Variables
        shell: bash
        id: configure
        if: ${{ steps.deploy.outcome == 'success' }}
        run: |
          NUMBER_OF_NODES=${{ env.NUMBER_OF_PARALLEL_NODES }}
          TEST_LABEL=${{ matrix.test_label }}
          NAMESPACE=${{ env.NAMESPACE }}
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            NUMBER_OF_NODES=${{ inputs.number_of_parallel_tests }}
            TEST_LABEL=${{ inputs.test_label }}
            NAMESPACE=${{ inputs.namespace }}
          fi
          {
            echo "NUMBER_OF_NODES=$NUMBER_OF_NODES"
            echo "TEST_LABEL=$TEST_LABEL"
            echo "NAMESPACE=$NAMESPACE"
          } >> "$GITHUB_OUTPUT"
      # Best-effort: modelcar tests are skipped gracefully if this image is absent.
      - name: Build and upload the sample Modelcar image to Kind
        id: build-sample-modelcar-image
        if: ${{ steps.deploy.outcome == 'success' }}
        run: |
          docker build -f ./test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile -t registry.domain.local/modelcar:test .
          kind --name ${{ env.CLUSTER_NAME }} load docker-image registry.domain.local/modelcar:test
        continue-on-error: true
      - name: Test Artifact Proxy
        id: test-artifact-proxy
        if: ${{ steps.deploy.outcome == 'success' && matrix.artifact_proxy == 'true' }}
        shell: bash
        env:
          USER_NAMESPACE: ${{ env.USER_NAMESPACE }}
        run: |
          ./test/artifact-proxy/test-artifact-proxy.sh "$USER_NAMESPACE"
      - name: Run Tests
        uses: ./.github/actions/test-and-report
        if: ${{ steps.configure.outcome == 'success' }}
        id: test-run
        env:
          LOCAL_API_SERVER: "true"
          MULTI_USER: ${{ matrix.multi_user }}
        with:
          multi_user: ${{ matrix.multi_user }}
          cache_enabled: ${{ matrix.cache_enabled }}
          test_directory: ${{ env.E2E_TESTS_DIR }}
          test_label: ${{ steps.configure.outputs.TEST_LABEL }}
          num_parallel_nodes: ${{ steps.configure.outputs.NUMBER_OF_NODES }}
          default_namespace: ${{ steps.configure.outputs.NAMESPACE }}
          python_version: ${{ env.PYTHON_VERSION }}
          user_namespace: ${{ env.USER_NAMESPACE }}
          report_name: "E2EMultiUserTests_K8s=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_multiUser=${{ matrix.multi_user }}_storage=${{ matrix.storage }}"
      - name: Notify test reports
        shell: bash
        if: ${{ steps.test-run.outcome == 'success' }}
        run: |
          # Workflow-command form is "::notice title=T::message" — the "::" before
          # the message is required for GitHub to render the annotation.
          echo "::notice title=Test Summary::Test Summary and HTML Report is now available in the Summary Tab"