Skip to content

Commit 568fff4

Browse files
BQMT e2e Tests Scenarios
1 parent 4a086e7 commit 568fff4

16 files changed

+832
-5
lines changed

.github/workflows/e2e.yml

Lines changed: 34 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright © 2021 Cask Data, Inc.
1+
# Copyright © 2021-2023 Cask Data, Inc.
22
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
33
# use this file except in compliance with the License. You may obtain a copy of
44
# the License at
@@ -12,6 +12,7 @@
1212
# This workflow will build a Java project with Maven
1313
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
1414
# Note: Any changes to this workflow would be used only after merging into develop
15+
1516
name: Build e2e tests
1617

1718
on:
@@ -40,7 +41,8 @@ jobs:
4041
)
4142
strategy:
4243
matrix:
43-
tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy]
44+
tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute,
45+
bigquerymultitable, gcscopy]
4446
fail-fast: false
4547
steps:
4648
# Pinned 1.0.0 version
@@ -68,24 +70,52 @@ jobs:
6870
key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
6971
restore-keys: |
7072
${{ runner.os }}-maven-${{ github.workflow }}
73+
74+
- name: Get Secrets from GCP Secret Manager
75+
id: secrets
76+
uses: 'google-github-actions/get-secretmanager-secrets@v0'
77+
with:
78+
secrets: |-
79+
MYSQL_HOST:cdapio-github-builds/MYSQL_HOST
80+
MYSQL_USERNAME:cdapio-github-builds/MYSQL_USERNAME
81+
MYSQL_PASSWORD:cdapio-github-builds/MYSQL_PASSWORD
82+
MYSQL_PORT:cdapio-github-builds/MYSQL_PORT
83+
BQMT_CONNECTION_STRING:cdapio-github-builds/BQMT_CONNECTION_STRING
84+
7185
- name: Run required e2e tests
7286
if: github.event_name != 'workflow_dispatch' && github.event_name != 'push' && steps.filter.outputs.e2e-test == 'false'
7387
run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunnerRequired.java
88+
env:
89+
MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
90+
MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
91+
MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
92+
MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
93+
BQMT_CONNECTION_STRING: ${{ steps.secrets.outputs.BQMT_CONNECTION_STRING }}
94+
7495
- name: Run all e2e tests
7596
if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' || steps.filter.outputs.e2e-test == 'true'
7697
run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunner.java
77-
- name: Upload report
98+
env:
99+
MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
100+
MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
101+
MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
102+
MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
103+
BQMT_CONNECTION_STRING: ${{ steps.secrets.outputs.BQMT_CONNECTION_STRING }}
104+
105+
- name: Upload report
78106
uses: actions/upload-artifact@v3
79107
if: always()
80108
with:
81109
name: Cucumber report - ${{ matrix.tests }}
82110
path: ./plugin/target/cucumber-reports
83-
- name: Upload debug files
111+
112+
- name: Upload debug files
84113
uses: actions/upload-artifact@v3
85114
if: always()
86115
with:
87116
name: Debug files - ${{ matrix.tests }}
88117
path: ./**/target/e2e-debug
118+
89119
- name: Upload files to GCS
90120
uses: google-github-actions/upload-cloud-storage@v0
91121
if: always()

pom.xml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
<?xml version="1.0" encoding="UTF-8"?>
2+
23
<!--
34
~ Copyright © 2016-2019 Cask Data, Inc.
45
~
@@ -1204,6 +1205,12 @@
12041205
<version>0.4.0-SNAPSHOT</version>
12051206
<scope>test</scope>
12061207
</dependency>
1208+
<dependency>
1209+
<groupId>mysql</groupId>
1210+
<artifactId>mysql-connector-java</artifactId>
1211+
<version>8.0.19</version>
1212+
<scope>test</scope>
1213+
</dependency>
12071214
<dependency>
12081215
<groupId>ch.qos.logback</groupId>
12091216
<artifactId>logback-classic</artifactId>
Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
# Copyright © 2023 Cask Data, Inc.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
4+
# use this file except in compliance with the License. You may obtain a copy of
5+
# the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11+
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12+
# License for the specific language governing permissions and limitations under
13+
# the License.
14+
15+
@BQMT_SINK
16+
Feature: BigQueryMultiTable sink - Validate BigQueryMultiTable sink plugin error scenarios
17+
18+
@BQMT_Required
19+
Scenario Outline: Verify BigQueryMultiTable Sink properties validation errors for mandatory fields
20+
Given Open Datafusion Project to configure pipeline
21+
When Expand Plugin group in the LHS plugins list: "Sink"
22+
When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
23+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
24+
Then Click on the Validate button
25+
Then Validate mandatory property error for "<property>"
26+
Examples:
27+
| property |
28+
| dataset |
29+
30+
Scenario: Verify BQMT Sink properties validation errors for incorrect value of chunk size
31+
Given Open Datafusion Project to configure pipeline
32+
When Expand Plugin group in the LHS plugins list: "Sink"
33+
When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
34+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
35+
And Enter input plugin property: "referenceName" with value: "Reference"
36+
And Replace input plugin property: "project" with value: "projectId"
37+
And Enter input plugin property: "dataset" with value: "dataset"
38+
Then Override Service account details if set in environment variables
39+
Then Enter input plugin property: "gcsChunkSize" with value: "bqmtInvalidChunkSize"
40+
Then Click on the Validate button
41+
Then Verify that the Plugin Property: "gcsChunkSize" is displaying an in-line error message: "errorMessageIncorrectBQMTChunkSize"
42+
43+
@BQMT_Required
44+
Scenario: Verify BQMT Sink properties validation errors for incorrect dataset
45+
Given Open Datafusion Project to configure pipeline
46+
When Expand Plugin group in the LHS plugins list: "Sink"
47+
When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
48+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
49+
And Enter input plugin property: "referenceName" with value: "Reference"
50+
And Replace input plugin property: "project" with value: "projectId"
51+
Then Override Service account details if set in environment variables
52+
Then Enter input plugin property: "dataset" with value: "bqmtInvalidSinkDataset"
53+
Then Click on the Validate button
54+
Then Verify that the Plugin Property: "dataset" is displaying an in-line error message: "errorMessageIncorrectBQMTDataset"
55+
56+
Scenario: Verify BQMT Sink properties validation errors for incorrect reference name
57+
Given Open Datafusion Project to configure pipeline
58+
When Expand Plugin group in the LHS plugins list: "Sink"
59+
When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
60+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
61+
And Replace input plugin property: "project" with value: "projectId"
62+
And Enter input plugin property: "dataset" with value: "dataset"
63+
Then Override Service account details if set in environment variables
64+
Then Enter input plugin property: "referenceName" with value: "bqmtInvalidSinkReferenceName"
65+
Then Click on the Validate button
66+
Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageIncorrectBQMTReferenceName"
67+
68+
Scenario: Verify BQMT Sink properties validation errors for incorrect value of temporary bucket name
69+
Given Open Datafusion Project to configure pipeline
70+
When Expand Plugin group in the LHS plugins list: "Sink"
71+
When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
72+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
73+
And Enter input plugin property: "referenceName" with value: "Reference"
74+
And Replace input plugin property: "project" with value: "projectId"
75+
And Enter input plugin property: "dataset" with value: "dataset"
76+
Then Override Service account details if set in environment variables
77+
Then Enter input plugin property: "bucket" with value: "bqmtInvalidTemporaryBucket"
78+
Then Click on the Validate button
79+
Then Verify that the Plugin Property: "bucket" is displaying an in-line error message: "errorMessageIncorrectBQMTBucketName"
Lines changed: 133 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,133 @@
1+
# Copyright © 2023 Cask Data, Inc.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
4+
# use this file except in compliance with the License. You may obtain a copy of
5+
# the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11+
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12+
# License for the specific language governing permissions and limitations under
13+
# the License.
14+
15+
@BQMT_SINK
16+
Feature: BigQueryMultiTable sink - Verification of Multiple Database Tables to BigQueryMultiTable successful data transfer using macros
17+
18+
@MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
19+
Scenario: Verify data is getting transferred from Multiple Database Tables to BQMT sink with all datatypes using macros
20+
Given Open Datafusion Project to configure pipeline
21+
When Expand Plugin group in the LHS plugins list: "Source"
22+
When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
23+
When Expand Plugin group in the LHS plugins list: "Sink"
24+
When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
25+
Then Navigate to the properties page of plugin: "Multiple Database Tables"
26+
Then Replace input plugin property: "referenceName" with value: "ref"
27+
Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
28+
Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
29+
Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
30+
Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
31+
And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
32+
Then Click on the Add Button of the property: "sqlStatements" with value:
33+
| selectQuery |
34+
Then Click on the Validate button
35+
And Close the Plugin Properties page
36+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
37+
And Enter input plugin property: "referenceName" with value: "Reference"
38+
Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
39+
Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
40+
Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
41+
Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
42+
Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
43+
Then Click plugin property: "truncateTable"
44+
Then Click plugin property: "allowSchema"
45+
Then Click on the Validate button
46+
And Close the Plugin Properties page
47+
Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
48+
Then Save the pipeline
49+
# Then Preview and run the pipeline
50+
# Then Enter runtime argument value "projectId" for key "bqProjectId"
51+
# Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
52+
# Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
53+
# Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
54+
# Then Enter runtime argument value "dataset" for key "bqDataset"
55+
# Then Run the preview of pipeline with runtime arguments
56+
# Then Wait till pipeline preview is in running state
57+
# Then Open and capture pipeline preview logs
58+
# Then Verify the preview of pipeline is "succeeded"
59+
# Then Close the pipeline logs
60+
# Then Close the preview
61+
Then Deploy the pipeline
62+
Then Run the Pipeline in Runtime
63+
Then Enter runtime argument value "projectId" for key "bqProjectId"
64+
Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
65+
Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
66+
Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
67+
Then Enter runtime argument value "dataset" for key "bqDataset"
68+
Then Run the Pipeline in Runtime with runtime arguments
69+
Then Wait till pipeline is in running state
70+
Then Open and capture logs
71+
Then Verify the pipeline status is "Succeeded"
72+
Then Close the pipeline logs
73+
Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
74+
75+
@MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
76+
Scenario: Verify data is getting transferred from Multiple Database Tables to BQMT sink with split field using macros
77+
Given Open Datafusion Project to configure pipeline
78+
When Expand Plugin group in the LHS plugins list: "Source"
79+
When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
80+
When Expand Plugin group in the LHS plugins list: "Sink"
81+
When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
82+
Then Navigate to the properties page of plugin: "Multiple Database Tables"
83+
Then Replace input plugin property: "referenceName" with value: "ref"
84+
Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
85+
Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
86+
Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
87+
Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
88+
And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
89+
Then Click on the Add Button of the property: "sqlStatements" with value:
90+
| selectQuery |
91+
Then Click on the Validate button
92+
And Close the Plugin Properties page
93+
Then Navigate to the properties page of plugin: "BigQuery Multi Table"
94+
And Enter input plugin property: "referenceName" with value: "Reference"
95+
Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
96+
Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
97+
Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
98+
Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
99+
Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
100+
Then Click on the Macro button of Property: "splitField" and set the value to: "bqmtSplitField"
101+
Then Click plugin property: "truncateTable"
102+
Then Click plugin property: "allowSchema"
103+
Then Click on the Validate button
104+
And Close the Plugin Properties page
105+
Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
106+
Then Save the pipeline
107+
Then Preview and run the pipeline
108+
Then Enter runtime argument value "projectId" for key "bqProjectId"
109+
Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
110+
Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
111+
Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
112+
Then Enter runtime argument value "dataset" for key "bqDataset"
113+
Then Enter runtime argument value "bqmtSplitField" for key "bqmtSplitField"
114+
Then Run the preview of pipeline with runtime arguments
115+
Then Wait till pipeline preview is in running state
116+
Then Open and capture pipeline preview logs
117+
Then Verify the preview of pipeline is "succeeded"
118+
Then Close the pipeline logs
119+
Then Close the preview
120+
Then Deploy the pipeline
121+
Then Run the Pipeline in Runtime
122+
Then Enter runtime argument value "projectId" for key "bqProjectId"
123+
Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
124+
Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
125+
Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
126+
Then Enter runtime argument value "dataset" for key "bqDataset"
127+
Then Enter runtime argument value "bqmtSplitField" for key "bqmtSplitField"
128+
Then Run the Pipeline in Runtime with runtime arguments
129+
Then Wait till pipeline is in running state
130+
Then Open and capture logs
131+
Then Verify the pipeline status is "Succeeded"
132+
Then Close the pipeline logs
133+
Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table

0 commit comments

Comments
 (0)