Commit f0c2b16

priyabhatnagar25 authored and bharatgulati committed
BQMT e2e Tests Scenarios
1 parent dc04dc6 · commit f0c2b16

16 files changed: +828 −3 lines

.github/workflows/e2e.yml

Lines changed: 29 additions & 2 deletions
@@ -1,4 +1,4 @@
-# Copyright © 2021 Cask Data, Inc.
+# Copyright © 2021-2023 Cask Data, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License"); you may not
 # use this file except in compliance with the License. You may obtain a copy of
 # the License at
@@ -12,6 +12,7 @@
 # This workflow will build a Java project with Maven
 # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
 # Note: Any changes to this workflow would be used only after merging into develop
+
 name: Build e2e tests
 
 on:
@@ -40,7 +41,8 @@ jobs:
       )
     strategy:
       matrix:
-        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute]
+        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute,
+                bigquerymultitable]
       fail-fast: false
     steps:
       # Pinned 1.0.0 version
@@ -68,24 +70,49 @@
           key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
           restore-keys: |
             ${{ runner.os }}-maven-${{ github.workflow }}
+
+      - name: Get Secrets from GCP Secret Manager
+        id: secrets
+        uses: 'google-github-actions/get-secretmanager-secrets@v0'
+        with:
+          secrets: |-
+            MYSQL_HOST:cdapio-github-builds/MYSQL_HOST
+            MYSQL_USERNAME:cdapio-github-builds/MYSQL_USERNAME
+            MYSQL_PASSWORD:cdapio-github-builds/MYSQL_PASSWORD
+            MYSQL_PORT:cdapio-github-builds/MYSQL_PORT
+
       - name: Run required e2e tests
         if: github.event_name != 'workflow_dispatch' && github.event_name != 'push' && steps.filter.outputs.e2e-test == 'false'
         run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunnerRequired.java
+        env:
+          MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
+          MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
+          MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
+          MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
+
       - name: Run all e2e tests
         if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' || steps.filter.outputs.e2e-test == 'true'
         run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunner.java
+        env:
+          MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
+          MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
+          MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
+          MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
+
       - name: Upload report
         uses: actions/upload-artifact@v3
         if: always()
         with:
           name: Cucumber report - ${{ matrix.tests }}
           path: ./plugin/target/cucumber-reports
+
       - name: Upload debug files
         uses: actions/upload-artifact@v3
         if: always()
         with:
           name: Debug files - ${{ matrix.tests }}
           path: ./**/target/e2e-debug
+
       - name: Upload files to GCS
         uses: google-github-actions/upload-cloud-storage@v0
         if: always()
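
With the new matrix entry, the "Run all e2e tests" step expands its glob to **/bigquerymultitable/**/TestRunner.java, so the repo is expected to provide a Cucumber JUnit runner matching that path. A minimal sketch of what such a runner typically looks like — the package, feature path, glue package, and report plugin below are illustrative assumptions (modeled on a recent cucumber-junit), not taken from this commit:

package io.cdap.plugin.bigquerymultitable.runners; // hypothetical package

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

// Picked up by run_e2e_test.py through the matrix-expanded --testRunner glob.
@RunWith(Cucumber.class)
@CucumberOptions(
    features = "src/e2e-test/features",                     // assumed feature-file location
    glue = "io.cdap.plugin.bigquerymultitable.stepsdesign", // assumed step-definition package
    tags = "@BQMT_SINK",                                    // the tag used by the new feature files
    plugin = "html:target/cucumber-reports"                 // matches the Upload report artifact path
)
public class TestRunner {
}

A sibling TestRunnerRequired class filtered on @BQMT_Required would back the "Run required e2e tests" step in the same way.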

pom.xml

Lines changed: 8 additions & 0 deletions
@@ -1,4 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
 <!--
   ~ Copyright © 2016-2019 Cask Data, Inc.
   ~
@@ -1157,6 +1158,7 @@
           <execution>
             <goals>
               <goal>integration-test</goal>
+              <goal>verify</goal>
             </goals>
           </execution>
         </executions>
@@ -1204,6 +1206,12 @@
       <version>0.3.0-SNAPSHOT</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>mysql</groupId>
+      <artifactId>mysql-connector-java</artifactId>
+      <version>8.0.19</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>ch.qos.logback</groupId>
       <artifactId>logback-classic</artifactId>
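
Two build changes support the new tests: the added verify goal makes the Failsafe plugin fail the Maven build when the integration-test phase records failures (which it otherwise only logs), and the test-scoped MySQL JDBC driver lets the e2e code reach the MySQL instance whose coordinates the workflow injects as MYSQL_* environment variables. A minimal sketch of how test utilities could consume those variables — the class and method names are illustrative, not from this commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Hypothetical helper: connects to the MySQL instance whose coordinates the
// workflow exposes via the Secret Manager-backed env block.
public final class MysqlTestClient {

  public static Connection connect() throws Exception {
    String url = String.format("jdbc:mysql://%s:%s/",
        System.getenv("MYSQL_HOST"),   // from cdapio-github-builds/MYSQL_HOST
        System.getenv("MYSQL_PORT"));  // from cdapio-github-builds/MYSQL_PORT
    // mysql-connector-java 8.x registers its driver automatically via SPI.
    return DriverManager.getConnection(url,
        System.getenv("MYSQL_USERNAME"),
        System.getenv("MYSQL_PASSWORD"));
  }

  public static void main(String[] args) throws Exception {
    try (Connection conn = connect();
         Statement stmt = conn.createStatement()) {
      stmt.execute("SELECT 1"); // smoke check that driver and credentials work
    }
  }
}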
Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BQMT_SINK
+Feature: BigQueryMultiTable sink - Validate BigQueryMultiTable sink plugin error scenarios
+
+  @BQMT_Required
+  Scenario Outline: Verify BigQueryMultiTable Sink properties validation errors for mandatory fields
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    Then Click on the Validate button
+    Then Validate mandatory property error for "<property>"
+    Examples:
+      | property |
+      | dataset  |
+
+  Scenario: Verify BQMT Sink properties validation errors for incorrect value of chunk size
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "gcsChunkSize" with value: "bqmtInvalidChunkSize"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "gcsChunkSize" is displaying an in-line error message: "errorMessageIncorrectBQMTChunkSize"
+
+  @BQMT_Required
+  Scenario: Verify BQMT Sink properties validation errors for incorrect dataset
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "dataset" with value: "bqmtInvalidSinkDataset"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "dataset" is displaying an in-line error message: "errorMessageIncorrectBQMTDataset"
+
+  Scenario: Verify BQMT Sink properties validation errors for incorrect reference name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "referenceName" with value: "bqmtInvalidSinkReferenceName"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageIncorrectBQMTReferenceName"
+
+  Scenario: Verify BQMT Sink properties validation errors for incorrect value of temporary bucket name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "bucket" with value: "bqmtInvalidTemporaryBucket"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "bucket" is displaying an in-line error message: "errorMessageIncorrectBQMTBucketName"
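
These Gherkin steps bind by their text to Java step definitions, most of which come from the shared CDAP e2e framework rather than this repository. Purely to illustrate the binding mechanism, a hedged sketch of what a definition for the in-line error step could look like (class, package, and body are hypothetical; the real implementation lives in the framework):

import io.cucumber.java.en.Then;

public class BQMTValidationSteps { // hypothetical class; real steps live in the shared e2e framework

  // The quoted parts of the Gherkin step are captured as parameters; the second
  // argument is a key resolved to the expected message from a properties file.
  @Then("Verify that the Plugin Property: {string} is displaying an in-line error message: {string}")
  public void verifyInlineError(String property, String errorMessageKey) {
    // 1. Look up the expected text for errorMessageKey (e.g. errorMessageIncorrectBQMTChunkSize).
    // 2. Locate the in-line error element rendered under the given property field.
    // 3. Assert the displayed text matches the expected message.
  }
}

The quoted keys in the scenarios (bqmtInvalidChunkSize, errorMessageIncorrectBQMTDataset, and so on) are likewise indirections into test-data and error-message property files, not literal values.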
Lines changed: 131 additions & 0 deletions
@@ -0,0 +1,131 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BQMT_SINK
+Feature: BigQueryMultiTable sink - Verification of Multiple Database Tables to BigQueryMultiTable successful data transfer using macros
+
+  @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required @PLUGIN-1669
+  Scenario: Verify data is getting transferred from Multiple Database Tables to BQMT sink with all datatypes using macros
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "Multiple Database Tables"
+    Then Replace input plugin property: "referenceName" with value: "ref"
+    Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+    Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+    Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+    And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+    Then Click on the Add Button of the property: "sqlStatements" with value:
+      | selectQuery |
+    Then Validate "Multiple Database Tables" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+    Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+    Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
+    Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
+    Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchema"
+    Then Validate "BigQuery Multi Table" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "projectId" for key "bqProjectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+    Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+    Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+    Then Enter runtime argument value "dataset" for key "bqDataset"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "projectId" for key "bqProjectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+    Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+    Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+    Then Enter runtime argument value "dataset" for key "bqDataset"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
+
+  @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required @PLUGIN-1669
+  Scenario: Verify data is getting transferred from Multiple Database Tables to BQMT sink with split field using macros
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "Multiple Database Tables"
+    Then Replace input plugin property: "referenceName" with value: "ref"
+    Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+    Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+    Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+    And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+    Then Click on the Add Button of the property: "sqlStatements" with value:
+      | selectQuery |
+    Then Validate "Multiple Database Tables" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+    Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+    Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
+    Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
+    Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+    Then Click on the Macro button of Property: "SplitField" and set the value to: "bqmtSplitField"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchema"
+    Then Validate "BigQuery Multi Table" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "projectId" for key "bqProjectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+    Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+    Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+    Then Enter runtime argument value "dataset" for key "bqDataset"
+    Then Enter runtime argument value "splitField" for key "bqmtSplitField"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "projectId" for key "bqProjectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+    Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+    Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+    Then Enter runtime argument value "dataset" for key "bqDataset"
+    Then Enter runtime argument value "splitField" for key "bqmtSplitField"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
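
A pattern worth noting in both scenarios: each "Click on the Macro button" step replaces the property's value with a CDAP macro token, and the matching "Enter runtime argument value ... for key ..." step supplies that token's value, which is why every macro key reappears once before the preview run and once before the deployed run. Illustratively, for the dataset property (values are placeholders):

dataset   = ${bqDataset}      <- property value after the Macro button step
bqDataset = <actual dataset>  <- runtime argument entered before running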
