Skip to content

Commit 0ae7154

Browse files
committed
e2e tests PostgreSql Sink
1 parent 4e91070 commit 0ae7154

File tree

4 files changed

+173
-0
lines changed

4 files changed

+173
-0
lines changed

postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTime.feature

+99
Original file line numberDiff line numberDiff line change
@@ -144,3 +144,102 @@ Feature: PostgreSQL - Verify data transfer from BigQuery source to PostgreSQL si
144144
Then Open and capture logs
145145
Then Verify the pipeline status is "Succeeded"
146146
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table
147+
148+
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @Plugin-1526
Scenario Outline: To verify data is getting transferred from BigQuery source to PostgreSQL sink with Advanced operations Upsert and Update for table key
  # --- Design the pipeline: BigQuery source connected to a PostgreSQL sink ---
  Given Open Datafusion Project to configure pipeline
  When Expand Plugin group in the LHS plugins list: "Source"
  When Select plugin: "BigQuery" from the plugins list as: "Source"
  When Expand Plugin group in the LHS plugins list: "Sink"
  When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
  Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
  # --- Configure the BigQuery source and verify its schema ---
  Then Navigate to the properties page of plugin: "BigQuery"
  Then Replace input plugin property: "project" with value: "projectId"
  Then Enter input plugin property: "datasetProject" with value: "projectId"
  Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
  Then Enter input plugin property: "dataset" with value: "dataset"
  Then Enter input plugin property: "table" with value: "bqSourceTable"
  Then Click on the Get Schema button
  Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
  Then Validate "BigQuery" plugin properties
  Then Close the Plugin Properties page
  # --- Configure the PostgreSQL sink with the advanced operation under test ---
  Then Navigate to the properties page of plugin: "PostgreSQL"
  Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
  Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
  Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
  Then Replace input plugin property: "database" with value: "databaseName"
  Then Replace input plugin property: "tableName" with value: "targetTable"
  Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
  Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
  Then Enter input plugin property: "referenceName" with value: "targetRef"
  Then Replace input plugin property: "dbSchemaName" with value: "schema"
  # <options> is parameterised by the Examples table below (upsert / update)
  Then Select radio button plugin property: "operationName" with value: "<options>"
  Then Click on the Add Button of the property: "relationTableKey" with value:
    | PostgreSQLTableKey |
  Then Validate "PostgreSQL" plugin properties
  Then Close the Plugin Properties page
  # --- Preview, deploy, run, and validate the transferred records ---
  Then Save the pipeline
  Then Preview and run the pipeline
  Then Verify the preview of pipeline is "success"
  Then Click on preview data for PostgreSQL sink
  Then Close the preview data
  Then Deploy the pipeline
  Then Run the Pipeline in Runtime
  Then Wait till pipeline is in running state
  Then Open and capture logs
  Then Verify the pipeline status is "Succeeded"
  Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table
  Examples:
    | options |
    | upsert  |
    | update  |
196+
197+
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @CONNECTION @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink successfully using Connection
  # --- Design the pipeline: BigQuery source connected to a PostgreSQL sink ---
  Given Open Datafusion Project to configure pipeline
  When Expand Plugin group in the LHS plugins list: "Source"
  When Select plugin: "BigQuery" from the plugins list as: "Source"
  When Expand Plugin group in the LHS plugins list: "Sink"
  When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
  Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
  # --- Configure the BigQuery source ---
  Then Navigate to the properties page of plugin: "BigQuery"
  Then Replace input plugin property: "project" with value: "projectId"
  Then Enter input plugin property: "datasetProject" with value: "projectId"
  Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
  Then Enter input plugin property: "dataset" with value: "dataset"
  Then Enter input plugin property: "table" with value: "bqSourceTable"
  Then Click on the Get Schema button
  Then Validate "BigQuery" plugin properties
  Then Close the Plugin Properties page
  # --- Create a reusable PostgreSQL connection from the sink's properties page ---
  # "connection.name" is generated per scenario by the @CONNECTION before-hook in TestSetupHooks
  Then Navigate to the properties page of plugin: "PostgreSQL"
  And Click plugin property: "switch-useConnection"
  And Click on the Browse Connections button
  And Click on the Add Connection button
  Then Click plugin property: "connector-PostgreSQL"
  And Enter input plugin property: "name" with value: "connection.name"
  Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
  Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
  Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
  Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
  Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
  Then Replace input plugin property: "database" with value: "databaseName"
  Then Click on the Test Connection button
  And Verify the test connection is successful
  Then Click on the Create button
  # --- Use the new connection for the sink and finish its configuration ---
  Then Select connection: "connection.name"
  Then Enter input plugin property: "referenceName" with value: "targetRef"
  Then Replace input plugin property: "tableName" with value: "targetTable"
  Then Replace input plugin property: "dbSchemaName" with value: "schema"
  Then Validate "PostgreSQL" plugin properties
  Then Close the Plugin Properties page
  # --- Preview, deploy, run, and validate the transferred records ---
  Then Save the pipeline
  Then Preview and run the pipeline
  Then Verify the preview of pipeline is "success"
  Then Click on preview data for PostgreSQL sink
  Then Close the preview data
  Then Deploy the pipeline
  Then Run the Pipeline in Runtime
  Then Wait till pipeline is in running state
  Then Open and capture logs
  Then Verify the pipeline status is "Succeeded"
  Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTimeMacro.feature

+48
Original file line numberDiff line numberDiff line change
@@ -136,3 +136,51 @@ Feature: PostgreSQL - Verify data transfer to PostgreSQL sink with macro argumen
136136
Then Verify the pipeline status is "Succeeded"
137137
Then Close the pipeline logs
138138
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table
139+
140+
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @PLUGIN-1628 @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink using connection arguments and operations as macro
  # --- Design the pipeline: BigQuery source connected to a PostgreSQL sink ---
  Given Open Datafusion Project to configure pipeline
  When Expand Plugin group in the LHS plugins list: "Source"
  When Select plugin: "BigQuery" from the plugins list as: "Source"
  When Expand Plugin group in the LHS plugins list: "Sink"
  When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
  Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
  # --- Configure the BigQuery source, with project/dataset/table as macros ---
  Then Navigate to the properties page of plugin: "BigQuery"
  Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
  Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
  Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
  Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
  Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
  Then Validate "BigQuery" plugin properties
  Then Close the Plugin Properties page
  # --- Configure the PostgreSQL sink; connection arguments, operation, table and schema as macros ---
  Then Navigate to the properties page of plugin: "PostgreSQL"
  Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
  Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
  Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
  Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
  Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
  Then Enter input plugin property: "referenceName" with value: "targetRef"
  Then Replace input plugin property: "database" with value: "databaseName"
  Then Click on the Macro button of Property: "connectionArguments" and set the value to: "PostgreSQLConnectionArguments"
  Then Click on the Macro button of Property: "operationName" and set the value to: "PostgreSQLOperationName"
  Then Click on the Macro button of Property: "tableName" and set the value to: "PostgreSQLTableName"
  Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "PostgreSQLSchemaName"
  Then Validate "PostgreSQL" plugin properties
  Then Close the Plugin Properties page
  # --- Deploy and run, supplying one runtime argument per macro set above ---
  Then Save the pipeline
  Then Deploy the pipeline
  Then Run the Pipeline in Runtime
  Then Enter runtime argument value "projectId" for key "bqProjectId"
  Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
  Then Enter runtime argument value "dataset" for key "bqDataset"
  Then Enter runtime argument value "bqSourceTable" for key "bqTable"
  # BUGFIX: the runtime-argument key must match the macro name set above ("PostgreSQLConnectionArguments").
  # The original step supplied key "PostgreSQLConnectionArgumentsList", so that macro was never resolved at runtime.
  Then Enter runtime argument value "PostgreSQLConnectionArgumentsList" for key "PostgreSQLConnectionArguments"
  Then Enter runtime argument value "PostgreSQLOperationName" for key "PostgreSQLOperationName"
  Then Enter runtime argument value "targetTable" for key "PostgreSQLTableName"
  Then Enter runtime argument value "schema" for key "PostgreSQLSchemaName"
  Then Run the Pipeline in Runtime with runtime arguments
  Then Wait till pipeline is in running state
  Then Open and capture logs
  Then Verify the pipeline status is "Succeeded"
  Then Close the pipeline logs
  Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java

+23
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717
package io.cdap.plugin.common.stepsdesign;
1818

1919
import com.google.cloud.bigquery.BigQueryException;
20+
import io.cdap.e2e.pages.actions.CdfConnectionActions;
21+
import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions;
2022
import io.cdap.e2e.utils.BigQueryClient;
2123
import io.cdap.e2e.utils.PluginPropertyUtils;
2224
import io.cdap.plugin.PostgresqlClient;
@@ -159,4 +161,25 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile
159161
PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
160162
BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
161163
}
164+
165+
@Before(order = 1, value = "@CONNECTION")
166+
public static void setNewConnectionName() {
167+
String connectionName = "PostgreSql" + RandomStringUtils.randomAlphanumeric(10);
168+
PluginPropertyUtils.addPluginProp("connection.name", connectionName);
169+
BeforeActions.scenario.write("New Connection name: " + connectionName);
170+
}
171+
172+
private static void deleteConnection(String connectionType, String connectionName) throws IOException {
173+
CdfConnectionActions.openWranglerConnectionsPage();
174+
CdfConnectionActions.expandConnections(connectionType);
175+
CdfConnectionActions.openConnectionActionMenu(connectionType, connectionName);
176+
CdfConnectionActions.selectConnectionAction(connectionType, connectionName, "Delete");
177+
CdfPluginPropertiesActions.clickPluginPropertyButton("Delete");
178+
}
179+
180+
@After(order = 1, value = "@CONNECTION")
181+
public static void deleteBQConnection() throws IOException {
182+
deleteConnection("PostgreSql", "connection.name");
183+
PluginPropertyUtils.removePluginProp("connection.name");
184+
}
162185
}

postgresql-plugin/src/e2e-test/resources/pluginParameters.properties

+3
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,9 @@ invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table
7272
invalidBoundingQueryValue=select;
7373
invalidTable=table
7474
#POSTGRESQL Valid Properties
75+
PostgreSQLConnectionArgumentsList=fetchsize=1000
76+
PostgreSQLOperationName=INSERT
77+
PostgreSQLTableKey=col2
7578
connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}]
7679
connectionTimeout=150
7780
numberOfSplits=2

0 commit comments

Comments
 (0)