From b68a9866fc92f192db32540ef0321e2d47a811c2 Mon Sep 17 00:00:00 2001 From: AnkitCLI Date: Thu, 30 Jan 2025 15:08:21 +0530 Subject: [PATCH] e2e tests PostgreSql Sink --- .../postgresql/sink/PostgresqlRunTime.feature | 148 ++++++++++++++++++ .../sink/PostgresqlRunTimeMacro.feature | 48 ++++++ .../resources/pluginParameters.properties | 3 + 3 files changed, 199 insertions(+) diff --git a/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTime.feature b/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTime.feature index 0ad323979..261c5fb61 100644 --- a/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTime.feature +++ b/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTime.feature @@ -144,3 +144,151 @@ Feature: PostgreSQL - Verify data transfer from BigQuery source to PostgreSQL si Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table + + @POSTGRESQL_SOURCE_TEST @Postgresql_Required @POSTGRESQL_SINK_TEST @Plugin-1526 + Scenario: To verify data is getting transferred from PostgreSQL source to PostgreSQL sink with Advanced operations Upsert for table key + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "PostgreSQL" and "PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related 
fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Select radio button plugin property: "operationName" with value: "upsert" + Then Click on the Add Button of the property: "relationTableKey" with value: + | PostgreSQLTableKey | + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview 
of pipeline is "success" + Then Click on preview data for PostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table + + @POSTGRESQL_SOURCE_TEST @Postgresql_Required @POSTGRESQL_SINK_TEST @Plugin-1526 + Scenario: To verify data is getting transferred from PostgreSQL source to PostgreSQL sink with Advanced operations Update for table key + Given Open Datafusion Project to configure pipeline + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "PostgreSQL" and "PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "PostgreSQL" plugin 
properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Select radio button plugin property: "operationName" with value: "update" + Then Click on the Add Button of the property: "relationTableKey" with value: + | PostgreSQLTableKey | + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for PostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table + + @POSTGRESQL_SOURCE_TEST @POSTGRESQL_SINK_TEST @Postgresql_Required @CONNECTION @Plugin-1526 + Scenario: To verify data is getting transferred from PostgreSQL source to PostgreSQL sink successfully using Connection + Given Open Datafusion Project to configure 
pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "PostgreSQL" and "PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "PostgreSQL2" + And Click plugin property: "switch-useConnection" + And Click on the Browse Connections button + And Click on the Add Connection button + Then Click plugin property: "connector-PostgreSQL" + And Enter input plugin property: "name" with value: "connection.name" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input 
plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Test Connection button + And Verify the test connection is successful + Then Click on the Create button + Then Select connection: "connection.name" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for PostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table diff --git a/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTimeMacro.feature b/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTimeMacro.feature index 9d0971610..bfa08c09d 100644 --- a/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTimeMacro.feature +++ b/postgresql-plugin/src/e2e-test/features/postgresql/sink/PostgresqlRunTimeMacro.feature @@ -136,3 +136,51 @@ Feature: PostgreSQL - Verify data transfer to PostgreSQL sink with macro argumen Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST 
@Postgresql_Required @POSTGRESQL_TEST_TABLE @Plugin-1526 + Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink using connection arguments and operations as macro + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button 
of Property: "connectionArguments" and set the value to: "PostgreSQLConnectionArguments" + Then Click on the Macro button of Property: "operationName" and set the value to: "PostgreSQLOperationName" + Then Click on the Macro button of Property: "tableName" and set the value to: "PostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "PostgreSQLSchemaName" + Then Validate "PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "PostgreSQLConnectionArgumentsList" for key "PostgreSQLConnectionArguments" + Then Enter runtime argument value "PostgreSQLOperationName" for key "PostgreSQLOperationName" + Then Enter runtime argument value "targetTable" for key "PostgreSQLTableName" + Then Enter runtime argument value "schema" for key "PostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table diff --git a/postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/postgresql-plugin/src/e2e-test/resources/pluginParameters.properties index eeccdb18e..bad2510be 100644 --- a/postgresql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/postgresql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -74,6 +74,9 @@ invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table 
invalidBoundingQueryValue=select; invalidTable=table #POSTGRESQL Valid Properties +PostgreSQLConnectionArgumentsList=fetchsize=1000 +PostgreSQLOperationName=insert +PostgreSQLTableKey=col2 connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] connectionTimeout=150 numberOfSplits=2