diff --git a/src/e2e-test/features/spanner/sink/BigQueryToSpanner_WithMacro.feature b/src/e2e-test/features/spanner/sink/BigQueryToSpanner_WithMacro.feature new file mode 100644 index 0000000000..b9eade9038 --- /dev/null +++ b/src/e2e-test/features/spanner/sink/BigQueryToSpanner_WithMacro.feature @@ -0,0 +1,69 @@ +@Spanner_Sink @SPANNER_TEST +Feature: Spanner Sink - Verification of BigQuery to Spanner successful data transfer with macro arguments + + @BQ_SOURCE_TEST @SPANNER_SINK_TEST + Scenario:Validate successful records transfer from BigQuery to Spanner with macro arguments + Given Open Datafusion Project to configure pipeline + When Source is BigQuery + When Sink is Spanner + Then Open BigQuery source properties + Then Enter BigQuery property reference name + Then Enter BigQuery property "projectId" as macro argument "bqProjectId" + Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" + Then Enter BigQuery property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Open Spanner sink properties + Then Enter Spanner property reference name + Then Enter Spanner property "projectId" as macro argument "spannerProjectId" + Then Enter Spanner property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter Spanner property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter Spanner property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter Spanner property "instanceId" as macro argument "spannerInstanceId" + Then Enter Spanner property "databaseName" as macro 
argument "spannerDatabaseName" + Then Enter Spanner property "table" as macro argument "spannerTablename" + Then Enter Spanner property "primaryKey" as macro argument "spannerSinkPrimaryKey" + Then Enter Spanner cmek property "encryptionKeyName" as macro argument "cmekSpanner" if cmek is enabled + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Connect source as "BigQuery" and sink as "Spanner" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Sink Table Name key "spannerTablename" + Then Enter runtime argument value "spannerSinkPrimaryKeyBQ" for key "spannerSinkPrimaryKey" + Then Enter runtime argument value "cmekSpanner" for Spanner cmek property key "cmekSpanner" if Spanner cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "success" + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value 
"serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Sink Table Name key "spannerTablename" + Then Enter runtime argument value "spannerSinkPrimaryKeyBQ" for key "spannerSinkPrimaryKey" + Then Enter runtime argument value "cmekSpanner" for Spanner cmek property key "cmekSpanner" if Spanner cmek is enabled + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate records transferred to target spanner table with record counts of BigQuery table diff --git a/src/e2e-test/features/spanner/sink/GCSToSpanner_WithMacro.feature b/src/e2e-test/features/spanner/sink/GCSToSpanner_WithMacro.feature new file mode 100644 index 0000000000..8d2b59bea6 --- /dev/null +++ b/src/e2e-test/features/spanner/sink/GCSToSpanner_WithMacro.feature @@ -0,0 +1,71 @@ +@Spanner_Sink @SPANNER_TEST +Feature: Spanner Sink - Verification of GCS to Spanner successful data transfer with macro arguments + + @GCS_CSV_TEST @SPANNER_SINK_TEST + Scenario:Validate successful records transfer from GCS to Spanner with macro arguments + Given Open Datafusion Project to configure pipeline + When Source is GCS + When Sink is Spanner + Then Open GCS source properties + Then Enter GCS property reference name + Then Enter GCS property "projectId" as macro argument "gcsProjectId" + Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" + 
Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter GCS property "path" as macro argument "gcsSourcePath" + Then Enter GCS property "format" as macro argument "gcsFormat" + Then Enter GCS source property "skipHeader" as macro argument "gcsSkipHeader" + Then Enter GCS source property output schema "outputSchema" as macro argument "gcsOutputSchema" + Then Validate "GCS" plugin properties + Then Close the GCS properties + Then Open Spanner sink properties + Then Enter Spanner property reference name + Then Enter Spanner property "projectId" as macro argument "spannerProjectId" + Then Enter Spanner property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter Spanner property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter Spanner property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter Spanner property "instanceId" as macro argument "spannerInstanceId" + Then Enter Spanner property "databaseName" as macro argument "spannerDatabaseName" + Then Enter Spanner property "table" as macro argument "spannerTablename" + Then Enter Spanner property "primaryKey" as macro argument "spannerSinkPrimaryKey" + Then Enter Spanner cmek property "encryptionKeyName" as macro argument "cmekSpanner" if cmek is enabled + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Connect source as "GCS" and sink as "Spanner" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "gcsCsvFile" for GCS source property path key "gcsSourcePath" + Then Enter runtime argument value "gcsSkipHeaderTrue" for key "gcsSkipHeader" + Then Enter runtime argument value "csvFormat" 
for key "gcsFormat" + Then Enter runtime argument value "gcsCSVFileOutputSchema" for key "gcsOutputSchema" + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Sink Table Name key "spannerTablename" + Then Enter runtime argument value "spannerSinkPrimaryKeyGCS" for key "spannerSinkPrimaryKey" + Then Enter runtime argument value "cmekSpanner" for Spanner cmek property key "cmekSpanner" if Spanner cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "success" + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "gcsCsvFile" for GCS source property path key "gcsSourcePath" + Then Enter runtime argument value "gcsSkipHeaderTrue" for key "gcsSkipHeader" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "gcsCSVFileOutputSchema" for key "gcsOutputSchema" + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Sink Table Name key "spannerTablename" + Then Enter runtime argument value "spannerSinkPrimaryKeyGCS" for key "spannerSinkPrimaryKey" + Then Enter runtime argument value "cmekSpanner" for Spanner cmek property key "cmekSpanner" if Spanner cmek is enabled + Then Run the Pipeline in 
Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" diff --git a/src/e2e-test/features/spanner/sink/SpannerToSpanner.feature b/src/e2e-test/features/spanner/sink/SpannerToSpanner.feature new file mode 100644 index 0000000000..ba6f296fea --- /dev/null +++ b/src/e2e-test/features/spanner/sink/SpannerToSpanner.feature @@ -0,0 +1,39 @@ +@Spanner_Sink @SPANNER_TEST +Feature: Spanner Sink - Verification of Spanner to Spanner data transfer + + @SPANNER_SINK_TEST + Scenario: To verify data is getting transferred successfully from Spanner to Spanner with all supported datatype + Given Open Datafusion Project to configure pipeline + When Source is Spanner + When Sink is Spanner + Then Connect source as "Spanner" and sink as "Spanner" to establish connection + Then Open Spanner source properties + Then Enter Spanner property reference name + Then Enter Spanner property projectId "projectId" + Then Override Service account details if set in environment variables + Then Enter Spanner property InstanceId + Then Enter Spanner source property DatabaseName + Then Enter Spanner source property TableName + Then Validate output schema with expectedSchema "spannerSourceSchema" + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Open Spanner sink properties + Then Enter Spanner property reference name + Then Enter Spanner property projectId "projectId" + Then Override Service account details if set in environment variables + Then Enter Spanner property InstanceId + Then Enter Spanner sink property DatabaseName + Then Enter Spanner sink property TableName + Then Enter Spanner sink property primary key "spannerSinkPrimaryKeySpanner" + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Close the preview + Then 
Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate records transferred to target spanner table with record counts of source spanner table diff --git a/src/e2e-test/features/spanner/source/SpannerToBigQuery_WithMacro.feature b/src/e2e-test/features/spanner/source/SpannerToBigQuery_WithMacro.feature new file mode 100644 index 0000000000..3f9da09843 --- /dev/null +++ b/src/e2e-test/features/spanner/source/SpannerToBigQuery_WithMacro.feature @@ -0,0 +1,69 @@ +@Spanner_Source @SPANNER_TEST +Feature: Spanner Source - Verification of Spanner to BigQuery successful data transfer with macro arguments + + @BQ_SINK_TEST + Scenario:Validate successful records transfer from Spanner to BigQuery with macro arguments + Given Open Datafusion Project to configure pipeline + When Source is Spanner + When Sink is BigQuery + Then Open Spanner source properties + Then Enter Spanner property reference name + Then Enter Spanner property "projectId" as macro argument "spannerProjectId" + Then Enter Spanner property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter Spanner property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter Spanner property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter Spanner property "instanceId" as macro argument "spannerInstanceId" + Then Enter Spanner property "databaseName" as macro argument "spannerDatabaseName" + Then Enter Spanner property "table" as macro argument "spannerTablename" + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Open BigQuery sink properties + Then Enter BigQuery property reference name + Then Enter BigQuery property "projectId" as macro argument "bqProjectId" + Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" + Then Enter BigQuery property 
"serviceAccountType" as macro argument "serviceAccountType" + Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqTargetTable" + Then Enter BigQuery sink property "truncateTable" as macro argument "bqTruncateTable" + Then Enter BigQuery sink property "updateTableSchema" as macro argument "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Connect source as "Spanner" and sink as "BigQuery" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Source Table Name key "spannerTablename" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery sink table name key "bqTargetTable" + Then Enter runtime argument value "bqTruncateTableTrue" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchemaTrue" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "success" + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value 
"projectId" for key "spannerProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Source Table Name key "spannerTablename" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery sink table name key "bqTargetTable" + Then Enter runtime argument value "bqTruncateTableTrue" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchemaTrue" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate records transferred to target BigQuery table with record counts of spanner table diff --git a/src/e2e-test/features/spanner/source/SpannerToGCS_WithMacro.feature b/src/e2e-test/features/spanner/source/SpannerToGCS_WithMacro.feature new file mode 100644 index 0000000000..feced09e9f --- /dev/null +++ b/src/e2e-test/features/spanner/source/SpannerToGCS_WithMacro.feature @@ -0,0 +1,64 @@ +@Spanner_Source @SPANNER_TEST +Feature: Spanner Source - Verification of Spanner to GCS successful data transfer with macro arguments + + @GCS_SINK_TEST + Scenario:Validate successful records transfer from Spanner to GCS with macro arguments + Given Open Datafusion Project to configure pipeline + When Source is Spanner + When Sink is GCS + Then Open Spanner source properties + Then Enter Spanner property reference name + Then Enter Spanner property "projectId" as macro 
argument "spannerProjectId" + Then Enter Spanner property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter Spanner property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter Spanner property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter Spanner property "instanceId" as macro argument "spannerInstanceId" + Then Enter Spanner property "databaseName" as macro argument "spannerDatabaseName" + Then Enter Spanner property "table" as macro argument "spannerTablename" + Then Validate "Spanner" plugin properties + Then Close the Spanner properties + Then Open GCS sink properties + Then Enter GCS property reference name + Then Enter GCS property "projectId" as macro argument "gcsProjectId" + Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter GCS property "path" as macro argument "gcsSinkPath" + Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" + Then Enter GCS property "format" as macro argument "gcsFormat" + Then Validate "GCS" plugin properties + Then Close the GCS properties + Then Connect source as "Spanner" and sink as "GCS" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Source Table Name key "spannerTablename" + Then Enter runtime argument value "projectId" for key 
"gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "avroFormat" for key "gcsFormat" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "success" + Then Click on preview data for GCS sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "spannerProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value for Spanner Instance ID key "spannerInstanceId" + Then Enter runtime argument value for Spanner Database Name key "spannerDatabaseName" + Then Enter runtime argument value for Spanner Source Table Name key "spannerTablename" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "avroFormat" for key "gcsFormat" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Verify data is transferred to target GCS bucket diff --git a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerBase.java b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerBase.java index e1d4a35b31..02cdd7563a 100644 --- a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerBase.java +++ b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerBase.java @@ -15,13 +15,14 @@ */ package io.cdap.plugin.spanner.stepsdesign; +import io.cdap.e2e.pages.locators.CdfStudioLocators; import 
io.cdap.e2e.utils.BigQueryClient; -import io.cdap.e2e.utils.CdfHelper; import io.cdap.e2e.utils.ElementHelper; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.plugin.common.stepsdesign.TestSetupHooks; import io.cdap.plugin.spanner.actions.SpannerActions; import io.cdap.plugin.spanner.locators.SpannerLocators; +import io.cdap.plugin.utils.E2EHelper; import io.cdap.plugin.utils.SpannerClient; import io.cucumber.java.en.Then; import org.junit.Assert; @@ -33,7 +34,7 @@ /** * Spanner plugin related common test step definitions. */ -public class SpannerBase implements CdfHelper { +public class SpannerBase implements E2EHelper { @Then("Enter Spanner property reference name") public void enterSpannerPropertyReferenceName() { @@ -67,6 +68,11 @@ public void validateRecordsTransferredToTargetBigQueryTableWithRecordCountsOfSpa Assert.assertEquals(spannerTableRecordCount, bqTargetRecordCount); } + @Then("Enter Spanner property {string} as macro argument {string}") + public void enterSpannerPropertyAsMacroArgument(String pluginProperty, String macroArgument) { + enterPropertyAsMacroArgument(pluginProperty, macroArgument); + } + @Then("Validate records transferred to target BigQuery table with record counts of spanner Import Query {string}") public void validateRecordsTransferredToTargetBigQueryTableWithRecordCountsOfSpannerImportQuery(String query) throws IOException, InterruptedException { @@ -152,4 +158,14 @@ public void validateRecordsTransferredToTargetSpannerTableWithRecordCountsOfSour Assert.assertEquals(spannerSourceTableRecordCount, spannerTargetTableRecordCount); } + + @Then("Enter runtime argument value for Spanner Instance ID key {string}") + public void enterRuntimeArgumentValueForSpannerInstanceIDKey(String runtimeArgumentKey) { + ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey), TestSetupHooks.spannerInstance); + } + + @Then("Enter runtime argument value for Spanner Database Name key {string}") + public void 
enterRuntimeArgumentValueForSpannerDatabaseNameKey(String runtimeArgumentKey) { + ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey), TestSetupHooks.spannerDatabase); + } } diff --git a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSink.java b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSink.java index f24f12afaf..b7fafb5b22 100644 --- a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSink.java +++ b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSink.java @@ -16,10 +16,12 @@ package io.cdap.plugin.spanner.stepsdesign; import io.cdap.e2e.pages.actions.CdfStudioActions; -import io.cdap.e2e.utils.CdfHelper; +import io.cdap.e2e.pages.locators.CdfStudioLocators; +import io.cdap.e2e.utils.ElementHelper; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.plugin.common.stepsdesign.TestSetupHooks; import io.cdap.plugin.spanner.actions.SpannerActions; +import io.cdap.plugin.utils.E2EHelper; import io.cdap.plugin.utils.SpannerClient; import io.cucumber.java.en.Then; import io.cucumber.java.en.When; @@ -29,7 +31,7 @@ /** * Spanner sink plugin related test step definitions. 
*/ -public class SpannerSink implements CdfHelper { +public class SpannerSink implements E2EHelper { @When("Sink is Spanner") public void sinkIsSpanner() { @@ -62,6 +64,21 @@ public void enterSpannerSinkPropertyPrimaryKey(String primaryKey) { SpannerActions.enterPrimaryKey(PluginPropertyUtils.pluginProp(primaryKey)); } + @Then("Enter runtime argument value for Spanner Sink Table Name key {string}") + public void enterRuntimeArgumentValueForSpannerSinkTableNameKey(String runtimeArgumentKey) { + ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey), TestSetupHooks.spannerTargetTable); + } + + @Then("Enter Spanner cmek property {string} as macro argument {string} if cmek is enabled") + public void enterSpannerCmekPropertyAsMacroArgumentIfCmekIsEnabled(String pluginProperty, String macroArgument) { + String cmekSpanner = PluginPropertyUtils.pluginProp("cmekSpanner"); + if (cmekSpanner != null) { + enterPropertyAsMacroArgument(pluginProperty, macroArgument); + return; + } + BeforeActions.scenario.write("CMEK not enabled"); + } + @Then("Enter Spanner sink property encryption key name {string} if cmek is enabled") public void enterSpannerSinkPropertyEncryptionKeyNameStringIfCmekIsEnabled(String cmek) { String cmekSpanner = PluginPropertyUtils.pluginProp(cmek); @@ -73,6 +90,18 @@ public void enterSpannerSinkPropertyEncryptionKeyNameStringIfCmekIsEnabled(Strin } } + @Then("Enter runtime argument value {string} for Spanner cmek property key {string} if Spanner cmek is enabled") + public void enterRuntimeArgumentValueForSpannerCmekPropertyKeyIfSpannerCmekIsEnabled + (String value, String runtimeArgumentKey) { + String cmekSpanner = PluginPropertyUtils.pluginProp(value); + if (cmekSpanner != null) { + ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey), cmekSpanner); + BeforeActions.scenario.write("Spanner encryption key name - " + cmekSpanner); + return; + } + BeforeActions.scenario.write("CMEK not enabled"); + } + 
@Then("Validate the cmek key {string} of target Spanner database if cmek is enabled") public void validateTheCmekKeyOfTargetSpannerDatabaseIfCmekIsEnabled(String cmek) { String cmekSpanner = PluginPropertyUtils.pluginProp(cmek); diff --git a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSource.java b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSource.java index 5554dc78bc..720fa20328 100644 --- a/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSource.java +++ b/src/e2e-test/java/io/cdap/plugin/spanner/stepsdesign/SpannerSource.java @@ -15,7 +15,9 @@ */ package io.cdap.plugin.spanner.stepsdesign; +import io.cdap.e2e.pages.locators.CdfStudioLocators; import io.cdap.e2e.utils.CdfHelper; +import io.cdap.e2e.utils.ElementHelper; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.plugin.common.stepsdesign.TestSetupHooks; import io.cdap.plugin.spanner.actions.SpannerActions; @@ -47,6 +49,11 @@ public void enterSpannerSourcePropertyTableName() { SpannerActions.enterTableName(TestSetupHooks.spannerSourceTable); } + @Then("Enter runtime argument value for Spanner Source Table Name key {string}") + public void enterRuntimeArgumentValueForSpannerSourceTableNameKey(String runtimeArgumentKey) { + ElementHelper.sendKeys(CdfStudioLocators.runtimeArgsValue(runtimeArgumentKey), TestSetupHooks.spannerSourceTable); + } + @Then("Enter the Spanner source property Import Query {string}") public void enterTheSpannerSourcePropertyImportQuery(String query) { SpannerActions.enterImportQuery(PluginPropertyUtils.pluginProp(query)); diff --git a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java index dbb7add2e3..a8b3bee469 100644 --- a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java +++ b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java @@ -34,7 +34,10 @@ public enum CdfPluginPropertyLocator { 
OUTPUT_SCHEMA_MACRO_INPUT("Output Schema-macro-input"), GCS_DELETE_OBJECTS_TO_DELETE("paths"), GCS_MOVE_SOURCE_PATH("sourcePath"), - GCS_MOVE_DESTINATION_PATH("destPath"); + GCS_MOVE_DESTINATION_PATH("destPath"), + INSTANCE("instance"), + DATABASE("database"), + KEYS("keys"); public String pluginProperty; CdfPluginPropertyLocator(String property) { @@ -70,6 +73,9 @@ public enum CdfPluginPropertyLocator { .put("objectsToDelete", CdfPluginPropertyLocator.GCS_DELETE_OBJECTS_TO_DELETE) .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH) .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH) + .put("instanceId", CdfPluginPropertyLocator.INSTANCE) + .put("databaseName", CdfPluginPropertyLocator.DATABASE) + .put("primaryKey", CdfPluginPropertyLocator.KEYS) .build(); } diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties index e43b02c8a4..eb9579f5c2 100644 --- a/src/e2e-test/resources/pluginParameters.properties +++ b/src/e2e-test/resources/pluginParameters.properties @@ -3,6 +3,7 @@ dataset=test_automation serviceAccountType=filePath serviceAccount=auto-detect csvFormat=csv +avroFormat=avro ## GCS-PLUGIN-PROPERTIES-START gcsMandatoryProperties=referenceName, path, format