Commit 4a086e7

Merge pull request #1335 from cloudsufi/gcs_multipart_policy
e2e test added for gcs multipart upload
2 parents: 34e3956 + 57e8143

3 files changed (+61, -1 lines)

pom.xml

Lines changed: 10 additions & 0 deletions

@@ -1210,6 +1210,16 @@
         <version>1.2.8</version>
         <scope>runtime</scope>
       </dependency>
+      <dependency>
+        <groupId>com.google.apis</groupId>
+        <artifactId>google-api-services-storage</artifactId>
+        <version>v1-rev20220604-1.32.1</version>
+      </dependency>
+      <dependency>
+        <groupId>com.google.cloud</groupId>
+        <artifactId>google-cloud-storage</artifactId>
+        <version>2.8.0</version>
+      </dependency>
     </dependencies>

   </profile>

src/e2e-test/features/bigquery/source/BigQueryToGCS.feature

Lines changed: 40 additions & 0 deletions

@@ -116,3 +116,43 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Verify data is transferred to target GCS bucket
+
+  @BQ_SOURCE_DATATYPE_TEST @GCS_SINK_MULTI_PART_UPLOAD
+  Scenario: Validate successful records transfer from BigQuery to GCS with bucket having delete multi part upload policy enabled
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "GCS" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate output schema with expectedSchema "bqSourceSchemaDatatype"
+    Then Validate "BigQuery" plugin properties
+    Then Close the BigQuery properties
+    Then Navigate to the properties page of plugin: "GCS"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter GCS sink property path
+    Then Select dropdown plugin property: "select-format" with option value: "json"
+    Then Validate "GCS" plugin properties
+    Then Close the Plugin Properties page
+    Then Connect source as "BigQuery" and sink as "GCS" to establish connection
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to GCS bucket is equal to the values from source BigQuery table

src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java

Lines changed: 11 additions & 1 deletion

@@ -211,7 +211,7 @@ public static void createTargetGCSBucketWithCSVFile() throws IOException, URISyn
     BeforeActions.scenario.write("GCS target bucket name - " + gcsTargetBucketName);
   }

-  @After(order = 1, value = "@GCS_SINK_TEST or @GCS_SINK_EXISTING_BUCKET_TEST")
+  @After(order = 1, value = "@GCS_SINK_TEST or @GCS_SINK_EXISTING_BUCKET_TEST or @GCS_SINK_MULTI_PART_UPLOAD")
   public static void deleteTargetBucketWithFile() {
     deleteGCSBucket(gcsTargetBucketName);
     PluginPropertyUtils.removePluginProp("gcsTargetBucketName");

@@ -1030,4 +1030,14 @@ public static void createSinkBQExistingDatatypeTable() throws IOException, Inter
     PluginPropertyUtils.addPluginProp(" bqTargetTable", bqTargetTable);
     BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " updated successfully");
   }
+
+  private static String createGCSBucketLifeCycle() throws IOException, URISyntaxException {
+    String bucketName = StorageClient.createBucketwithLifeCycle("00000000-e2e-" + UUID.randomUUID(), 30).getName();
+    PluginPropertyUtils.addPluginProp("gcsTargetBucketName", bucketName);
+    return bucketName;
+  }
+
+  @Before(order = 1, value = "@GCS_SINK_MULTI_PART_UPLOAD")
+  public static void createBucketWithLifeCycle() throws IOException, URISyntaxException {
+    gcsTargetBucketName = createGCSBucketLifeCycle();
+    BeforeActions.scenario.write("GCS target bucket name - " + gcsTargetBucketName); }
 }
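Note: the StorageClient.createBucketwithLifeCycle helper that the new @Before hook calls lives elsewhere in the test framework and is not part of this diff. For orientation only, below is a minimal sketch of what such a helper could look like using the google-cloud-storage 2.8.0 client added to pom.xml above: it creates a bucket whose lifecycle rule aborts incomplete multipart uploads after a given number of days (the "delete multi part upload policy" the new scenario exercises). The class name and the availability of the AbortIncompleteMPUpload action factory in this client version are assumptions, not the plugin's actual implementation.

import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.BucketInfo;
import com.google.cloud.storage.BucketInfo.LifecycleRule;
import com.google.cloud.storage.BucketInfo.LifecycleRule.LifecycleAction;
import com.google.cloud.storage.BucketInfo.LifecycleRule.LifecycleCondition;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.util.Collections;

// Hypothetical stand-in for the plugin's StorageClient helper (not shown in this diff).
public class StorageClientSketch {

  // Creates a bucket with a lifecycle rule that aborts incomplete multipart
  // uploads older than ageDays days.
  public static Bucket createBucketwithLifeCycle(String bucketName, int ageDays) {
    Storage storage = StorageOptions.getDefaultInstance().getService();
    LifecycleRule abortIncompleteMpu = new LifecycleRule(
        // Factory method assumed available in google-cloud-storage 2.8.0.
        LifecycleAction.newAbortIncompleteMPUploadAction(),
        LifecycleCondition.newBuilder().setAge(ageDays).build());
    return storage.create(
        BucketInfo.newBuilder(bucketName)
            .setLifecycleRules(Collections.singletonList(abortIncompleteMpu))
            .build());
  }
}

In normal runs the extended @After hook deletes the bucket immediately after the scenario, so the 30-day age condition only matters for buckets left behind by interrupted runs; the point of the rule is that the policy is present on the bucket while the GCS sink writes to it.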
