@@ -22,82 +22,82 @@ steps:
   env:
   - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
 
-# # 4a. Run integration tests concurrently with unit tests (DSv1, Scala 2.12)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-2.12'
-#   waitFor: ['unit-tests']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.12']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4a. Run integration tests concurrently with unit tests (DSv1, Scala 2.12)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-2.12'
+  waitFor: ['unit-tests']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.12']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 4b. Run integration tests concurrently with unit tests (DSv1, Scala 2.13)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-2.13'
-#   waitFor: ['unit-tests']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.13']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4b. Run integration tests concurrently with unit tests (DSv1, Scala 2.13)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-2.13'
+  waitFor: ['unit-tests']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.13']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 4c. Run integration tests concurrently with unit tests (DSv2, Spark 3.1)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-3.1'
-#   waitFor: ['integration-tests-2.12']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.1']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4c. Run integration tests concurrently with unit tests (DSv2, Spark 3.1)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-3.1'
+  waitFor: ['integration-tests-2.12']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.1']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 4d. Run integration tests concurrently with unit tests (DSv2, Spark 3.2)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-3.2'
-#   waitFor: ['integration-tests-2.13']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.2']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4d. Run integration tests concurrently with unit tests (DSv2, Spark 3.2)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-3.2'
+  waitFor: ['integration-tests-2.13']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.2']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 4e. Run integration tests concurrently with unit tests (DSv2, Spark 3.3)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-3.3'
-#   waitFor: ['integration-tests-3.1']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.3']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4e. Run integration tests concurrently with unit tests (DSv2, Spark 3.3)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-3.3'
+  waitFor: ['integration-tests-3.1']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.3']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 4f. Run integration tests concurrently with unit tests (DSv2, Spark 3.4)
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'integration-tests-3.4'
-#   waitFor: ['integration-tests-3.2']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.4']
-#   env:
-#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+# 4f. Run integration tests concurrently with unit tests (DSv2, Spark 3.4)
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'integration-tests-3.4'
+  waitFor: ['integration-tests-3.2']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.4']
+  env:
+  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
 # 4g. Run integration tests concurrently with unit tests (DSv2, Spark 3.5)
 - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
   id: 'integration-tests-3.5'
-#   waitFor: ['integration-tests-3.3']
+  waitFor: ['integration-tests-3.3']
   entrypoint: 'bash'
   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.5']
   env:
@@ -109,7 +109,7 @@ steps:
 # 4h. Run integration tests concurrently with unit tests (DSv2, Spark 4.0)
 - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
   id: 'integration-tests-4.0'
-#   waitFor: ['integration-tests-3.4']
+  waitFor: ['integration-tests-3.4']
   entrypoint: 'bash'
   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-4.0']
   env:
@@ -130,14 +130,14 @@ steps:
   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# # 5. Upload coverage to CodeCov
-# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-#   id: 'upload-it-to-codecov'
-#   waitFor: ['integration-tests-2.12', 'integration-tests-2.13', 'integration-tests-3.1', 'integration-tests-3.2', 'integration-tests-3.3', 'integration-tests-3.4', 'integration-tests-3.5', 'integration-tests-4.0', 'integration-tests-4.1']
-#   entrypoint: 'bash'
-#   args: ['/workspace/cloudbuild/presubmit.sh', 'upload-it-to-codecov']
-#   env:
-#   - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
+# 5. Upload coverage to CodeCov
+- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+  id: 'upload-it-to-codecov'
+  waitFor: ['integration-tests-2.12', 'integration-tests-2.13', 'integration-tests-3.1', 'integration-tests-3.2', 'integration-tests-3.3', 'integration-tests-3.4', 'integration-tests-3.5', 'integration-tests-4.0', 'integration-tests-4.1']
+  entrypoint: 'bash'
+  args: ['/workspace/cloudbuild/presubmit.sh', 'upload-it-to-codecov']
+  env:
+  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
 
 
 # Tests take around 1 hr 15 mins in general.
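
Note on the dependency graph: the waitFor entries above serialize the integration suites into two parallel tracks (2.12 -> 3.1 -> 3.3 -> 3.5 and 2.13 -> 3.2 -> 3.4 -> 4.0), each starting as soon as the unit tests pass, while the CodeCov upload step waits for every integration suite to finish.

A minimal sketch of running this build manually with the gcloud CLI, assuming the config sits at cloudbuild/cloudbuild.yaml in the repo root; every substitution value below is a placeholder (in CI these come from the Cloud Build trigger):

  gcloud builds submit . \
    --config=cloudbuild/cloudbuild.yaml \
    --substitutions=_GOOGLE_CLOUD_PROJECT=my-project,_TEMPORARY_GCS_BUCKET=my-temp-bucket,_BIGLAKE_CONNECTION_ID=my-connection,_BIGQUERY_KMS_KEY_NAME=my-kms-key,_CODECOV_TOKEN=my-token

User-defined substitution keys must start with an underscore, which is why every variable referenced in the config is _-prefixed.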