
Commit 12f7972

Write failed tests to crawl_failed tables
1 parent 7d2327b commit 12f7972

File tree

1 file changed: +9 -3 lines changed

internal/process_test.py

+9-3
@@ -1012,11 +1012,14 @@ def generate_har(self):
         # Upload the HAR to GCS for "successful" tests
         uploaded = False
         har_filename = os.path.basename(har_file)
+        needs_upload = self.job['success']
+        if not needs_upload and 'metadata' in self.job and 'retry_count' in self.job['metadata'] and self.job['metadata']['retry_count'] >= 2:
+            needs_upload = True
         if 'gcs_har_upload' in self.job and \
                 'bucket' in self.job['gcs_har_upload'] and \
                 'path' in self.job['gcs_har_upload'] and \
                 os.path.exists(har_file) and \
-                self.job['success']:
+                needs_upload:
             try:
                 from google.cloud import storage
                 client = storage.Client()
@@ -1032,8 +1035,11 @@ def generate_har(self):
             except Exception:
                 logging.exception('Error uploading HAR to Cloud Storage')
 
-        if uploaded and 'bq_datastore' in self.job:
-            self.upload_bigquery(har, har_filename, self.job['bq_datastore'])
+        if uploaded:
+            if self.job['success'] and 'bq_datastore' in self.job:
+                self.upload_bigquery(har, har_filename, self.job['bq_datastore'])
+            elif 'bq_datastore_failures' in self.job:
+                self.upload_bigquery(har, har_filename, self.job['bq_datastore_failures'])
 
         # Delete the local HAR file if it was only supposed to be uploaded
         if not self.options.har:
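For reference, a minimal sketch of the routing this diff introduces, assuming the job dict carries the keys read in generate_har ('success', 'metadata' with 'retry_count', 'bq_datastore', 'bq_datastore_failures'). The helper name pick_bigquery_dataset and the example dataset strings are illustrative only and not part of the codebase; the sketch also skips the GCS upload step and assumes it succeeded (uploaded is True).

# Hypothetical helper illustrating the commit's routing logic; names and
# dataset strings are assumptions, not taken from the repository.
def pick_bigquery_dataset(job):
    """Return the BigQuery dataset the HAR should be written to, or None."""
    # Upload if the test succeeded, or if it has already been retried
    # at least twice, so persistently failing tests still leave a record.
    needs_upload = job.get('success', False)
    retry_count = job.get('metadata', {}).get('retry_count', 0)
    if not needs_upload and retry_count >= 2:
        needs_upload = True
    if not needs_upload:
        return None  # no GCS upload, so no BigQuery write either
    # Successful tests go to the regular dataset; failed-but-retried tests
    # go to the failures ("crawl_failed") dataset instead.
    if job.get('success') and 'bq_datastore' in job:
        return job['bq_datastore']
    if 'bq_datastore_failures' in job:
        return job['bq_datastore_failures']
    return None

# Example job dicts (hypothetical values):
ok_job = {'success': True, 'bq_datastore': 'httparchive.crawl'}
failed_job = {'success': False,
              'metadata': {'retry_count': 2},
              'bq_datastore_failures': 'httparchive.crawl_failed'}
print(pick_bigquery_dataset(ok_job))      # httparchive.crawl
print(pick_bigquery_dataset(failed_job))  # httparchive.crawl_failed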
