1 file changed: +9 -3 lines changed

@@ -1012,11 +1012,14 @@ def generate_har(self):
         # Upload the HAR to GCS for "successful" tests
         uploaded = False
         har_filename = os.path.basename(har_file)
+        needs_upload = self.job['success']
+        if not needs_upload and 'metadata' in self.job and 'retry_count' in self.job['metadata'] and self.job['metadata']['retry_count'] >= 2:
+            needs_upload = True
         if 'gcs_har_upload' in self.job and \
                 'bucket' in self.job['gcs_har_upload'] and \
                 'path' in self.job['gcs_har_upload'] and \
                 os.path.exists(har_file) and \
-                self.job['success']:
+                needs_upload:
             try:
                 from google.cloud import storage
                 client = storage.Client()
@@ -1032,8 +1035,11 @@ def generate_har(self):
             except Exception:
                 logging.exception('Error uploading HAR to Cloud Storage')

-        if uploaded and 'bq_datastore' in self.job:
-            self.upload_bigquery(har, har_filename, self.job['bq_datastore'])
+        if uploaded:
+            if self.job['success'] and 'bq_datastore' in self.job:
+                self.upload_bigquery(har, har_filename, self.job['bq_datastore'])
+            elif 'bq_datastore_failures' in self.job:
+                self.upload_bigquery(har, har_filename, self.job['bq_datastore_failures'])

         # Delete the local HAR file if it was only supposed to be uploaded
         if not self.options.har:
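As a quick illustration (not part of the patch), the gating this change introduces can be sketched in isolation. The helper names, the example job dict, and the dataset strings below are hypothetical; only the keys ('success', 'metadata', 'retry_count', 'bq_datastore', 'bq_datastore_failures') come from the diff above, and the existing 'gcs_har_upload' config and file-existence checks are omitted.

    # Hedged sketch of the new upload gating, assuming a job dict shaped like
    # the one used in generate_har(). Helper names are hypothetical.

    def should_upload_har(job):
        """HAR is uploaded for successful tests, or for failures that have retried at least twice."""
        needs_upload = job.get('success', False)
        if not needs_upload and job.get('metadata', {}).get('retry_count', 0) >= 2:
            needs_upload = True
        return needs_upload

    def pick_bigquery_datastore(job):
        """Successful runs target 'bq_datastore'; uploaded failures target 'bq_datastore_failures'."""
        if job.get('success') and 'bq_datastore' in job:
            return job['bq_datastore']
        if 'bq_datastore_failures' in job:
            return job['bq_datastore_failures']
        return None

    if __name__ == '__main__':
        failed_retry = {'success': False, 'metadata': {'retry_count': 2},
                        'bq_datastore_failures': 'example_project.example_dataset.failures'}
        print(should_upload_har(failed_retry))        # True: failure with >= 2 retries
        print(pick_bigquery_datastore(failed_retry))  # example_project.example_dataset.failures

Net effect of the patch: HARs for failed tests are now uploaded once the test has retried at least twice, and those failure HARs are routed to a separate BigQuery datastore so they do not mix with data from successful runs.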