1111from globus_sdk import TransferAPIError , TransferClient , TransferData
1212from globus_sdk .services .transfer .response .iterable import IterableTransferResponse
1313from six .moves .urllib .parse import urlparse
14- from .utils import ts_utc
1514
1615from .settings import logger
16+ from .utils import ts_utc
1717
1818hpss_endpoint_map = {
1919 "ALCF" : "de463ec4-6d04-11e5-ba46-22000b92c6ec" ,
@@ -158,9 +158,10 @@ def file_exists(name: str) -> bool:
158158 return False
159159
160160
161- def globus_transfer (
161+ # C901 'globus_transfer' is too complex (20)
162+ def globus_transfer ( # noqa: C901
162163 remote_ep : str , remote_path : str , name : str , transfer_type : str , non_blocking : bool
163- ): # noqa: C901
164+ ):
164165 global transfer_client
165166 global local_endpoint
166167 global remote_endpoint
@@ -223,10 +224,14 @@ def globus_transfer(
223224 # NOTE: How we behave here depends upon whether we want to support multiple active transfers.
224225 # Presently, we do not, except inadvertently (if status == PENDING)
225226 if prev_task_status == "ACTIVE" :
226- logger .info (f"{ ts_utc ()} : Previous task_id { task_id } Still Active. Returning." )
227+ logger .info (
228+ f"{ ts_utc ()} : Previous task_id { task_id } Still Active. Returning."
229+ )
227230 return "ACTIVE"
228231 elif prev_task_status == "SUCCEEDED" :
229- logger .info (f"{ ts_utc ()} : Previous task_id { task_id } status = SUCCEEDED. Continuing." )
232+ logger .info (
233+ f"{ ts_utc ()} : Previous task_id { task_id } status = SUCCEEDED. Continuing."
234+ )
230235 src_ep = task ["source_endpoint_id" ]
231236 dst_ep = task ["destination_endpoint_id" ]
232237 label = task ["label" ]
@@ -237,22 +242,26 @@ def globus_transfer(
237242 )
238243 )
239244 else :
240- logger .error (f"{ ts_utc ()} : Previous task_id { task_id } status = { prev_task_status } . Continuing." )
245+ logger .error (
246+ f"{ ts_utc ()} : Previous task_id { task_id } status = { prev_task_status } . Continuing."
247+ )
241248
242249 # DEBUG: review accumulated items in TransferData
243250 logger .info (f"{ ts_utc ()} : TransferData: accumulated items:" )
244251 attribs = transfer_data .__dict__
245- for item in attribs [' data' ][ ' DATA' ]:
246- if item [' DATA_TYPE' ] == "transfer_item" :
252+ for item in attribs [" data" ][ " DATA" ]:
253+ if item [" DATA_TYPE" ] == "transfer_item" :
247254 print (f" source item: { item ['source_path' ]} " )
248-
255+
249256 # SUBMIT new transfer here
250257 logger .info (f"{ ts_utc ()} : DIVING: Submit Transfer for { transfer_data ['label' ]} " )
251258 task = submit_transfer_with_checks (transfer_data )
252259 task_id = task .get ("task_id" )
253260 # NOTE: This log message is misleading. If we have accumulated multiple tar files for transfer,
254261 # the "label" given here refers only to the LAST tarfile in the TransferData list.
255- logger .info (f"{ ts_utc ()} : SURFACE Submit Transfer returned new task_id = { task_id } for label { transfer_data ['label' ]} " )
262+ logger .info (
263+ f"{ ts_utc ()} : SURFACE Submit Transfer returned new task_id = { task_id } for label { transfer_data ['label' ]} "
264+ )
256265
257266 transfer_data = None
258267 except TransferAPIError as e :
@@ -272,7 +281,9 @@ def globus_transfer(
272281 # test for blocking on new task_id
273282 task_status = "UNKNOWN"
274283 if not non_blocking :
275- task_status = globus_block_wait (task_id = task_id , wait_timeout = 7200 , polling_interval = 10 , max_retries = 5 )
284+ task_status = globus_block_wait (
285+ task_id = task_id , wait_timeout = 7200 , polling_interval = 10 , max_retries = 5
286+ )
276287 else :
277288 logger .info (f"{ ts_utc ()} : NO BLOCKING (task_wait) for task_id { task_id } " )
278289
@@ -285,35 +296,47 @@ def globus_transfer(
285296 return task_status
286297
287298
288- def globus_block_wait (task_id : str , wait_timeout : int , polling_interval : int , max_retries : int ):
299+ def globus_block_wait (
300+ task_id : str , wait_timeout : int , polling_interval : int , max_retries : int
301+ ):
289302 global transfer_client
290303
291304 # poll every "polling_interval" seconds to speed up small transfers. Report every 2 hours, stop waiting aftert 5*2 = 10 hours
292- logger .info (f"{ ts_utc ()} : BLOCKING START: invoking task_wait for task_id = { task_id } " )
305+ logger .info (
306+ f"{ ts_utc ()} : BLOCKING START: invoking task_wait for task_id = { task_id } "
307+ )
293308 task_status = "UNKNOWN"
294309 retry_count = 0
295310 while retry_count < max_retries :
296311 try :
297312 # Wait for the task to complete
298- transfer_client .task_wait (task_id , timeout = wait_timeout , polling_interval = 10 )
299- except GlobusHTTPError as e :
300- logger .error (f"Exception: { e } " )
313+ transfer_client .task_wait (
314+ task_id , timeout = wait_timeout , polling_interval = 10
315+ )
316+ # except GlobusHTTPError as e:
317+ # logger.error(f"Exception: {e}")
301318 except Exception as e :
302319 logger .error (f"Unexpected Exception: { e } " )
303320 else :
304321 curr_task = transfer_client .get_task (task_id )
305- task_status = curr_task [' status' ]
322+ task_status = curr_task [" status" ]
306323 if task_status == "SUCCEEDED" :
307324 break
308325 finally :
309326 retry_count += 1
310- logger .info (f"{ ts_utc ()} : BLOCKING retry_count = { retry_count } of { max_retries } of timeout { wait_timeout } seconds" )
327+ logger .info (
328+ f"{ ts_utc ()} : BLOCKING retry_count = { retry_count } of { max_retries } of timeout { wait_timeout } seconds"
329+ )
311330
312331 if retry_count == max_retries :
313- logger .info (f"{ ts_utc ()} : BLOCKING EXHAUSTED { max_retries } of timeout { wait_timeout } seconds" )
332+ logger .info (
333+ f"{ ts_utc ()} : BLOCKING EXHAUSTED { max_retries } of timeout { wait_timeout } seconds"
334+ )
314335 task_status = "EXHAUSTED_TIMEOUT_RETRIES"
315336
316- logger .info (f"{ ts_utc ()} : BLOCKING ENDS: task_id { task_id } returned from task_wait with status { task_status } " )
337+ logger .info (
338+ f"{ ts_utc ()} : BLOCKING ENDS: task_id { task_id } returned from task_wait with status { task_status } "
339+ )
317340
318341 return task_status
319342
0 commit comments