@@ -263,19 +263,23 @@ async def send_log(session, url, data, headers):
                 return resp.status, resp.url
         except aiohttp.ClientResponseError as e:
             if e.status == 400:
-                raise BadRequestException(_format_error(e, "Unexpected payload"))
+                raise BadRequestException(
+                    _format_error(e, "Unexpected payload"))
             elif e.status == 403:
-                raise BadRequestException(_format_error(e, "Review your license key"))
+                raise BadRequestException(
+                    _format_error(e, "Review your license key"))
             elif e.status == 404:
                 raise BadRequestException(
                     _format_error(e, "Review the region endpoint")
                 )
             elif e.status == 429:
-                logger.error(f"There was a {e.status} error. Reason: {e.message}")
+                logger.error(
+                    f"There was a {e.status} error. Reason: {e.message}")
                 # Now retry the request
                 continue
             elif e.status == 408:
-                logger.error(f"There was a {e.status} error. Reason: {e.message}")
+                logger.error(
+                    f"There was a {e.status} error. Reason: {e.message}")
                 # Now retry the request
                 continue
             elif 400 <= e.status < 500:
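
Note for readers of this hunk: the continue statements in the 429 and 408 branches assume an enclosing retry loop in send_log, and lambda_handler further down catches a MaxRetriesException once retries run out. A minimal sketch of such a loop, with MAX_RETRIES, INITIAL_BACKOFF, and BACKOFF_MULTIPLIER as illustrative constants not taken from this diff:

import asyncio
import aiohttp

MAX_RETRIES = 3          # illustrative values, not from this diff
INITIAL_BACKOFF = 1.0
BACKOFF_MULTIPLIER = 2

class MaxRetriesException(Exception):
    """Stands in for the exception this file raises when retries run out."""

async def send_log_sketch(session, url, data, headers):
    backoff = INITIAL_BACKOFF
    for attempt in range(MAX_RETRIES):
        if attempt > 0:
            await asyncio.sleep(backoff)  # back off before the next attempt
            backoff *= BACKOFF_MULTIPLIER
        try:
            async with session.post(url, data=data, headers=headers) as resp:
                resp.raise_for_status()
                return resp.status, resp.url
        except aiohttp.ClientResponseError as e:
            if e.status in (408, 429):
                continue                  # transient error: retry the request
            raise                         # other client errors propagate
    raise MaxRetriesException("Failed to send log entry")
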
@@ -293,14 +297,14 @@ def create_log_payload_request(data, session):
 
 async def _fetch_data_from_s3(bucket, key, context):
     """
-    Stream data from S3 bucket. Create batches of size MAX_PAYLOAD_SIZE
-    and create async requests from batches
+    Stream data from S3 bucket. Create batches of size MAX_PAYLOAD_SIZE
+    and create async requests from batches
     """
-    log_file_size = boto3.resource("s3").Bucket(bucket).Object(key).content_length
+    log_file_size = boto3.resource('s3').Bucket(
+        bucket).Object(key).content_length
     if log_file_size > MAX_FILE_SIZE:
         logger.error(
-            "The log file uploaded to S3 is larger than the supported max size of 400MB"
-        )
+            "The log file uploaded to S3 is larger than the supported max size of 400MB")
        return
     BATCH_SIZE_FACTOR = _get_batch_size_factor()
     s3MetaData = {
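
The docstring in this hunk promises batches capped at MAX_PAYLOAD_SIZE. A minimal sketch of that batching idea, assuming newline-delimited log lines and an illustrative size cap; the real function streams from S3 and fans the batches out as async requests:

MAX_PAYLOAD_SIZE = 1024 * 1024  # illustrative cap in bytes

def batch_log_lines(lines):
    # Group lines into batches whose UTF-8 encoded size stays under the cap.
    # A single line larger than the cap is still emitted as its own batch.
    batch, size = [], 0
    for line in lines:
        line_size = len(line.encode("utf-8"))
        if batch and size + line_size > MAX_PAYLOAD_SIZE:
            yield batch
            batch, size = [], 0
        batch.append(line)
        size += line_size
    if batch:
        yield batch  # trailing partial batch
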
@@ -367,33 +371,30 @@ def get_s3_event(event):
 # Lambda handler #
 ####################
 
-
 def lambda_handler(event, context):
     # Get bucket from s3 upload event
     _setting_console_logging_level()
     s3_event = get_s3_event(event)
-    bucket_name = s3_event["bucket"]["name"]
-    object_key = urllib.parse.unquote_plus(
+    bucket = s3_event["bucket"]["name"]
+    key = urllib.parse.unquote_plus(
         s3_event["object"]["key"], encoding="utf-8")
 
-    # Allow user to skip log file using regex pattern set in env variable: S3_IGNORE_PATTERN
+    # Allow user to skip log file using regex pattern set in env variable: S3_IGNORE_PATTERN
     if _is_ignore_log_file(key):
         logger.debug(f"Ignore log file based on S3_IGNORE_PATTERN: {key}")
         return {'statusCode': 200, 'message': 'ignored this log'}
 
     try:
-        asyncio.run(_fetch_data_from_s3(bucket_name, object_key, context))
+        asyncio.run(_fetch_data_from_s3(bucket, key, context))
     except KeyError as e:
         logger.error(e)
         logger.error(
-            f"Error getting object {object_key} from bucket {bucket_name}. Make sure they exist and your bucket is in the same region as this function."
-        )
+            f'Error getting object {key} from bucket {bucket}. Make sure they exist and your bucket is in the same region as this function.')
         raise e
     except OSError as e:
         logger.error(e)
         logger.error(
-            f"Error processing the object {object_key} from bucket {bucket_name}."
-        )
+            f"Error processing the object {key} from bucket {bucket}.")
         raise e
     except MaxRetriesException as e:
         logger.error("Retry limit reached. Failed to send log entry.")
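
The S3_IGNORE_PATTERN check in this hunk calls a helper defined outside the diff. A plausible sketch of _is_ignore_log_file, assuming the environment variable holds a regular expression matched against the object key; the exact semantics are an assumption:

import os
import re

def _is_ignore_log_file(key):
    # Hypothetical reading of S3_IGNORE_PATTERN: an unset or empty
    # variable means no log file is skipped.
    pattern = os.environ.get("S3_IGNORE_PATTERN")
    if not pattern:
        return False
    return re.search(pattern, key) is not None
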
@@ -405,8 +406,8 @@ def lambda_handler(event, context):
         logger.error(f"Error occurred: {e}")
         raise e
     else:
-        return {"statusCode": 200, "message": "Uploaded logs to New Relic"}
+        return {'statusCode': 200, 'message': 'Uploaded logs to New Relic'}
 
 
 if __name__ == "__main__":
-    lambda_handler("", "")
+    lambda_handler('', '')
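
The __main__ guard invokes the handler with empty strings. For a more realistic local smoke test, a hand-built event could be passed instead; the nested layout below follows the standard S3 put-notification shape that get_s3_event presumably unpacks, the bucket and key values are placeholders, and None stands in for the Lambda context on the assumption that it is not dereferenced in a dry run:

sample_event = {
    "Records": [
        {
            "s3": {
                "bucket": {"name": "example-log-bucket"},  # placeholder
                "object": {"key": "logs/app.log"},         # placeholder
            }
        }
    ]
}
lambda_handler(sample_event, None)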