Skip to content

Commit e632559

Browse files
committed
added support for providing a content type for uploaded bytes files
1 parent 8f48ba0 commit e632559

File tree

2 files changed

+16
-14
lines changed

2 files changed

+16
-14
lines changed

osbot_aws/aws/s3/S3.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -290,8 +290,8 @@ def file_copy(self, bucket_source, key_source, bucket_destination, key_destinati
290290
kwargs_file_copy['ContentType'] = content_type
291291
return self.client().copy_object(**kwargs_file_copy)
292292

293-
def file_create_from_bytes(self, file_bytes, bucket, key, metadata=None):
294-
return self.file_upload_from_bytes(file_body=file_bytes, bucket=bucket, key=key, metadata=metadata)
293+
def file_create_from_bytes(self, file_bytes, bucket, key, metadata=None, content_type=None):
294+
return self.file_upload_from_bytes(file_body=file_bytes, bucket=bucket, key=key, metadata=metadata, content_type=content_type)
295295

296296
def file_create_from_string_as_gzip(self, file_contents, bucket, key):
297297
import gzip
@@ -381,11 +381,11 @@ def file_upload(self,file , bucket, folder):
381381
self.file_upload_to_key(file, bucket, key) # upload file
382382
return key # return path to file uploaded (if succeeded)
383383

384-
def file_upload_from_bytes(self, file_body, bucket, key, metadata=None):
385-
if type(metadata) is not dict:
386-
metadata = {}
384+
def file_upload_from_bytes(self, file_body, bucket, key, metadata=None, content_type=None):
385+
if type(metadata) is not dict: metadata = {}
386+
if content_type is None : content_type = S3_DEFAULT_FILE_CONTENT_TYPE
387387
metadata['created_by'] = 'osbot_aws.aws.s3.S3.file_upload_from_bytes'
388-
self.s3().put_object(Body=file_body, Bucket=bucket, Key=key, Metadata=metadata) # todo: see if there are use cases that need the version_Id, since at moment we only return True (which basically represents a lack of an error/exception)
388+
self.s3().put_object(Body=file_body, Bucket=bucket, Key=key, Metadata=metadata, ContentType=content_type) # todo: see if there are use cases that need the version_Id, since at moment we only return True (which basically represents a lack of an error/exception)
389389
return True
390390

391391
def file_upload_to_key(self, file, bucket, key, set_content_type=True):

osbot_aws/aws/s3/S3__DB_Base.py

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -128,22 +128,24 @@ def s3_folder_files__all(self, folder='', full_path=True):
128128
def s3_folder_list(self, folder='', return_full_path=False):
129129
return self.s3().folder_list(s3_bucket=self.s3_bucket(), parent_folder=folder, return_full_path=return_full_path)
130130

131-
def s3_save_bytes(self,data, s3_key, metadata=None ):
132-
kwargs = dict(bucket = self.s3_bucket(),
133-
key = s3_key ,
134-
file_body = data ,
135-
metadata = metadata)
131+
def s3_save_bytes(self,data, s3_key, metadata=None , content_type=None):
132+
kwargs = dict(bucket = self.s3_bucket(),
133+
key = s3_key ,
134+
file_body = data ,
135+
metadata = metadata ,
136+
content_type = content_type )
136137
return self.s3().file_upload_from_bytes(**kwargs)
137138

138-
def s3_save_data(self, data, s3_key, metadata=None):
139+
def s3_save_data(self, data, s3_key, metadata=None, content_type=None):
139140
from osbot_utils.utils.Json import json_dumps, json_to_gz
140141

141142
if self.save_as_gz:
142143
data = json_to_gz(data)
143144
kwargs = dict(bucket=self.s3_bucket(), key=s3_key)
144145
if type(data) == bytes:
145-
kwargs['file_body'] = data
146-
kwargs['metadata' ] = metadata
146+
kwargs['file_body' ] = data
147+
kwargs['metadata' ] = metadata
148+
kwargs['content_type'] = content_type
147149
return self.s3().file_upload_from_bytes(**kwargs)
148150
else:
149151
data_as_str = json_dumps(data)

0 commit comments

Comments (0)