Commit d75c94e

make AWS keys optional (for use with an IAM S3 role)
1 parent d0a1b48

File tree: 1 file changed (+4, -10 lines)

lib/refile/s3.rb: 4 additions & 10 deletions
@@ -8,8 +8,6 @@ module Refile
   #
   # @example
   #   backend = Refile::Backend::S3.new(
-  #     access_key_id: "xyz",
-  #     secret_access_key: "abcd1234",
   #     region: "sa-east-1",
   #     bucket: "my-bucket",
   #     prefix: "files"
@@ -19,12 +17,10 @@ module Refile
   class S3
     extend Refile::BackendMacros
 
-    attr_reader :access_key_id, :max_size
+    attr_reader :max_size
 
     # Sets up an S3 backend with the given credentials.
     #
-    # @param [String] access_key_id
-    # @param [String] secret_access_key
     # @param [String] region The AWS region to connect to
     # @param [String] bucket The name of the bucket where files will be stored
     # @param [String] prefix A prefix to add to all files. Prefixes on S3 are kind of like folders.
@@ -33,10 +29,8 @@ class S3
     # @param [Hash] s3_options Additional options to initialize S3 with
     # @see http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/Core/Configuration.html
     # @see http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/S3.html
-    def initialize(access_key_id:, secret_access_key:, region:, bucket:, max_size: nil, prefix: nil, hasher: Refile::RandomHasher.new, **s3_options)
-      @access_key_id = access_key_id
-      @secret_access_key = secret_access_key
-      @s3_options = { access_key_id: access_key_id, secret_access_key: secret_access_key, region: region }.merge s3_options
+    def initialize(region:, bucket:, max_size: nil, prefix: nil, hasher: Refile::RandomHasher.new, **s3_options)
+      @s3_options = { region: region }.merge s3_options
       @s3 = Aws::S3::Resource.new @s3_options
       @bucket_name = bucket
       @bucket = @s3.bucket @bucket_name
@@ -52,7 +46,7 @@ def initialize(access_key_id:, secret_access_key:, region:, bucket:, max_size: n
     verify_uploadable def upload(uploadable)
       id = @hasher.hash(uploadable)
 
-      if uploadable.is_a?(Refile::File) and uploadable.backend.is_a?(S3) and uploadable.backend.access_key_id == access_key_id
+      if uploadable.is_a?(Refile::File) and uploadable.backend.is_a?(S3)
         object(id).copy_from(copy_source: [@bucket_name, uploadable.backend.object(uploadable.id).key].join("/"))
       else
         object(id).put(body: uploadable, content_length: uploadable.size)

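With the keys removed from the required keyword arguments, a backend running under an S3 IAM role (for example an EC2 instance profile) can be configured without credentials; the AWS SDK then resolves them through its default credential provider chain. A minimal sketch, mirroring the @example from the class documentation above; the region, bucket, and prefix values are placeholders:

require "refile/s3"

# No access_key_id / secret_access_key: the aws-sdk resolves credentials from
# its default provider chain (env vars, shared config, or the instance's IAM role).
backend = Refile::Backend::S3.new(
  region: "sa-east-1",
  bucket: "my-bucket",
  prefix: "files"
)

# Explicit keys still work: extra keywords are captured by **s3_options and
# merged into the options hash passed to Aws::S3::Resource.new.
backend_with_keys = Refile::Backend::S3.new(
  access_key_id: "xyz",            # placeholder credentials
  secret_access_key: "abcd1234",
  region: "sa-east-1",
  bucket: "my-bucket",
  prefix: "files"
)

Note the related change in upload: it no longer compares access_key_id between backends, so the server-side copy_from path is taken for any uploadable that already lives in an S3 backend.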