File tree 2 files changed +7
-3
lines changed
2 files changed +7
-3
lines changed Original file line number Diff line number Diff line change 81
81
# so setting it to 0 means each operation will be tried once.
82
82
S3_RETRY_COUNT = from_conf("S3_RETRY_COUNT", 7)
83
83
84
+ # Number of concurrent S3 processes for parallel operations.
85
+ S3_WORKER_COUNT = from_conf("S3_WORKER_COUNT", 64)
86
+
84
87
# Number of retries on *transient* failures (such as SlowDown errors). Note
85
88
# that if after S3_TRANSIENT_RETRY_COUNT times, all operations haven't been done,
86
89
# it will try up to S3_RETRY_COUNT again so the total number of tries can be up to
Original file line number Diff line number Diff line change 44
44
TRANSIENT_RETRY_START_LINE ,
45
45
)
46
46
import metaflow.tracing as tracing
47
-
48
- NUM_WORKERS_DEFAULT = 64
47
+ from metaflow.metaflow_config import (
48
+ S3_WORKER_COUNT,
49
+ )
49
50
50
51
DOWNLOAD_FILE_THRESHOLD = 2 * TransferConfig().multipart_threshold
51
52
DOWNLOAD_MAX_CHUNK = 2 * 1024 * 1024 * 1024 - 1
@@ -656,7 +657,7 @@ def common_options(func):
656
657
)
657
658
@click.option(
658
659
"--num-workers",
659
- default=NUM_WORKERS_DEFAULT,
660
+ default=S3_WORKER_COUNT,
660
661
show_default=True,
661
662
help="Number of concurrent connections.",
662
663
)
You can’t perform that action at this time.
0 commit comments