Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion benchmark/benchmarks/benchmark_config_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def get_prefetch_config(self) -> Dict[str, Any]:
prefetch_cfg = self.cfg.benchmarks.prefetch
return {
'max_memory_target': getattr(prefetch_cfg, 'max_memory_target', None),
'max_read_window_size': getattr(prefetch_cfg, 'max_read_window_size', None),
}

def get_crt_config(self) -> Dict[str, Any]:
Expand All @@ -83,5 +84,5 @@ def get_crt_config(self) -> Dict[str, Any]:
def get_client_config(self) -> Dict[str, Any]:
client_cfg = self.cfg.benchmarks.client
return {
'read_window_size': getattr(client_cfg, 'read_window_size', 2147483648), # Reaslitic default value 8M/2G?
'max_read_window_size': getattr(client_cfg, 'max_read_window_size', 2147483648), # Realistic default value of 2GiB
}
2 changes: 1 addition & 1 deletion benchmark/benchmarks/client_benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def run_benchmark(self) -> Dict[str, Any]:

if self.backpressure:
subprocess_args.append("--enable-backpressure")
if (initial_window_size := self.client_config['read_window_size']) is not None:
if (initial_window_size := self.client_config['max_read_window_size']) is not None:
subprocess_args.extend(["--initial-window-size", str(initial_window_size)])

if (run_time := self.common_config['run_time']) is not None:
Expand Down
3 changes: 3 additions & 0 deletions benchmark/benchmarks/prefetch_benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,9 @@ def run_benchmark(self) -> None:
if not self.common_config['download_checksums']:
prefetch_env["EXPERIMENTAL_MOUNTPOINT_NO_DOWNLOAD_INTEGRITY_VALIDATION"] = "ON"

if self.prefetch_config['max_read_window_size'] is not None:
prefetch_env["UNSTABLE_MOUNTPOINT_MAX_PREFETCH_WINDOW_SIZE"] = str(self.prefetch_config['max_read_window_size'])
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For prefetcher, we could configure initial and max window size, do we want to configure both? Would it be a like-for-like comparison with the client backpressure benchmark if we just configure max window size?


subprocess_env = os.environ.copy() | prefetch_env
log.debug("Subprocess env: %s", subprocess_env)

Expand Down
3 changes: 2 additions & 1 deletion benchmark/conf/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ benchmarks:

prefetch:
max_memory_target: !!null
max_read_window_size: !!null

crt:
crt_benchmarks_path: !!null
Expand All @@ -62,7 +63,7 @@ benchmarks:
# None

client_backpressure:
read_window_size: !!null #2147483648
max_read_window_size: !!null #2147483648
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this a constant read window size or can it go up to this value?



hydra:
Expand Down
2 changes: 1 addition & 1 deletion mountpoint-s3-client/examples/client_benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -218,7 +218,7 @@ struct CliArgs {
#[arg(
long,
help = "Initial read window size in bytes, used to dictate how far ahead we request data from S3",
default_value = "0"
default_value = "2147483648"
Copy link
Contributor

@sahityadg sahityadg Aug 8, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

My understanding is this is a fixed read window size in client benchmarks. So should we rename the parameter to read_window_size?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: can we also document here somehow that this number corresponds to 2GiB?

)]
initial_window_size: Option<usize>,
#[arg(long, help = "Output file to write the results to", value_name = "OUTPUT_FILE")]
Expand Down
10 changes: 5 additions & 5 deletions mountpoint-s3-fs/examples/prefetch_benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ pub struct CliArgs {
#[arg(
long,
help = "Size of read requests requests to the prefetcher",
default_value_t = 128 * 1024,
default_value_t = 256 * 1024,
value_name = "BYTES",
)]
read_size: usize,
Expand Down Expand Up @@ -233,9 +233,9 @@ fn main() -> anyhow::Result<()> {
let received_size = received_bytes.load(Ordering::SeqCst);
total_bytes += received_size;
println!(
"{iteration}: received {received_size} bytes in {:.2}s: {:.2} Gib/s",
"{iteration}: received {received_size} bytes in {:.2}s: {:.2} Gb/s",
elapsed.as_secs_f64(),
(received_size as f64) / elapsed.as_secs_f64() / (1024 * 1024 * 1024 / 8) as f64
(received_size as f64) / elapsed.as_secs_f64() / (1000 * 1000 * 1000 / 8) as f64
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: can we make this value more readable? not sure if best to use a clearly named variable or a function to reuse in line 251 too

);
iter_results.push(json!({
"iteration": iteration,
Expand All @@ -246,9 +246,9 @@ fn main() -> anyhow::Result<()> {
}
let total_elapsed = total_start.elapsed();
println!(
"\nTotal: {iteration} iterations, {total_bytes} bytes in {:.2}s: {:.2} Gib/s",
"\nTotal: {iteration} iterations, {total_bytes} bytes in {:.2}s: {:.2} Gb/s",
total_elapsed.as_secs_f64(),
(total_bytes as f64) / total_elapsed.as_secs_f64() / (1024 * 1024 * 1024 / 8) as f64
(total_bytes as f64) / total_elapsed.as_secs_f64() / (1000 * 1000 * 1000 / 8) as f64
);

if let Some(output_path) = args.output_file {
Expand Down
Loading