Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit ef6b22a

Browse files
committed May 22, 2025 ·
disable amp by default on cpu
1 parent 6b00363 commit ef6b22a

File tree

1 file changed

+3
-0
lines changed

1 file changed

+3
-0
lines changed
 

‎benchmarks/torchbench_model.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -373,6 +373,9 @@ def is_accelerator_tpu(self):
373373
return self.benchmark_experiment.accelerator == "tpu"
374374

375375
def use_amp(self):
  """Return True when this benchmark run should enable AMP.

  AMP (automatic mixed precision) is only supported on the cuda and tpu
  accelerators, so it is unconditionally disabled on cpu. On a supported
  accelerator, AMP is used for training runs, and also for the models the
  config explicitly forces to AMP for fp16/bf16.
  """
  if self.benchmark_experiment.accelerator == "cpu":
    # AMP is only supported on cuda and tpu, not on cpu.
    return False
  if self.is_training():
    return True
  # Inference: only the models the config forces to AMP use it.
  return self.model_name in config().dtype.force_amp_for_fp16_bf16_models
378381

0 commit comments

Comments
 (0)
Please sign in to comment.