Skip to content

Commit 562e838

Browse files
xinhe3 authored and XuehaoSun committed
workaround for v1.20 missing attribution
Signed-off-by: Xin He <[email protected]>
1 parent 7feebad commit 562e838

File tree

2 files changed

+3
-1
lines changed

2 files changed

+3
-1
lines changed

neural_compressor/torch/algorithms/fp8_quant/_core/utils.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def quantize_dynamic_op(config, mod_type):
9191

9292

9393
def set_runtime_scale_patching_mode(scaling_method_name):
94-
if is_runtime_scale_patching():
94+
if is_runtime_scale_patching() and hasattr(htexp, "_set_scale_attributes"):
9595
assert (
9696
scaling_method_name in runtime_scale_patching_supported_methods_list
9797
), f"Scaling method \"{scaling_method_name}\" is not supported for runtime scale patching (graph recompile reduction). Cannot set scaling attributes."

test/3x/torch/algorithms/fp8_quant/unit_tests/test_runtime_scale_patching.py

+2
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import shutil
55
import copy
66
import habana_frameworks.torch.core as htcore
7+
import habana_frameworks.torch.utils.experimental as htexp
78

89
from ..tester import RUNTIME_SCALE_PATCHING_SUPPORTED_METHODS_LIST, SCALE_METHODS_KEY_ERROR, run_with_raised_exception
910
from neural_compressor.torch.algorithms.fp8_quant._core.common import is_runtime_scale_patching
@@ -47,6 +48,7 @@ def temp_directory():
4748
shutil.rmtree(temp_dir)
4849

4950

51+
@pytest.mark.skipif(not hasattr(htexp, "_set_scale_attributes"), reason="scale attributes not supported")
5052
@pytest.mark.parametrize("scale_method", ScaleMethod)
5153
@pytest.mark.parametrize("scale_format", ["SCALAR", "CONST"])
5254
@pytest.mark.parametrize("dynamic_scale_patching", [True, False])

0 commit comments

Comments
 (0)