1 parent d4465c8 commit 7fbcbca
torchtune/training/_distributed.py
@@ -36,17 +36,13 @@
 from torchtune.modules.peft import get_adapter_state_dict
 from torchtune.utils import get_device, get_logger
 from torchtune.utils._logging import deprecated
-from torchtune.utils._version import torch_version_ge

 _log: logging.Logger = get_logger()


 _valid_distributed_single_node_nnodes = ["1:1", "1"]

-torch_version = torch.__version__
-_DISTRIBUTED_STATE_DICT_API_IS_AVAILABLE = (
-    "dev" not in torch_version and torch_version_ge("2.6.0")
-) or ("dev" in torch_version and torch_version.split("dev")[1] >= "20241220")
+_DISTRIBUTED_STATE_DICT_API_IS_AVAILABLE = False


 def _get_sharding_strategy(strategy: str) -> ShardingStrategy:
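For context, a minimal standalone sketch of what the removed gate evaluated before this commit hardcoded the flag to False. It approximates torchtune's torch_version_ge helper with packaging.version, and the helper name and version strings below are hypothetical, chosen only for illustration:

from packaging.version import parse


def _state_dict_api_available(torch_version: str) -> bool:
    # Mirrors the deleted expression: True on stable releases >= 2.6.0,
    # or on dev/nightly builds whose date suffix is on or after 2024-12-20.
    if "dev" in torch_version:
        # e.g. "2.6.0.dev20241224" -> compare the date suffix as a string
        return torch_version.split("dev")[1] >= "20241220"
    return parse(torch_version) >= parse("2.6.0")


# Hypothetical version strings for illustration only:
assert _state_dict_api_available("2.6.0") is True
assert _state_dict_api_available("2.5.1") is False
assert _state_dict_api_available("2.6.0.dev20241224") is True
assert _state_dict_api_available("2.6.0.dev20241210") is False

After this change, the module no longer probes the installed torch version and instead treats the distributed state-dict API as unavailable unconditionally.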