diff --git a/infra/ansible/config/vars.yaml b/infra/ansible/config/vars.yaml
index 9f95fad978b..120a6ae7ae9 100644
--- a/infra/ansible/config/vars.yaml
+++ b/infra/ansible/config/vars.yaml
@@ -7,7 +7,7 @@ cuda_compute_capabilities: 7.0,7.5,8.0,9.0
 llvm_debian_repo: bullseye
 clang_version: 17
 # PyTorch and PyTorch/XLA wheel versions.
-package_version: 2.6.0
+package_version: 2.7.0
 # If set to true, wheels will be renamed to $WHEEL_NAME-nightly-cp38-cp38-linux_x86_64.whl.
 nightly_release: false
 # Whether to preinstall libtpu in the PyTorch/XLA wheel. Ignored for GPU build.
diff --git a/infra/tpu-pytorch-releases/artifacts.auto.tfvars b/infra/tpu-pytorch-releases/artifacts.auto.tfvars
index 1afbb76c093..c4f338d5b96 100644
--- a/infra/tpu-pytorch-releases/artifacts.auto.tfvars
+++ b/infra/tpu-pytorch-releases/artifacts.auto.tfvars
@@ -1,4 +1,4 @@
-nightly_package_version = "2.6.0"
+nightly_package_version = "2.7.0"
 
 # Built once a day from master.
 nightly_builds = [
@@ -50,6 +50,118 @@ nightly_builds = [
 
 # Built on push to specific tag.
 versioned_builds = [
+  # Remove libtpu from PyPI builds, pre-C++11 ABI builds
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.9"
+    bundle_libtpu = "0"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.10"
+    bundle_libtpu = "0"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.11"
+    bundle_libtpu = "0"
+  },
+  # Remove libtpu from PyPI builds, C++11 ABI builds
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.9"
+    bundle_libtpu = "0"
+    cxx11_abi = "1"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.10"
+    bundle_libtpu = "0"
+    cxx11_abi = "1"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.11"
+    bundle_libtpu = "0"
+    cxx11_abi = "1"
+  },
+  # Bundle libtpu for Kaggle
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1+libtpu"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "tpu"
+    python_version = "3.10"
+    bundle_libtpu = "1"
+  },
+  # CUDA 12.4, see PyTorch decision: https://github.com/pytorch/pytorch/issues/138609
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.4"
+    python_version = "3.9"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.4"
+    python_version = "3.10"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.4"
+    python_version = "3.11"
+  },
+  # CUDA 12.6, see PyTorch decision: https://github.com/pytorch/pytorch/issues/138609
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.6"
+    python_version = "3.9"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.6"
+    python_version = "3.10"
+  },
+  {
+    git_tag = "v2.6.0-rc1"
+    package_version = "2.6.0-rc1"
+    pytorch_git_rev = "v2.6.0-rc1"
+    accelerator = "cuda"
+    cuda_version = "12.6"
+    python_version = "3.11"
+  },
   # Remove libtpu from PyPI builds
   {
     git_tag = "v2.5.1"
diff --git a/setup.py b/setup.py
index d9ad05c81b7..fe2bfbe1108 100644
--- a/setup.py
+++ b/setup.py
@@ -100,7 +100,7 @@ def get_git_head_sha(base_dir):
 
 
 def get_build_version(xla_git_sha):
-  version = os.getenv('TORCH_XLA_VERSION', '2.6.0')
+  version = os.getenv('TORCH_XLA_VERSION', '2.7.0')
   if build_util.check_env_flag('GIT_VERSIONED_XLA_BUILD', default='TRUE'):
     try:
       version += '+git' + xla_git_sha[:7]
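For reference, the only functional change in the setup.py hunk is the fallback string passed to os.getenv; TORCH_XLA_VERSION still overrides it. Below is a minimal, self-contained sketch of that derivation, with build_util.check_env_flag simplified to a plain `git_versioned` boolean purely for illustration (that parameter is an assumption, not part of the real setup.py).

# Sketch of the version derivation from the setup.py hunk above.
# Assumption: build_util.check_env_flag('GIT_VERSIONED_XLA_BUILD', ...) is
# replaced by a plain `git_versioned` argument so the snippet runs standalone.
import os


def get_build_version(xla_git_sha, git_versioned=True):
  # Only the fallback default moves to 2.7.0; TORCH_XLA_VERSION takes precedence.
  version = os.getenv('TORCH_XLA_VERSION', '2.7.0')
  if git_versioned:
    # Versioned builds append the short git SHA, e.g. "2.7.0+git1a2b3c4".
    version += '+git' + xla_git_sha[:7]
  return version


print(get_build_version('1a2b3c4d5e6f7890'))  # -> 2.7.0+git1a2b3c4 (unless overridden)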