+import warnings
+
+import numpy as np
+from absl.testing import parameterized
+
+from keras.src import backend
+from keras.src import layers
+from keras.src import models
+from keras.src import testing
+from keras.src.backend import distribution_lib as backend_dlib
+from keras.src.distribution import distribution_lib
+
+
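+# These tests exercise the mixed-sharding warning: building/compiling under a
+# distribution scope should warn when the model weights were created outside
+# that scope, and stay silent when they were created inside it.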
+class JAXTrainerTest(testing.TestCase, parameterized.TestCase):
+    def _skip_if_not_distributed(self):
+        if backend.backend() != "jax":
+            self.skipTest("Requires JAX backend")
+        if len(backend_dlib.list_devices()) < 2:
+            self.skipTest("Requires at least 2 devices")
+
+    def _make_distribution(self, dist_type):
+        if dist_type == "data_parallel":
+            return distribution_lib.DataParallel()
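+        # Otherwise build a 1-D "model" mesh over all devices and shard
+        # dense kernels and biases along it via regex-keyed layouts.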
+        devices = backend_dlib.list_devices()
+        n = len(devices)
+        mesh = distribution_lib.DeviceMesh((n,), ["model"], devices)
+        layout_map = distribution_lib.LayoutMap(mesh)
+        layout_map[".*dense.*kernel"] = distribution_lib.TensorLayout(
+            [None, "model"]
+        )
+        layout_map[".*dense.*bias"] = distribution_lib.TensorLayout(["model"])
+        return distribution_lib.ModelParallel(layout_map=layout_map)
+
+    # ----------------------------------------------------------------
+    # Mixed-sharding warning tests
+    # ----------------------------------------------------------------
+    @parameterized.named_parameters(
+        {"testcase_name": "DataParallel", "dist_type": "data_parallel"},
+        {"testcase_name": "ModelParallel", "dist_type": "model_parallel"},
+    )
+    def test_warns_when_model_built_outside_scope(self, dist_type):
+        """Model built outside the scope -> mixed warning on compile."""
+        self._skip_if_not_distributed()
+        import jax
+
+        n = len(backend_dlib.list_devices())
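+        # Pick a unit count divisible by the device count so the "model"
+        # axis sharding splits the kernel evenly.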
+        units = n * max(1, 4 // n)
+        dist = self._make_distribution(dist_type)
+
+        # Model created outside any distribution scope; weights are local.
+        model = models.Sequential(
+            [layers.Input(shape=(16,)), layers.Dense(units)]
+        )
+
+        for w in model.weights:
+            self.assertIsInstance(
+                w.value.sharding, jax.sharding.SingleDeviceSharding
+            )
+
+        inputs = np.random.normal(size=(8, 16)).astype("float32")
+        labels = np.random.normal(size=(8, units)).astype("float32")
+
+        with dist.scope():
+            model.compile(loss="mse", optimizer="adam")
+            with warnings.catch_warnings(record=True) as caught:
+                warnings.simplefilter("always")
+                model._symbolic_build(data_batch=(inputs[:2], labels[:2]))
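+                # Building optimizer/metric state under the scope while the
+                # weights stayed local should make the call below warn.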
+                model._get_state_sharding_spec()
+
+        mixed = [w for w in caught if "mix of local" in str(w.message)]
+        self.assertGreater(
+            len(mixed),
+            0,
+            "Expected a mixed-sharding warning but none was raised",
+        )
+        msg = str(mixed[0].message)
+        self.assertIn("SingleDeviceSharding", msg)
+        self.assertIn("set_distribution", msg)
+
+    @parameterized.named_parameters(
+        {"testcase_name": "DataParallel", "dist_type": "data_parallel"},
+        {"testcase_name": "ModelParallel", "dist_type": "model_parallel"},
+    )
+    def test_no_warning_when_model_built_inside_scope(self, dist_type):
+        """Model built inside the scope -> no mixed-sharding warning."""
+        self._skip_if_not_distributed()
+
+        n = len(backend_dlib.list_devices())
+        units = n * max(1, 4 // n)
+        dist = self._make_distribution(dist_type)
+
+        # Model created inside the scope; weights get the distributed layout.
+        with dist.scope():
+            model = models.Sequential(
+                [layers.Input(shape=(16,)), layers.Dense(units)]
+            )
+
+        inputs = np.random.normal(size=(8, 16)).astype("float32")
+        labels = np.random.normal(size=(8, units)).astype("float32")
+
+        with dist.scope():
+            model.compile(loss="mse", optimizer="adam")
+            with warnings.catch_warnings(record=True) as caught:
+                warnings.simplefilter("always")
+                model._symbolic_build(data_batch=(inputs[:2], labels[:2]))
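+                # Same sequence as above; all state now shares the
+                # distributed layout, so no mixed warning should fire.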
+                model._get_state_sharding_spec()
+
+        mixed = [w for w in caught if "mix of local" in str(w.message)]
+        self.assertEqual(
+            len(mixed),
+            0,
+            "Unexpected mixed-sharding warning when model is "
+            "built inside scope",
+        )