Skip to content

Commit 963c8af

Browse files
authored
Increased the number of random characters used when creating resources to minimize collisions (#214)
## Changes * Increased the number of random characters used when creating resources from 4 (and in a few fixtures, 6) to 8. In larger projects like DQX, which rely heavily on PyTester, collisions were occurring too frequently with the previous shorter identifiers. * Bumped the pinned hatch version (1.9.4 → 1.15.0) to avoid click compatibility issues. ### Tests Changes covered by regression testing. - [x] manually tested - [ ] added unit tests - [ ] added integration tests - [ ] verified on staging environment (screenshot attached)
1 parent a54f9b7 commit 963c8af

File tree

14 files changed

+34
-34
lines changed

14 files changed

+34
-34
lines changed

.github/workflows/acceptance.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ jobs:
3737
python-version: '3.10'
3838

3939
- name: Install hatch
40-
run: pip install hatch==1.9.4
40+
run: pip install hatch==1.15.0
4141

4242
- name: Fetch relevant branches
4343
run: |

.github/workflows/downstreams.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ jobs:
4444

4545
- name: Install toolchain
4646
run: |
47-
pip install hatch==1.9.4
47+
pip install hatch==1.15.0
4848
- name: Downstreams
4949
uses: databrickslabs/sandbox/downstreams@acceptance/v0.4.2
5050
with:

.github/workflows/nightly.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ jobs:
3232
python-version: '3.10'
3333

3434
- name: Install hatch
35-
run: pip install hatch==1.9.4
35+
run: pip install hatch==1.15.0
3636

3737
- name: Run nightly tests
3838
uses: databrickslabs/sandbox/acceptance@acceptance/v0.4.2

.github/workflows/push.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ on:
1515
- main
1616

1717
env:
18-
HATCH_VERSION: 1.9.4
18+
HATCH_VERSION: 1.15.0
1919

2020
jobs:
2121
ci:

.github/workflows/release.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ jobs:
2727

2828
- name: Build wheels
2929
run: |
30-
pip install hatch==1.9.4
30+
pip install hatch==1.15.0
3131
hatch build
3232
3333
- name: Draft release

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -989,7 +989,7 @@ Keyword Arguments:
989989
Usage:
990990
```python
991991
def test_storage_credential(env_or_skip, make_storage_credential, make_random):
992-
random = make_random(6).lower()
992+
random = make_random(8).lower()
993993
credential_name = f"dummy-{random}"
994994
make_storage_credential(
995995
credential_name=credential_name,
@@ -1021,7 +1021,7 @@ def test_volume_creation(make_catalog, make_schema, make_volume, make_random):
10211021
schema = make_schema(catalog_name=catalog.name)
10221022

10231023
# Generate a random name for the volume
1024-
volume_name = f"dummy_vol_{make_random(6).lower()}"
1024+
volume_name = f"dummy_vol_{make_random(8).lower()}"
10251025

10261026
# Create the volume
10271027
volume = make_volume(

src/databricks/labs/pytester/fixtures/catalog.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ def create( # pylint: disable=too-many-locals,too-many-arguments,too-many-state
122122
schema = make_schema(catalog_name=catalog_name)
123123
catalog_name = schema.catalog_name
124124
schema_name = schema.name
125-
name = name or f"dummy_t{make_random(4).lower()}"
125+
name = name or f"dummy_t{make_random(8).lower()}"
126126
table_type: TableType | None = None
127127
data_source_format = None
128128
storage_location = None
@@ -267,7 +267,7 @@ def test_catalog_fixture(make_catalog, make_schema, make_table):
267267
def create(
268268
*, catalog_name: str = "hive_metastore", name: str | None = None, location: str | None = None
269269
) -> SchemaInfo:
270-
name = name or f"dummy_s{make_random(4)}".lower()
270+
name = name or f"dummy_s{make_random(8)}".lower()
271271
full_name = f"{catalog_name}.{name}".lower()
272272
schema_ddl = f"CREATE SCHEMA {full_name}"
273273
if location:
@@ -307,7 +307,7 @@ def test_catalog_fixture(make_catalog, make_schema, make_table):
307307
"""
308308

309309
def create(*, name: str | None = None) -> CatalogInfo:
310-
name = name or f"dummy_c{make_random(4)}".lower()
310+
name = name or f"dummy_c{make_random(8)}".lower()
311311
catalog_info = ws.catalogs.create(name=name, properties={"RemoveAfter": watchdog_remove_after})
312312
if isinstance(catalog_info, Mock):
313313
catalog_info.name = name
@@ -359,7 +359,7 @@ def create(
359359
catalog_name = schema.catalog_name
360360
schema_name = schema.name
361361

362-
name = name or f"dummy_f{make_random(4)}".lower()
362+
name = name or f"dummy_f{make_random(8)}".lower()
363363

364364
# Note: the Watchdog does not explicitly scan for functions; they are purged along with their parent schema.
365365
# As such the function can't be marked (and doesn't need to be if the schema as marked) for purge protection.
@@ -409,7 +409,7 @@ def make_storage_credential(ws, watchdog_remove_after) -> Generator[Callable[...
409409
Usage:
410410
```python
411411
def test_storage_credential(env_or_skip, make_storage_credential, make_random):
412-
random = make_random(6).lower()
412+
random = make_random(8).lower()
413413
credential_name = f"dummy-{random}"
414414
make_storage_credential(
415415
credential_name=credential_name,
@@ -471,7 +471,7 @@ def test_volume_creation(make_catalog, make_schema, make_volume, make_random):
471471
schema = make_schema(catalog_name=catalog.name)
472472
473473
# Generate a random name for the volume
474-
volume_name = f"dummy_vol_{make_random(6).lower()}"
474+
volume_name = f"dummy_vol_{make_random(8).lower()}"
475475
476476
# Create the volume
477477
volume = make_volume(
@@ -498,7 +498,7 @@ def create(
498498
schema_name = schema.name
499499

500500
if not name:
501-
name = f"dummy_v{make_random(6).lower()}"
501+
name = f"dummy_v{make_random(8).lower()}"
502502

503503
volume_info = ws.volumes.create(
504504
catalog_name=catalog_name,

src/databricks/labs/pytester/fixtures/compute.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def test_cluster_policy(make_cluster_policy):
4848

4949
def create(*, name: str | None = None, **kwargs) -> CreatePolicyResponse:
5050
if name is None:
51-
name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}"
51+
name = f"dummy-{make_random(8)}-{watchdog_purge_suffix}"
5252
if "definition" not in kwargs:
5353
kwargs["definition"] = json.dumps(
5454
{
@@ -92,7 +92,7 @@ def create(
9292
**kwargs,
9393
) -> Wait[ClusterDetails]:
9494
if cluster_name is None:
95-
cluster_name = f"dummy-{make_random(4)}"
95+
cluster_name = f"dummy-{make_random(8)}"
9696
if spark_version is None:
9797
spark_version = ws.clusters.select_spark_version(latest=True)
9898
if single_node:
@@ -148,7 +148,7 @@ def test_instance_pool(make_instance_pool):
148148

149149
def create(*, instance_pool_name=None, node_type_id=None, **kwargs) -> CreateInstancePoolResponse:
150150
if instance_pool_name is None:
151-
instance_pool_name = f"dummy-{make_random(4)}"
151+
instance_pool_name = f"dummy-{make_random(8)}"
152152
if node_type_id is None:
153153
node_type_id = ws.clusters.select_node_type(local_disk=True, min_memory_gb=16)
154154
pool = ws.instance_pools.create(
@@ -223,16 +223,16 @@ def create( # pylint: disable=too-many-arguments
223223
raise ValueError(
224224
"The `tasks` parameter is exclusive with the `path`, `content` `spark_conf` and `libraries` parameters."
225225
)
226-
name = name or f"dummy-j{make_random(4)}"
226+
name = name or f"dummy-j{make_random(8)}"
227227
tags = tags or {}
228228
tags["RemoveAfter"] = tags.get("RemoveAfter", watchdog_remove_after)
229229
if not tasks:
230230
node_type_id = None
231231
if instance_pool_id is None:
232232
node_type_id = ws.clusters.select_node_type(local_disk=True, min_memory_gb=16)
233233
task = Task(
234-
task_key=make_random(4),
235-
description=make_random(4),
234+
task_key=make_random(8),
235+
description=make_random(8),
236236
new_cluster=ClusterSpec(
237237
num_workers=1,
238238
node_type_id=node_type_id,
@@ -292,7 +292,7 @@ def test_pipeline(make_pipeline, make_pipeline_permissions, make_group):
292292

293293
def create(**kwargs) -> CreatePipelineResponse:
294294
if "name" not in kwargs:
295-
kwargs["name"] = f"sdk-{make_random(4)}-{watchdog_purge_suffix}"
295+
kwargs["name"] = f"sdk-{make_random(8)}-{watchdog_purge_suffix}"
296296
if "libraries" not in kwargs:
297297
notebook_library = NotebookLibrary(path=make_notebook().as_posix())
298298
kwargs["libraries"] = [PipelineLibrary(notebook=notebook_library)]
@@ -342,7 +342,7 @@ def create(
342342
**kwargs,
343343
) -> Wait[GetWarehouseResponse]:
344344
if warehouse_name is None:
345-
warehouse_name = f"dummy-{make_random(4)}"
345+
warehouse_name = f"dummy-{make_random(8)}"
346346
if warehouse_type is None:
347347
warehouse_type = CreateWarehouseRequestWarehouseType.PRO
348348
if cluster_size is None:

src/databricks/labs/pytester/fixtures/iam.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ def test_new_user(make_user, ws):
4949

5050
@retried(on=[ResourceConflict], timeout=timedelta(seconds=30))
5151
def create(**kwargs) -> User:
52-
user_name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}@example.com".lower()
52+
user_name = f"dummy-{make_random(8)}-{watchdog_purge_suffix}@example.com".lower()
5353
user = ws.users.create(user_name=user_name, **kwargs)
5454
log_workspace_link(user.user_name, f'settings/workspace/identity-and-access/users/{user.id}')
5555
return user
@@ -171,7 +171,7 @@ def create(
171171
**kwargs,
172172
):
173173
kwargs["display_name"] = (
174-
f"sdk-{make_random(4)}-{watchdog_purge_suffix}" if display_name is None else display_name
174+
f"sdk-{make_random(8)}-{watchdog_purge_suffix}" if display_name is None else display_name
175175
)
176176
if members is not None:
177177
kwargs["members"] = _scim_values(members)

src/databricks/labs/pytester/fixtures/ml.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ def create(
6161
folder = make_directory(path=path)
6262
if experiment_name is None:
6363
# The purge suffix is needed here as well, just in case the path was supplied.
64-
experiment_name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}"
64+
experiment_name = f"dummy-{make_random(8)}-{watchdog_purge_suffix}"
6565
experiment = ws.experiments.create_experiment(name=f"{folder}/{experiment_name}", **kwargs)
6666
log_workspace_link(f'{experiment_name} experiment', f'ml/experiments/{experiment.experiment_id}', anchor=False)
6767
return experiment
@@ -95,7 +95,7 @@ def test_models(make_group, make_model, make_registered_model_permissions):
9595

9696
def create(*, model_name: str | None = None, **kwargs) -> ModelDatabricks:
9797
if model_name is None:
98-
model_name = f"dummy-{make_random(4)}"
98+
model_name = f"dummy-{make_random(8)}"
9999
remove_after_tag = ModelTag(key="RemoveAfter", value=watchdog_remove_after)
100100
if 'tags' not in kwargs:
101101
kwargs["tags"] = [remove_after_tag]
@@ -143,7 +143,7 @@ def create(
143143
model_name: str | None = None,
144144
model_version: str | None = None,
145145
) -> Wait[ServingEndpointDetailed]:
146-
endpoint_name = endpoint_name or make_random(4)
146+
endpoint_name = endpoint_name or make_random(8)
147147
model_name = model_name or "system.ai.llama_v3_2_1b_instruct"
148148
if not model_version and "." not in model_name: # The period in the name signals it is NOT workspace local
149149
try:

0 commit comments

Comments
 (0)