Skip to content

Commit 636f362

Browse files
authored
Merge pull request #53 from lsst-dm/tickets/DM-53459
DM-53459: Fix Alembic/SQLAlchemy warnings.
2 parents 80dd401 + 58d3911 commit 636f362

File tree

8 files changed

+62
-62
lines changed

8 files changed

+62
-62
lines changed

migrations/_oneshot/datasets/int_1.0.0_to_uuid_1.0.0/2101fbf51ad3.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
Revision ID: 2101fbf51ad3
44
Revises: 635083325f20
55
Create Date: 2021-05-04 16:31:05.926989
6-
76
"""
87
from __future__ import annotations
98

@@ -452,24 +451,24 @@ def _drop_columns(table_name: str, table_info: TableInfo, schema: str | None) ->
452451
for index_dict in table_info.indices:
453452
index_name = index_dict["name"]
454453
_LOG.debug("Dropping index %s", index_dict)
455-
batch_op.drop_index(index_name) # type: ignore[attr-defined]
454+
batch_op.drop_index(index_name)
456455

457456
for unique_dict in table_info.unique_constraints:
458457
unique_name = unique_dict["name"]
459458
_LOG.debug("Dropping unique constraint %s", unique_name)
460-
batch_op.drop_constraint(unique_name) # type: ignore[attr-defined]
459+
batch_op.drop_constraint(unique_name)
461460

462461
for fk_dict in table_info.foreign_keys:
463462
fk_name = fk_dict["name"]
464463
_LOG.debug("Dropping foreign key %s", fk_name)
465-
batch_op.drop_constraint(fk_name) # type: ignore[attr-defined]
464+
batch_op.drop_constraint(fk_name)
466465

467466
if table_info.primary_key:
468467
_LOG.debug("Primary key: %s", table_info.primary_key)
469468
pk_name = table_info.primary_key["name"]
470469
if pk_name:
471470
_LOG.debug("Dropping primary key %s", pk_name)
472-
batch_op.drop_constraint(pk_name) # type: ignore[attr-defined]
471+
batch_op.drop_constraint(pk_name)
473472
else:
474473
dialect = op.get_bind().dialect
475474
if dialect.name == "sqlite":
@@ -483,13 +482,13 @@ def _drop_columns(table_name: str, table_info: TableInfo, schema: str | None) ->
483482
idx = columns.index(id_col)
484483
columns[idx] += "_uuid"
485484
_LOG.debug("Creating primary key %s", columns)
486-
batch_op.create_primary_key( # type: ignore[attr-defined]
485+
batch_op.create_primary_key(
487486
f"{table_name}_pkey", columns
488487
)
489488

490489
# drop column
491490
_LOG.debug("Dropping column %s", id_col)
492-
batch_op.drop_column(id_col) # type: ignore[attr-defined]
491+
batch_op.drop_column(id_col)
493492

494493
# drop a sequence as well
495494
if table_name == "dataset":
@@ -509,7 +508,7 @@ def _rename_column(table_name: str, schema: str | None) -> None:
509508
_LOG.debug("Renaming uuid column in table %s to %s", table_name, id_col)
510509

511510
with op.batch_alter_table(table_name, schema) as batch_op:
512-
batch_op.alter_column( # type: ignore[attr-defined]
511+
batch_op.alter_column(
513512
f"{id_col}_uuid", new_column_name=id_col, nullable=False
514513
)
515514

@@ -524,20 +523,20 @@ def _make_indices(table_name: str, table_info: TableInfo, schema: str | None) ->
524523
pk_name = f"{table_name}_pkey"
525524
_LOG.debug("Adding primary key %s", pk_name)
526525
columns = table_info.primary_key["constrained_columns"]
527-
batch_op.create_primary_key(pk_name, columns) # type: ignore[attr-defined]
526+
batch_op.create_primary_key(pk_name, columns)
528527

529528
for unique_dict in table_info.unique_constraints:
530529
unique_name = unique_dict["name"]
531530
_LOG.debug("Adding unique constraint %s", unique_name)
532531
columns = unique_dict["column_names"]
533-
batch_op.create_unique_constraint(unique_name, columns) # type: ignore[attr-defined]
532+
batch_op.create_unique_constraint(unique_name, columns)
534533

535534
for index_dict in table_info.indices:
536535
index_name = index_dict["name"]
537536
_LOG.debug("Adding index %s", index_dict)
538537
columns = index_dict["column_names"]
539538
unique = index_dict["unique"]
540-
batch_op.create_index(index_name, columns, unique=unique) # type: ignore[attr-defined]
539+
batch_op.create_index(index_name, columns, unique=unique)
541540

542541
for fk_dict in table_info.foreign_keys:
543542
fk_name = fk_dict["name"]
@@ -551,7 +550,7 @@ def _make_indices(table_name: str, table_info: TableInfo, schema: str | None) ->
551550
ondelete = None
552551
if table_name.startswith(DYNAMIC_TABLES_PREFIX):
553552
ondelete = "CASCADE"
554-
batch_op.create_foreign_key( # type: ignore[attr-defined]
553+
batch_op.create_foreign_key(
555554
fk_name, ref_table, local_cols, remote_cols, ondelete=ondelete, referent_schema=ref_schema
556555
)
557556

migrations/datasets/4e2d7a28475b.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
Revision ID: 4e2d7a28475b
44
Revises: 2101fbf51ad3
55
Create Date: 2023-03-31 22:25:32.651155
6-
76
"""
87

98
import datetime
@@ -149,10 +148,10 @@ def _migrate_default(
149148
with op.batch_alter_table("dataset", schema) as batch_op:
150149
if server_default is None:
151150
# If removing a default it has to be done first
152-
batch_op.alter_column("ingest_date", server_default=None) # type: ignore[attr-defined]
151+
batch_op.alter_column("ingest_date", server_default=None)
153152
with op.batch_alter_table("dataset", schema) as batch_op:
154153
# Change the type of the column.
155-
batch_op.alter_column( # type: ignore[attr-defined]
154+
batch_op.alter_column(
156155
"ingest_date", type_=column_type, server_default=server_default
157156
)
158157

migrations/dimensions-config/bf6308af80aa.py

Lines changed: 35 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
Revision ID: bf6308af80aa
44
Revises: 380002bcbb26
55
Create Date: 2022-05-16 12:11:17.906600
6-
76
"""
87
import logging
98

@@ -74,7 +73,7 @@ def _FKConstraint(
7473
schema: str | None = None,
7574
ondelete: str | None = None,
7675
) -> sa.schema.ForeignKeyConstraint:
77-
"""Create foreign key constraint."""
76+
# Create foreign key constraint.
7877
fk_name = "_".join(["fkey", src_table, tgt_table] + tgt_columns + src_columns)
7978
fk_name = shrinkDatabaseEntityName(fk_name, bind)
8079
tgt_columns = [f"{tgt_table}.{col}" for col in tgt_columns]
@@ -85,7 +84,8 @@ def _FKConstraint(
8584

8685
def _migrate_visit_definition() -> None:
8786
"""Make new visit_system_membership table, fill from existing data
88-
in visit_definition."""
87+
in visit_definition.
88+
"""
8989

9090
mig_context = context.get_context()
9191
bind = mig_context.bind
@@ -147,23 +147,38 @@ def _migrate_visit_definition() -> None:
147147
visit_definition.columns["visit"],
148148
).distinct()
149149
sql = visit_membership.insert().from_select(["instrument", "visit_system", "visit"], selection)
150-
op.execute(sql) # type: ignore[arg-type]
150+
op.execute(sql)
151151

152152
# Drop visit_system from visit_definition.
153153
with op.batch_alter_table("visit_definition", schema=schema) as batch_op:
154154
# visit_system is in PK. Postgres drops PK when the column is dropped,
155155
# but sqlite complains that column cannot be dropped if it's in PK.
156-
# The workaround is to create PK with new columns, but Postgres also
157-
# requires existing PK to be dropped first, and in sqlite we do not
158-
# even have name for PK constraint, so it cannot be dropped
159-
# explicitly.
156+
# Dropping PK in SQLite cannot be done as we generate PKs without name.
157+
# Simply trying to replace PK using different columns generates
158+
# SAWarning. The workaround is to re-create PK with old columns and
159+
# name it, then drop that named PK. Postgres also requires existing PK
160+
# to be dropped first before re-creating it.
161+
162+
# Drop PK in Postgres.
160163
if bind.dialect.name == "postgresql":
161-
batch_op.drop_constraint("visit_definition_pkey") # type: ignore[attr-defined]
162-
batch_op.create_primary_key( # type: ignore[attr-defined]
164+
batch_op.drop_constraint("visit_definition_pkey")
165+
166+
# Re-create PK and give it a name.
167+
batch_op.create_primary_key(
168+
"visit_definition_pkey", ["instrument", "visit_system", "exposure"]
169+
)
170+
171+
# Now drop named PK.
172+
batch_op.drop_constraint("visit_definition_pkey")
173+
174+
# Create PK with different columns.
175+
batch_op.create_primary_key(
163176
"visit_definition_pkey", ["instrument", "exposure", "visit"]
164177
)
165-
batch_op.drop_index("visit_definition_fkidx_instrument_visit_system") # type: ignore[attr-defined]
166-
batch_op.drop_column("visit_system") # type: ignore[attr-defined]
178+
179+
# Finally drop index and column.
180+
batch_op.drop_index("visit_definition_fkidx_instrument_visit_system")
181+
batch_op.drop_column("visit_system")
167182

168183

169184
def _migrate_instrument() -> None:
@@ -202,10 +217,10 @@ def _migrate_visit() -> None:
202217

203218
_LOG.info("migrating visit table")
204219
with op.batch_alter_table("visit", schema=schema) as batch_op:
205-
batch_op.drop_index("visit_fkidx_instrument_visit_system") # type: ignore[attr-defined]
206-
batch_op.drop_column("visit_system") # type: ignore[attr-defined]
207-
batch_op.add_column(sa.Column("seq_num", sa.BigInteger)) # type: ignore[attr-defined]
208-
batch_op.add_column(sa.Column("azimuth", sa.Float)) # type: ignore[attr-defined]
220+
batch_op.drop_index("visit_fkidx_instrument_visit_system")
221+
batch_op.drop_column("visit_system")
222+
batch_op.add_column(sa.Column("seq_num", sa.BigInteger))
223+
batch_op.add_column(sa.Column("azimuth", sa.Float))
209224

210225
# Fill seq_num column with the lowest value of matching exposure.seq_num.
211226
#
@@ -259,10 +274,10 @@ def _migrate_exposure(has_simulated: bool) -> None:
259274

260275
_LOG.info("migrating exposure table")
261276
with op.batch_alter_table("exposure", schema=schema) as batch_op:
262-
batch_op.add_column(sa.Column("seq_start", sa.BigInteger)) # type: ignore[attr-defined]
263-
batch_op.add_column(sa.Column("seq_end", sa.BigInteger)) # type: ignore[attr-defined]
264-
batch_op.add_column(sa.Column("azimuth", sa.Float)) # type: ignore[attr-defined]
265-
batch_op.add_column(sa.Column("has_simulated", sa.Boolean)) # type: ignore[attr-defined]
277+
batch_op.add_column(sa.Column("seq_start", sa.BigInteger))
278+
batch_op.add_column(sa.Column("seq_end", sa.BigInteger))
279+
batch_op.add_column(sa.Column("azimuth", sa.Float))
280+
batch_op.add_column(sa.Column("has_simulated", sa.Boolean))
266281

267282
table = sa.schema.Table("exposure", metadata, autoload_with=bind, schema=schema)
268283
op.execute(

migrations/dimensions-config/c5ae3a2cd7c2.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
Revision ID: c5ae3a2cd7c2
44
Revises: bf6308af80aa
55
Create Date: 2022-11-25 12:04:18.424257
6-
76
"""
87

98
import sqlalchemy as sa
@@ -76,7 +75,7 @@ def _update_config(config: dict) -> dict:
7675
column_type = sa.Text()
7776
else:
7877
column_type = sa.String(column_size)
79-
batch_op.alter_column(column, type_=column_type) # type: ignore[attr-defined]
78+
batch_op.alter_column(column, type_=column_type)
8079

8180
assert mig_context.bind is not None, "Requires an existing bind"
8281
if mig_context.bind.dialect.name == "sqlite":
@@ -85,6 +84,6 @@ def _update_config(config: dict) -> dict:
8584
constraint = f'length("{column}")<={column_size} AND length("{column}")>=1'
8685
if old_column_size <= 32:
8786
# Constraint only exists for shorter strings.
88-
batch_op.drop_constraint(constraint_name) # type: ignore[attr-defined]
87+
batch_op.drop_constraint(constraint_name)
8988
if column_size <= 32:
90-
batch_op.create_check_constraint(constraint_name, sa.text(constraint)) # type: ignore
89+
batch_op.create_check_constraint(constraint_name, sa.text(constraint))

migrations/dimensions/035dcf13ef18.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
Revision ID: 035dcf13ef18
44
Revises: 1601d5973bf8
55
Create Date: 2022-05-19 15:40:18.561744
6-
76
"""
87
import logging
98
from typing import cast
@@ -26,9 +25,11 @@
2625

2726

2827
def upgrade() -> None:
29-
"""Summary of changes for this migration:
28+
"""Upgrade from version 6.0.1 to version 6.0.2.
29+
30+
Summary of changes for this migration:
3031
31-
- implied dimension columns (non-PK foreign keys) add NOT NULL
32+
- implied dimension columns (non-PK foreign keys) add NOT NULL.
3233
3334
Note that schema digests are not updated. Just before we updated code for
3435
6.0.2 versions we disabled validation of checksums. Older code that uses
@@ -82,7 +83,7 @@ def _do_migration(nullable: bool, manager_version: str) -> None:
8283
_LOG.info("Add NULL to column %s.%s", element_name, other_element)
8384
else:
8485
_LOG.info("Add NOT NULL to column %s.%s", element_name, other_element)
85-
batch_op.alter_column(other_element, nullable=nullable) # type: ignore[attr-defined]
86+
batch_op.alter_column(other_element, nullable=nullable)
8687

8788
attributes.update_manager_version(MANAGER, manager_version)
8889

mypy.ini

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,9 @@ warn_redundant_casts = True
55
ignore_missing_imports = False
66
disallow_untyped_defs = True
77
disallow_incomplete_defs = True
8+
strict_equality = True
9+
warn_unused_ignores = True
10+
warn_unreachable = True
811

912
[mypy-sqlalchemy.*]
1013
ignore_missing_imports = True
@@ -18,21 +21,3 @@ ignore_missing_imports = True
1821
[mypy-lsst.*]
1922
ignore_missing_imports = True
2023
ignore_errors = True
21-
22-
# Check all of daf.butler_migrate...
23-
24-
[mypy-lsst.daf.butler_migrate.*]
25-
ignore_missing_imports = False
26-
ignore_errors = False
27-
disallow_untyped_defs = True
28-
disallow_incomplete_defs = True
29-
strict_equality = True
30-
warn_unreachable = True
31-
warn_unused_ignores = True
32-
33-
; [mypy-lsst.daf.butler.*]
34-
; ignore_missing_imports = False
35-
; ignore_errors = False
36-
37-
; [mypy-lsst.daf.butler.cli.*]
38-
; ignore_missing_imports = True

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,5 +151,5 @@ checks = [
151151
exclude = [
152152
"^test_.*", # Do not test docstrings in test code.
153153
'^commands\.', # Click docstrings, not numpydoc
154-
'\.__exit__', # Do not check docstrings in context methods.
154+
'\._', # Do not check docstrings in private methods.
155155
]

python/lsst/daf/butler_migrate/config.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,8 @@ def from_mig_path(
7474
cfg.set_main_option("script_location", alembic_folder)
7575
cfg.set_section_option("alembic", "file_template", "%%(rev)s")
7676
cfg.set_section_option("alembic", "prepend_sys_path", ".")
77+
# We use space as separator in version_locations below.
78+
cfg.set_section_option("alembic", "path_separator", "space")
7779
_LOG.debug(
7880
"alembic_folder: %r, single_tree: %r, one_shot_tree: %r",
7981
alembic_folder,

0 commit comments

Comments
 (0)