diff --git a/api/src/opentrons/protocol_engine/__init__.py b/api/src/opentrons/protocol_engine/__init__.py index 5e90c7235bf..84c80c1555e 100644 --- a/api/src/opentrons/protocol_engine/__init__.py +++ b/api/src/opentrons/protocol_engine/__init__.py @@ -30,6 +30,11 @@ LabwareOffsetCreate, LabwareOffsetVector, LegacyLabwareOffsetLocation, + LabwareOffsetLocationSequence, + OnLabwareOffsetLocationSequenceComponent, + OnModuleOffsetLocationSequenceComponent, + OnAddressableAreaOffsetLocationSequenceComponent, + LabwareOffsetLocationSequenceComponents, LabwareMovementStrategy, AddressableOffsetVector, DeckPoint, @@ -96,7 +101,12 @@ # public value interfaces and models "LabwareOffset", "LabwareOffsetCreate", + "LabwareOffsetLocationSequence", "LabwareOffsetVector", + "OnLabwareOffsetLocationSequenceComponent", + "OnModuleOffsetLocationSequenceComponent", + "OnAddressableAreaOffsetLocationSequenceComponent", + "LabwareOffsetLocationSequenceComponents", "LegacyLabwareOffsetCreate", "LegacyLabwareOffsetLocation", "LabwareMovementStrategy", diff --git a/api/src/opentrons/protocol_engine/labware_offset_standardization.py b/api/src/opentrons/protocol_engine/labware_offset_standardization.py index 836d40cb700..74626af5595 100644 --- a/api/src/opentrons/protocol_engine/labware_offset_standardization.py +++ b/api/src/opentrons/protocol_engine/labware_offset_standardization.py @@ -37,9 +37,10 @@ def standardize_labware_offset_create( ) -def _legacy_offset_location_to_offset_location_sequence( +def legacy_offset_location_to_offset_location_sequence( location: LegacyLabwareOffsetLocation, deck_definition: DeckDefinitionV5 ) -> LabwareOffsetLocationSequence: + """Convert a legacy location to a new-style sequence.""" sequence: LabwareOffsetLocationSequence = [] if location.definitionUri: sequence.append( @@ -165,7 +166,7 @@ def _locations_for_create( } ) return ( - _legacy_offset_location_to_offset_location_sequence( + legacy_offset_location_to_offset_location_sequence( normalized, deck_definition ), normalized, diff --git a/api/src/opentrons/protocol_engine/types/__init__.py b/api/src/opentrons/protocol_engine/types/__init__.py index fbaef870f3e..bf1f524a7a7 100644 --- a/api/src/opentrons/protocol_engine/types/__init__.py +++ b/api/src/opentrons/protocol_engine/types/__init__.py @@ -94,6 +94,7 @@ OnLabwareOffsetLocationSequenceComponent, OnModuleOffsetLocationSequenceComponent, OnAddressableAreaOffsetLocationSequenceComponent, + LabwareOffsetLocationSequenceComponents, ) from .labware_offset_vector import LabwareOffsetVector from .well_position import ( @@ -204,6 +205,7 @@ # Labware offset location "LegacyLabwareOffsetLocation", "LabwareOffsetLocationSequence", + "LabwareOffsetLocationSequenceComponents", "OnLabwareOffsetLocationSequenceComponent", "OnModuleOffsetLocationSequenceComponent", "OnAddressableAreaOffsetLocationSequenceComponent", diff --git a/api/src/opentrons/protocol_engine/types/labware_offset_location.py b/api/src/opentrons/protocol_engine/types/labware_offset_location.py index 2b992a4da01..2a4ebe9ebe6 100644 --- a/api/src/opentrons/protocol_engine/types/labware_offset_location.py +++ b/api/src/opentrons/protocol_engine/types/labware_offset_location.py @@ -3,7 +3,7 @@ This is its own module to fix circular imports.
""" -from typing import Optional, Literal +from typing import Optional, Literal, Annotated from pydantic import BaseModel, Field @@ -48,12 +48,16 @@ class OnAddressableAreaOffsetLocationSequenceComponent(BaseModel): ) -LabwareOffsetLocationSequenceComponents = ( +LabwareOffsetLocationSequenceComponentsUnion = ( OnLabwareOffsetLocationSequenceComponent | OnModuleOffsetLocationSequenceComponent | OnAddressableAreaOffsetLocationSequenceComponent ) +LabwareOffsetLocationSequenceComponents = Annotated[ + LabwareOffsetLocationSequenceComponentsUnion, Field(discriminator="kind") +] + LabwareOffsetLocationSequence = list[LabwareOffsetLocationSequenceComponents] diff --git a/robot-server/robot_server/labware_offsets/_search_query_builder.py b/robot-server/robot_server/labware_offsets/_search_query_builder.py new file mode 100644 index 00000000000..d6630f59169 --- /dev/null +++ b/robot-server/robot_server/labware_offsets/_search_query_builder.py @@ -0,0 +1,177 @@ +"""Helper to build a search query.""" + +from __future__ import annotations +from typing import Final, TYPE_CHECKING + +import sqlalchemy + +from opentrons.protocol_engine import ModuleModel + +from robot_server.persistence.tables import ( + labware_offset_table, + labware_offset_location_sequence_components_table, +) +from .models import DoNotFilterType, DO_NOT_FILTER + +if TYPE_CHECKING: + from typing_extensions import Self + + +class SearchQueryBuilder: + """Helper class to build a search query. + + This object is stateful, and should be kept around just long enough to have the parameters + of a single search injected. + """ + + def __init__(self) -> None: + """Build the object.""" + super().__init__() + self._filter_original: Final = sqlalchemy.select( + labware_offset_table.c.row_id, + labware_offset_table.c.offset_id, + labware_offset_table.c.definition_uri, + labware_offset_table.c.vector_x, + labware_offset_table.c.vector_y, + labware_offset_table.c.vector_z, + labware_offset_table.c.created_at, + labware_offset_table.c.active, + labware_offset_location_sequence_components_table.c.sequence_ordinal, + labware_offset_location_sequence_components_table.c.component_kind, + labware_offset_location_sequence_components_table.c.primary_component_value, + ).select_from( + sqlalchemy.join( + labware_offset_table, + labware_offset_location_sequence_components_table, + labware_offset_table.c.row_id + == labware_offset_location_sequence_components_table.c.offset_id, + ) + ) + self._offset_location_alias: Final = ( + labware_offset_location_sequence_components_table.alias() + ) + self._current_base_filter_statement = self._filter_original + self._current_positive_location_filter: ( + sqlalchemy.sql.selectable.Exists | None + ) = None + self._current_negative_filter_subqueries: list[ + sqlalchemy.sql.selectable.Exists + ] = [] + + def _positive_query(self) -> sqlalchemy.sql.selectable.Exists: + if self._current_positive_location_filter is not None: + return self._current_positive_location_filter + return sqlalchemy.exists().where( + self._offset_location_alias.c.offset_id + == labware_offset_location_sequence_components_table.c.offset_id + ) + + def build_query(self) -> sqlalchemy.sql.selectable.Selectable: + """Render the query into a sqlalchemy object suitable for passing to the database.""" + statement = self._current_base_filter_statement + if self._current_positive_location_filter is not None: + statement = statement.where(self._current_positive_location_filter) + for subq in self._current_negative_filter_subqueries: + statement = 
statement.where(sqlalchemy.not_(subq)) + statement = statement.order_by(labware_offset_table.c.row_id).order_by( + labware_offset_location_sequence_components_table.c.sequence_ordinal + ) + return statement + + def do_active_filter(self, active: bool) -> Self: + """Filter to only rows that are active (active=True) or inactive (active=False).""" + self._current_base_filter_statement = self._current_base_filter_statement.where( + labware_offset_table.c.active == active + ) + return self + + def do_id_filter(self, id_filter: str | DoNotFilterType) -> Self: + """Filter to only the rows with the given offset ID.""" + if id_filter is DO_NOT_FILTER: + return self + + self._current_base_filter_statement = self._current_base_filter_statement.where( + labware_offset_table.c.offset_id == id_filter + ) + return self + + def do_definition_uri_filter( + self, definition_uri_filter: str | DoNotFilterType + ) -> Self: + """Filter to only the rows of offsets that apply to the given labware definition URI.""" + if definition_uri_filter is DO_NOT_FILTER: + return self + self._current_base_filter_statement = self._current_base_filter_statement.where( + labware_offset_table.c.definition_uri == definition_uri_filter + ) + return self + + def do_on_addressable_area_filter( + self, + addressable_area_filter: str | DoNotFilterType, + ) -> Self: + """Filter to only the rows of offsets located on the given addressable area.""" + if addressable_area_filter is DO_NOT_FILTER: + return self + self._current_positive_location_filter = ( + self._positive_query() + .where(self._offset_location_alias.c.component_kind == "onAddressableArea") + .where( + self._offset_location_alias.c.primary_component_value + == addressable_area_filter + ) + ) + return self + + def do_on_labware_filter( + self, labware_uri_filter: str | DoNotFilterType | None + ) -> Self: + """Filter to the rows of an offset located on the given labware (or no labware).""" + if labware_uri_filter is DO_NOT_FILTER: + return self + if labware_uri_filter is None: + self._current_negative_filter_subqueries.append( + sqlalchemy.exists() + .where( + self._offset_location_alias.c.offset_id + == labware_offset_location_sequence_components_table.c.offset_id + ) + .where(self._offset_location_alias.c.component_kind == "onLabware") + ) + return self + self._current_positive_location_filter = ( + self._positive_query() + .where(self._offset_location_alias.c.component_kind == "onLabware") + .where( + self._offset_location_alias.c.primary_component_value + == labware_uri_filter + ) + ) + return self + + def do_on_module_filter( + self, + module_model_filter: ModuleModel | DoNotFilterType | None, + ) -> Self: + """Filter to the rows of an offset located on the given module (or no module).""" + if module_model_filter is DO_NOT_FILTER: + return self + if module_model_filter is None: + self._current_negative_filter_subqueries.append( + sqlalchemy.exists() + .where( + self._offset_location_alias.c.offset_id + == labware_offset_location_sequence_components_table.c.offset_id + ) + .where(self._offset_location_alias.c.component_kind == "onModule") + ) + return self + self._current_positive_location_filter = ( + self._positive_query() + .where(self._offset_location_alias.c.component_kind == "onModule") + .where( + self._offset_location_alias.c.primary_component_value + == module_model_filter.value + ) + ) + return self diff --git a/robot-server/robot_server/labware_offsets/models.py b/robot-server/robot_server/labware_offsets/models.py index fcc3d2f2200..7b3f523fccd 100644 ---
a/robot-server/robot_server/labware_offsets/models.py +++ b/robot-server/robot_server/labware_offsets/models.py @@ -1,11 +1,98 @@ """Request/response models for the `/labwareOffsets` endpoints.""" +from datetime import datetime +import enum +from typing import Literal, Annotated, Final, TypeAlias, Sequence -from typing import Literal +from pydantic import BaseModel, Field + +from opentrons.protocol_engine import ( + LabwareOffsetVector, +) +from opentrons.protocol_engine.types.labware_offset_location import ( + LabwareOffsetLocationSequenceComponentsUnion, +) from robot_server.errors.error_responses import ErrorDetails +class _DoNotFilter(enum.Enum): + DO_NOT_FILTER = enum.auto() + + +DO_NOT_FILTER: Final = _DoNotFilter.DO_NOT_FILTER +"""A sentinel value for when a filter should not be applied. + +This is different from filtering on `None`, which returns only entries where the +value is equal to `None`. +""" + + +DoNotFilterType: TypeAlias = Literal[_DoNotFilter.DO_NOT_FILTER] +"""The type of `DO_NOT_FILTER`, as `NoneType` is to `None`. + +Unfortunately, mypy doesn't let us write `Literal[DO_NOT_FILTER]`. Use this instead. +""" + + +class UnknownLabwareOffsetLocationSequenceComponent(BaseModel): + """A labware offset location sequence component from the future.""" + + kind: Literal["unknown"] = "unknown" + storedKind: str + primaryValue: str + + +# This is redefined here so we can add stuff to it easily +StoredLabwareOffsetLocationSequenceComponents = Annotated[ + LabwareOffsetLocationSequenceComponentsUnion, Field(discriminator="kind") +] + + +ReturnedLabwareOffsetLocationSequenceComponents = Annotated[ + LabwareOffsetLocationSequenceComponentsUnion + | UnknownLabwareOffsetLocationSequenceComponent, + Field(discriminator="kind"), +] + + +class StoredLabwareOffsetCreate(BaseModel): + """Create an offset for storage.""" + + definitionUri: str = Field(..., description="The URI for the labware's definition.") + + locationSequence: Sequence[StoredLabwareOffsetLocationSequenceComponents] = Field( + ..., + description="Where the labware is located on the robot. Can represent all locations, but may not be present for older runs.", + min_length=1, + ) + vector: LabwareOffsetVector = Field( + ..., + description="The offset applied to matching labware.", + ) + + +class StoredLabwareOffset(BaseModel): + """An offset that the robot adds to a pipette's position when it moves to labware.""" + + # This is a separate thing from the model defined in protocol engine because as a new API it does + # not have to handle legacy locations. There is probably a better way to do this than to copy the model + # contents, but I'm not sure what it is. + id: str = Field(..., description="Unique labware offset record identifier.") + createdAt: datetime = Field(..., description="When this labware offset was added.") + definitionUri: str = Field(..., description="The URI for the labware's definition.") + + locationSequence: Sequence[ReturnedLabwareOffsetLocationSequenceComponents] = Field( + ..., + description="Where the labware is located on the robot. 
Can represent all locations, but may not be present for older runs.", + min_length=1, + ) + vector: LabwareOffsetVector = Field( + ..., + description="The offset applied to matching labware.", + ) + + class LabwareOffsetNotFound(ErrorDetails): """An error returned when a requested labware offset does not exist.""" diff --git a/robot-server/robot_server/labware_offsets/router.py b/robot-server/robot_server/labware_offsets/router.py index 3f0ada1d46e..4ebf532d657 100644 --- a/robot-server/robot_server/labware_offsets/router.py +++ b/robot-server/robot_server/labware_offsets/router.py @@ -9,12 +9,7 @@ from pydantic.json_schema import SkipJsonSchema from server_utils.fastapi_utils.light_router import LightRouter -from opentrons.protocol_engine import ( - LabwareOffset, - LegacyLabwareOffsetCreate, - ModuleModel, -) -from opentrons.types import DeckSlotName +from opentrons.protocol_engine import ModuleModel from robot_server.labware_offsets.models import LabwareOffsetNotFound from robot_server.service.dependencies import get_current_time, get_unique_id @@ -28,12 +23,17 @@ ) from .store import ( - DO_NOT_FILTER, - DoNotFilterType, LabwareOffsetNotFoundError, LabwareOffsetStore, + IncomingStoredLabwareOffset, ) from .fastapi_dependencies import get_labware_offset_store +from .models import ( + StoredLabwareOffset, + StoredLabwareOffsetCreate, + DO_NOT_FILTER, + DoNotFilterType, +) router = LightRouter() @@ -58,18 +58,26 @@ async def post_labware_offset( # noqa: D103 store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)], new_offset_id: Annotated[str, fastapi.Depends(get_unique_id)], new_offset_created_at: Annotated[datetime, fastapi.Depends(get_current_time)], - request_body: Annotated[RequestModel[LegacyLabwareOffsetCreate], fastapi.Body()], -) -> PydanticResponse[SimpleBody[LabwareOffset]]: - new_offset = LabwareOffset.model_construct( + request_body: Annotated[RequestModel[StoredLabwareOffsetCreate], fastapi.Body()], +) -> PydanticResponse[SimpleBody[StoredLabwareOffset]]: + new_offset = IncomingStoredLabwareOffset( id=new_offset_id, createdAt=new_offset_created_at, definitionUri=request_body.data.definitionUri, - location=request_body.data.location, + locationSequence=request_body.data.locationSequence, vector=request_body.data.vector, ) store.add(new_offset) return await PydanticResponse.create( - content=SimpleBody.model_construct(data=new_offset), + content=SimpleBody.model_construct( + data=StoredLabwareOffset( + id=new_offset_id, + createdAt=new_offset_created_at, + definitionUri=request_body.data.definitionUri, + locationSequence=request_body.data.locationSequence, + vector=request_body.data.vector, + ) + ), status_code=201, ) @@ -101,8 +109,8 @@ async def get_labware_offsets( # noqa: D103 ), ), ] = DO_NOT_FILTER, - location_slot_name: Annotated[ - Json[DeckSlotName] | SkipJsonSchema[DoNotFilterType], + location_addressable_area_name: Annotated[ + Json[str] | SkipJsonSchema[DoNotFilterType], fastapi.Query( - alias="locationSlotName", - description="Filter for exact matches on the `location.slotName` field.", + alias="locationAddressableAreaName", + description="Filter for exact matches on the addressable area name of the location sequence.", @@ -141,7 +149,7 @@ async def get_labware_offsets( # noqa: D103 alias="pageLength", description="The maximum number of entries to return." ), ] = "unlimited", -) -> PydanticResponse[SimpleMultiBody[LabwareOffset]]: +) -> PydanticResponse[SimpleMultiBody[StoredLabwareOffset]]: if cursor not in (0, None) or page_length != "unlimited": # todo(mm, 2024-12-06): Support this when LabwareOffsetStore supports it.
raise NotImplementedError( @@ -151,7 +159,7 @@ async def get_labware_offsets( # noqa: D103 result_data = store.search( id_filter=id, definition_uri_filter=definition_uri, - location_slot_name_filter=location_slot_name, + location_addressable_area_filter=location_addressable_area_name, location_definition_uri_filter=location_definition_uri, location_module_model_filter=location_module_model, ) @@ -163,7 +171,7 @@ async def get_labware_offsets( # noqa: D103 ) return await PydanticResponse.create( - SimpleMultiBody[LabwareOffset].model_construct( + SimpleMultiBody[StoredLabwareOffset].model_construct( data=result_data, meta=meta, ) @@ -183,7 +191,7 @@ async def delete_labware_offset( # noqa: D103 str, fastapi.Path(description="The `id` field of the offset to delete."), ], -) -> PydanticResponse[SimpleBody[LabwareOffset]]: +) -> PydanticResponse[SimpleBody[StoredLabwareOffset]]: try: deleted_offset = store.delete(offset_id=id) except LabwareOffsetNotFoundError as e: @@ -201,7 +209,7 @@ async def delete_labware_offset( # noqa: D103 include_in_schema=False, # todo(mm, 2025-01-08): Include for v8.4.0. ) async def delete_all_labware_offsets( # noqa: D103 - store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)] + store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)], ) -> PydanticResponse[SimpleEmptyBody]: store.delete_all() return await PydanticResponse.create(SimpleEmptyBody.model_construct()) diff --git a/robot-server/robot_server/labware_offsets/store.py b/robot-server/robot_server/labware_offsets/store.py index dbeccc728a1..e17c1a30686 100644 --- a/robot-server/robot_server/labware_offsets/store.py +++ b/robot-server/robot_server/labware_offsets/store.py @@ -1,43 +1,52 @@ # noqa: D100 -import enum -from typing import Final, Literal, TypeAlias +from datetime import datetime +from dataclasses import dataclass +from typing import Iterator, Sequence +from typing_extensions import assert_never from opentrons.protocol_engine.types import ( - LabwareOffset, - LegacyLabwareOffsetLocation, LabwareOffsetVector, ModuleModel, + OnAddressableAreaOffsetLocationSequenceComponent, + OnModuleOffsetLocationSequenceComponent, + OnLabwareOffsetLocationSequenceComponent, ) -from opentrons.types import DeckSlotName -from robot_server.persistence.tables import labware_offset_table +from robot_server.persistence.tables import ( + labware_offset_table, + labware_offset_location_sequence_components_table, +) +from .models import ( + StoredLabwareOffset, + DoNotFilterType, + DO_NOT_FILTER, + StoredLabwareOffsetLocationSequenceComponents, + ReturnedLabwareOffsetLocationSequenceComponents, + UnknownLabwareOffsetLocationSequenceComponent, +) import sqlalchemy import sqlalchemy.exc +from ._search_query_builder import SearchQueryBuilder -class _DoNotFilter(enum.Enum): - DO_NOT_FILTER = enum.auto() - - -DO_NOT_FILTER: Final = _DoNotFilter.DO_NOT_FILTER -"""A sentinel value for when a filter should not be applied. +ReturnedLabwareOffsetLocationSequence = Sequence[ + ReturnedLabwareOffsetLocationSequenceComponents +] -This is different from filtering on `None`, which returns only entries where the -value is equal to `None`. -""" +@dataclass +class IncomingStoredLabwareOffset: + """Internal class for representing valid incoming offsets.""" -DoNotFilterType: TypeAlias = Literal[_DoNotFilter.DO_NOT_FILTER] -"""The type of `DO_NOT_FILTER`, as `NoneType` is to `None`. 
+ id: str + createdAt: datetime + definitionUri: str + locationSequence: Sequence[StoredLabwareOffsetLocationSequenceComponents] + vector: LabwareOffsetVector -Unfortunately, mypy doesn't let us write `Literal[DO_NOT_FILTER]`. Use this instead. -""" - -# todo(mm, 2024-12-06): Convert to be SQL-based and persistent instead of in-memory. -# https://opentrons.atlassian.net/browse/EXEC-1015 class LabwareOffsetStore: """A persistent store for labware offsets, to support the `/labwareOffsets` endpoints.""" @@ -50,78 +59,74 @@ def __init__(self, sql_engine: sqlalchemy.engine.Engine) -> None: """ self._sql_engine = sql_engine - def add(self, offset: LabwareOffset) -> None: + def add( + self, + offset: IncomingStoredLabwareOffset, + ) -> None: """Store a new labware offset.""" with self._sql_engine.begin() as transaction: + offset_row_id = transaction.execute( + sqlalchemy.insert(labware_offset_table).values( + _pydantic_to_sql_offset(offset) + ) + ).inserted_primary_key.row_id transaction.execute( - sqlalchemy.insert(labware_offset_table).values(_pydantic_to_sql(offset)) + sqlalchemy.insert( + labware_offset_location_sequence_components_table + ).values( + list( + _pydantic_to_sql_location_sequence_iterator( + offset, offset_row_id + ) + ) + ) ) def search( self, id_filter: str | DoNotFilterType = DO_NOT_FILTER, definition_uri_filter: str | DoNotFilterType = DO_NOT_FILTER, - location_slot_name_filter: DeckSlotName | DoNotFilterType = DO_NOT_FILTER, - location_module_model_filter: ModuleModel - | None - | DoNotFilterType = DO_NOT_FILTER, + location_addressable_area_filter: str | DoNotFilterType = DO_NOT_FILTER, + location_module_model_filter: ( + ModuleModel | None | DoNotFilterType + ) = DO_NOT_FILTER, location_definition_uri_filter: str | None | DoNotFilterType = DO_NOT_FILTER, # todo(mm, 2024-12-06): Support pagination (cursor & pageLength query params). # The logic for that is currently duplicated across several places in # robot-server and api. We should try to clean that up, or at least avoid # making it worse. 
- ) -> list[LabwareOffset]: + ) -> list[StoredLabwareOffset]: """Return all matching labware offsets in order from oldest-added to newest.""" - statement = ( - sqlalchemy.select(labware_offset_table) - .order_by(labware_offset_table.c.row_id) - .where(labware_offset_table.c.active == True) # noqa: E712 + builder = ( + SearchQueryBuilder() + .do_active_filter(True) + .do_id_filter(id_filter) + .do_definition_uri_filter(definition_uri_filter) + .do_on_addressable_area_filter(location_addressable_area_filter) + .do_on_module_filter(location_module_model_filter) + .do_on_labware_filter(location_definition_uri_filter) ) - - if id_filter is not DO_NOT_FILTER: - statement = statement.where(labware_offset_table.c.offset_id == id_filter) - if definition_uri_filter is not DO_NOT_FILTER: - statement = statement.where( - labware_offset_table.c.definition_uri == definition_uri_filter - ) - if location_slot_name_filter is not DO_NOT_FILTER: - statement = statement.where( - labware_offset_table.c.location_slot_name - == location_slot_name_filter.value - ) - if location_module_model_filter is not DO_NOT_FILTER: - location_module_model_filter_value = ( - location_module_model_filter.value - if location_module_model_filter is not None - else None - ) - statement = statement.where( - labware_offset_table.c.location_module_model - == location_module_model_filter_value - ) - if location_definition_uri_filter is not DO_NOT_FILTER: - statement = statement.where( - labware_offset_table.c.location_definition_uri - == location_definition_uri_filter - ) + query = builder.build_query() with self._sql_engine.begin() as transaction: - result = transaction.execute(statement).all() + result = transaction.execute(query).all() - return [_sql_to_pydantic(row) for row in result] + if len(result) == 0: + return [] + return list(_collate_sql_to_pydantic(result)) - def delete(self, offset_id: str) -> LabwareOffset: + def delete(self, offset_id: str) -> StoredLabwareOffset: """Delete a labware offset by its ID. Return what was just deleted.""" + builder = SearchQueryBuilder().do_id_filter(offset_id) + query = builder.build_query() with self._sql_engine.begin() as transaction: try: - row_to_delete = transaction.execute( - sqlalchemy.select(labware_offset_table).where( - labware_offset_table.c.offset_id == offset_id - ) - ).one() + offset_rows = transaction.execute(query).all() except sqlalchemy.exc.NoResultFound: raise LabwareOffsetNotFoundError(bad_offset_id=offset_id) from None - if not row_to_delete.active: + if len(offset_rows) == 0: + raise LabwareOffsetNotFoundError(bad_offset_id=offset_id) + if not offset_rows[0].active: # Already soft-deleted. 
raise LabwareOffsetNotFoundError(bad_offset_id=offset_id) @@ -131,7 +136,7 @@ def delete(self, offset_id: str) -> LabwareOffset: .values(active=False) ) - return _sql_to_pydantic(row_to_delete) + return next(_collate_sql_to_pydantic(offset_rows)) def delete_all(self) -> None: """Delete all labware offsets.""" @@ -149,36 +154,120 @@ def __init__(self, bad_offset_id: str) -> None: self.bad_offset_id = bad_offset_id -def _sql_to_pydantic(row: sqlalchemy.engine.Row) -> LabwareOffset: - return LabwareOffset( - id=row.offset_id, - createdAt=row.created_at, - definitionUri=row.definition_uri, - location=LegacyLabwareOffsetLocation( - slotName=DeckSlotName(row.location_slot_name), - moduleModel=row.location_module_model, - definitionUri=row.location_definition_uri, - ), - vector=LabwareOffsetVector( - x=row.vector_x, - y=row.vector_y, - z=row.vector_z, +def _sql_sequence_component_to_pydantic_sequence_component( + component_row: sqlalchemy.engine.Row, +) -> ReturnedLabwareOffsetLocationSequenceComponents: + if component_row.component_kind == "onLabware": + return OnLabwareOffsetLocationSequenceComponent( + labwareUri=component_row.primary_component_value + ) + elif component_row.component_kind == "onModule": + return OnModuleOffsetLocationSequenceComponent( + moduleModel=ModuleModel(component_row.primary_component_value) + ) + elif component_row.component_kind == "onAddressableArea": + return OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName=component_row.primary_component_value + ) + else: + return UnknownLabwareOffsetLocationSequenceComponent( + storedKind=component_row.component_kind, + primaryValue=component_row.primary_component_value, + ) + + +def _collate_sql_locations( + first_row: sqlalchemy.engine.Row, rest_rows: Iterator[sqlalchemy.engine.Row] +) -> tuple[ + list[ReturnedLabwareOffsetLocationSequenceComponents], sqlalchemy.engine.Row | None +]: + offset_id = first_row.offset_id + location_sequence: list[ReturnedLabwareOffsetLocationSequenceComponents] = [ + _sql_sequence_component_to_pydantic_sequence_component(first_row) + ] + while True: + try: + row = next(rest_rows) + except StopIteration: + return location_sequence, None + if row.offset_id != offset_id: + return location_sequence, row + location_sequence.append( + _sql_sequence_component_to_pydantic_sequence_component(row) + ) + + +def _sql_to_pydantic( + first_row: sqlalchemy.engine.Row, rest_rows: Iterator[sqlalchemy.engine.Row] +) -> tuple[StoredLabwareOffset, sqlalchemy.engine.Row | None]: + location_sequence, next_row = _collate_sql_locations(first_row, rest_rows) + return ( + StoredLabwareOffset( + id=first_row.offset_id, + createdAt=first_row.created_at, + definitionUri=first_row.definition_uri, + locationSequence=location_sequence, + vector=LabwareOffsetVector( + x=first_row.vector_x, + y=first_row.vector_y, + z=first_row.vector_z, + ), ), + next_row, ) -def _pydantic_to_sql(labware_offset: LabwareOffset) -> dict[str, object]: +def _collate_sql_to_pydantic( + query_results: list[sqlalchemy.engine.Row], +) -> Iterator[StoredLabwareOffset]: + row_iter = iter(query_results) + row: sqlalchemy.engine.Row | None = next(row_iter) + while row: + result, row = _sql_to_pydantic(row, row_iter) + yield result + + +def _pydantic_to_sql_offset( + labware_offset: IncomingStoredLabwareOffset, +) -> dict[str, object]: return dict( offset_id=labware_offset.id, definition_uri=labware_offset.definitionUri, - location_slot_name=labware_offset.location.slotName.value, - 
location_module_model=labware_offset.location.moduleModel.value - if labware_offset.location.moduleModel is not None - else None, - location_definition_uri=labware_offset.location.definitionUri, vector_x=labware_offset.vector.x, vector_y=labware_offset.vector.y, vector_z=labware_offset.vector.z, created_at=labware_offset.createdAt, active=True, ) + + +def _pydantic_to_sql_location_sequence_iterator( + labware_offset: IncomingStoredLabwareOffset, offset_row_id: int +) -> Iterator[dict[str, object]]: + for index, component in enumerate(labware_offset.locationSequence): + if isinstance(component, OnLabwareOffsetLocationSequenceComponent): + yield dict( + offset_id=offset_row_id, + sequence_ordinal=index, + component_kind=component.kind, + primary_component_value=component.labwareUri, + component_value_json=component.model_dump_json(), + ) + elif isinstance(component, OnModuleOffsetLocationSequenceComponent): + yield dict( + offset_id=offset_row_id, + sequence_ordinal=index, + component_kind=component.kind, + primary_component_value=component.moduleModel.value, + component_value_json=component.model_dump_json(), + ) + elif isinstance(component, OnAddressableAreaOffsetLocationSequenceComponent): + yield dict( + offset_id=offset_row_id, + sequence_ordinal=index, + component_kind=component.kind, + primary_component_value=component.addressableAreaName, + component_value_json=component.model_dump_json(), + ) + else: + assert_never(component) diff --git a/robot-server/robot_server/persistence/_migrations/v9_to_v10.py b/robot-server/robot_server/persistence/_migrations/v9_to_v10.py new file mode 100644 index 00000000000..e76ea7217b1 --- /dev/null +++ b/robot-server/robot_server/persistence/_migrations/v9_to_v10.py @@ -0,0 +1,126 @@ +"""Migrate the persistence directory from schema 9 to schema 10. + +Summary of changes from schema 9: + +- Adds a new `labware_offset_sequence_components` table. 
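+- Replaces the `labware_offset` table with a new `labware_offset_with_sequence` table, migrating existing rows and expanding each legacy location into location sequence components.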
+""" + +from pathlib import Path + +import sqlalchemy + +from opentrons_shared_data.deck.types import DeckDefinitionV5 +from opentrons_shared_data.deck import load as load_deck + +from opentrons.types import DeckSlotName +from opentrons.protocols.api_support.deck_type import ( + guess_from_global_config as guess_deck_type_from_global_config, +) +from opentrons.protocol_engine import LegacyLabwareOffsetLocation, DeckType, ModuleModel +from opentrons.protocol_engine.labware_offset_standardization import ( + legacy_offset_location_to_offset_location_sequence, +) + +from robot_server.persistence.database import sql_engine_ctx +from robot_server.persistence.file_and_directory_names import DB_FILE +from robot_server.persistence.tables import schema_10, schema_9 + +from ._util import copy_contents +from .._folder_migrator import Migration + + +class Migration9to10(Migration): # noqa: D101 + def migrate(self, source_dir: Path, dest_dir: Path) -> None: + """Migrate the persistence directory from schema 9 to 10.""" + copy_contents(source_dir=source_dir, dest_dir=dest_dir) + + # First we create the new version of our labware offsets table and sequence table + with sql_engine_ctx( + dest_dir / DB_FILE + ) as engine, engine.begin() as transaction: + schema_10.labware_offset_table.create(transaction) + schema_10.labware_offset_location_sequence_components_table.create( + transaction + ) + # Then we upmigrate the data to the new tables + _upmigrate_stored_offsets(transaction) + # Then, we drop the table with we don't care about anymore + schema_9.labware_offset_table.drop(transaction) + + +def _upmigrate_stored_offsets(connection: sqlalchemy.engine.Connection) -> None: + # grab the deck def. middlewares aren't up yet so we can't use the nice version + deck_definition = load_deck( + DeckType(guess_deck_type_from_global_config()), version=5 + ) + + offsets = connection.execute(sqlalchemy.select(schema_9.labware_offset_table)) + + for offset in offsets: + new_row = connection.execute( + sqlalchemy.insert(schema_10.labware_offset_table).values( + _v9_offset_to_v10_offset(offset) + ) + ).inserted_primary_key.row_id + connection.execute( + sqlalchemy.insert( + schema_10.labware_offset_location_sequence_components_table + ).values( + _v9_offset_to_v10_offset_locations(offset, new_row, deck_definition) + ) + ) + + +def _v9_offset_to_v10_offset(v9_offset: sqlalchemy.engine.Row) -> dict[str, object]: + return dict( + offset_id=v9_offset.offset_id, + definition_uri=v9_offset.definition_uri, + vector_x=v9_offset.vector_x, + vector_y=v9_offset.vector_y, + vector_z=v9_offset.vector_z, + created_at=v9_offset.created_at, + active=v9_offset.active, + ) + + +def _v9_offset_to_v10_offset_locations( + v9_offset: sqlalchemy.engine.Row, v10_id: int, deck_definition: DeckDefinitionV5 +) -> list[dict[str, object]]: + location_sequence = legacy_offset_location_to_offset_location_sequence( + LegacyLabwareOffsetLocation( + slotName=DeckSlotName(v9_offset.location_slot_name), + moduleModel=( + ModuleModel(v9_offset.location_module_model) + if v9_offset.location_module_model is not None + else None + ), + definitionUri=v9_offset.location_definition_uri, + ), + deck_definition, + ) + values: list[dict[str, object]] = [] + for index, sequence_component in enumerate(location_sequence): + primary_component_value = "" + component_value_json = "" + if sequence_component.kind == "onLabware": + primary_component_value = sequence_component.labwareUri + component_value_json = sequence_component.model_dump_json() + elif sequence_component.kind 
== "onModule": + primary_component_value = sequence_component.moduleModel.value + component_value_json = sequence_component.model_dump_json() + elif sequence_component.kind == "onAddressableArea": + primary_component_value = sequence_component.addressableAreaName + component_value_json = sequence_component.model_dump_json() + else: + # This should never happen since we're exhaustively checking kinds here + continue + values.append( + dict( + offset_id=v10_id, + sequence_ordinal=index, + component_kind=sequence_component.kind, + primary_component_value=primary_component_value, + component_value_json=component_value_json, + ) + ) + return values diff --git a/robot-server/robot_server/persistence/file_and_directory_names.py b/robot-server/robot_server/persistence/file_and_directory_names.py index 1d191a0f311..11613dab801 100644 --- a/robot-server/robot_server/persistence/file_and_directory_names.py +++ b/robot-server/robot_server/persistence/file_and_directory_names.py @@ -8,7 +8,7 @@ from typing import Final -LATEST_VERSION_DIRECTORY: Final = "9" +LATEST_VERSION_DIRECTORY: Final = "10" DECK_CONFIGURATION_FILE: Final = "deck_configuration.json" PROTOCOLS_DIRECTORY: Final = "protocols" diff --git a/robot-server/robot_server/persistence/persistence_directory.py b/robot-server/robot_server/persistence/persistence_directory.py index e5a86b31af2..4df4a111e1c 100644 --- a/robot-server/robot_server/persistence/persistence_directory.py +++ b/robot-server/robot_server/persistence/persistence_directory.py @@ -1,6 +1,5 @@ """Create or reset the server's persistence directory.""" - from pathlib import Path from logging import getLogger from shutil import rmtree @@ -19,6 +18,7 @@ v6_to_v7, v7_to_v8, v8_to_v9, + v9_to_v10, ) from .file_and_directory_names import LATEST_VERSION_DIRECTORY @@ -69,7 +69,8 @@ def make_migration_orchestrator(prepared_root: Path) -> MigrationOrchestrator: # internal robots. v6_to_v7.Migration6to7(subdirectory="7.1"), v7_to_v8.Migration7to8(subdirectory="8"), - v8_to_v9.Migration8to9(subdirectory=LATEST_VERSION_DIRECTORY), + v8_to_v9.Migration8to9(subdirectory="9"), + v9_to_v10.Migration9to10(subdirectory=LATEST_VERSION_DIRECTORY), ], temp_file_prefix="temp-", ) diff --git a/robot-server/robot_server/persistence/tables/__init__.py b/robot-server/robot_server/persistence/tables/__init__.py index 3c20c3f8a93..42ed01005d6 100644 --- a/robot-server/robot_server/persistence/tables/__init__.py +++ b/robot-server/robot_server/persistence/tables/__init__.py @@ -1,7 +1,7 @@ """SQL database schemas.""" # Re-export the latest schema. 
-from .schema_9 import ( +from .schema_10 import ( metadata, protocol_table, analysis_table, @@ -14,6 +14,7 @@ data_files_table, boolean_setting_table, labware_offset_table, + labware_offset_location_sequence_components_table, PrimitiveParamSQLEnum, ProtocolKindSQLEnum, BooleanSettingKey, @@ -35,6 +36,7 @@ "data_files_table", "boolean_setting_table", "labware_offset_table", + "labware_offset_location_sequence_components_table", "PrimitiveParamSQLEnum", "ProtocolKindSQLEnum", "BooleanSettingKey", diff --git a/robot-server/robot_server/persistence/tables/schema_10.py b/robot-server/robot_server/persistence/tables/schema_10.py new file mode 100644 index 00000000000..b403f2feeb1 --- /dev/null +++ b/robot-server/robot_server/persistence/tables/schema_10.py @@ -0,0 +1,420 @@ +"""v10 of our SQLite schema.""" + +import enum +import sqlalchemy + +from robot_server.persistence._utc_datetime import UTCDateTime + + +metadata = sqlalchemy.MetaData() + + +class PrimitiveParamSQLEnum(enum.Enum): + """Enum type to store primitive param type.""" + + INT = "int" + FLOAT = "float" + BOOL = "bool" + STR = "str" + + +class ProtocolKindSQLEnum(enum.Enum): + """What kind a stored protocol is.""" + + STANDARD = "standard" + QUICK_TRANSFER = "quick-transfer" + + +class DataFileSourceSQLEnum(enum.Enum): + """The source this data file is from.""" + + UPLOADED = "uploaded" + GENERATED = "generated" + + +class CommandStatusSQLEnum(enum.Enum): + """Command status sql enum.""" + + QUEUED = "queued" + RUNNING = "running" + SUCCEEDED = "succeeded" + FAILED = "failed" + + +protocol_table = sqlalchemy.Table( + "protocol", + metadata, + sqlalchemy.Column( + "id", + sqlalchemy.String, + primary_key=True, + ), + sqlalchemy.Column( + "created_at", + UTCDateTime, + nullable=False, + ), + sqlalchemy.Column("protocol_key", sqlalchemy.String, nullable=True), + sqlalchemy.Column( + "protocol_kind", + sqlalchemy.Enum( + ProtocolKindSQLEnum, + values_callable=lambda obj: [e.value for e in obj], + validate_strings=True, + create_constraint=True, + ), + index=True, + nullable=False, + ), +) + + +analysis_table = sqlalchemy.Table( + "analysis", + metadata, + sqlalchemy.Column( + "id", + sqlalchemy.String, + primary_key=True, + ), + sqlalchemy.Column( + "protocol_id", + sqlalchemy.String, + sqlalchemy.ForeignKey("protocol.id"), + index=True, + nullable=False, + ), + sqlalchemy.Column( + "analyzer_version", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "completed_analysis", + # Stores a JSON string. See CompletedAnalysisStore. + sqlalchemy.String, + nullable=False, + ), +) + + +analysis_primitive_type_rtp_table = sqlalchemy.Table( + "analysis_primitive_rtp_table", + metadata, + sqlalchemy.Column( + "row_id", + sqlalchemy.Integer, + primary_key=True, + ), + sqlalchemy.Column( + "analysis_id", + sqlalchemy.ForeignKey("analysis.id"), + nullable=False, + ), + sqlalchemy.Column( + "parameter_variable_name", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "parameter_type", + sqlalchemy.Enum( + PrimitiveParamSQLEnum, + values_callable=lambda obj: [e.value for e in obj], + create_constraint=True, + # todo(mm, 2024-09-24): Can we add validate_strings=True here? 
+ ), + nullable=False, + ), + sqlalchemy.Column( + "parameter_value", + sqlalchemy.String, + nullable=False, + ), +) + + +analysis_csv_rtp_table = sqlalchemy.Table( + "analysis_csv_rtp_table", + metadata, + sqlalchemy.Column( + "row_id", + sqlalchemy.Integer, + primary_key=True, + ), + sqlalchemy.Column( + "analysis_id", + sqlalchemy.ForeignKey("analysis.id"), + nullable=False, + ), + sqlalchemy.Column( + "parameter_variable_name", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "file_id", + sqlalchemy.ForeignKey("data_files.id"), + nullable=True, + ), +) + + +run_table = sqlalchemy.Table( + "run", + metadata, + sqlalchemy.Column( + "id", + sqlalchemy.String, + primary_key=True, + ), + sqlalchemy.Column( + "created_at", + UTCDateTime, + nullable=False, + ), + sqlalchemy.Column( + "protocol_id", + sqlalchemy.String, + sqlalchemy.ForeignKey("protocol.id"), + nullable=True, + ), + sqlalchemy.Column( + "state_summary", + sqlalchemy.String, + nullable=True, + ), + sqlalchemy.Column("engine_status", sqlalchemy.String, nullable=True), + sqlalchemy.Column("_updated_at", UTCDateTime, nullable=True), + sqlalchemy.Column( + "run_time_parameters", + # Stores a JSON string. See RunStore. + sqlalchemy.String, + nullable=True, + ), +) + + +action_table = sqlalchemy.Table( + "action", + metadata, + sqlalchemy.Column( + "id", + sqlalchemy.String, + primary_key=True, + ), + sqlalchemy.Column("created_at", UTCDateTime, nullable=False), + sqlalchemy.Column("action_type", sqlalchemy.String, nullable=False), + sqlalchemy.Column( + "run_id", + sqlalchemy.String, + sqlalchemy.ForeignKey("run.id"), + nullable=False, + ), +) + + +run_command_table = sqlalchemy.Table( + "run_command", + metadata, + sqlalchemy.Column("row_id", sqlalchemy.Integer, primary_key=True), + sqlalchemy.Column( + "run_id", sqlalchemy.String, sqlalchemy.ForeignKey("run.id"), nullable=False + ), + # command_index in commands enumeration + sqlalchemy.Column("index_in_run", sqlalchemy.Integer, nullable=False), + sqlalchemy.Column("command_id", sqlalchemy.String, nullable=False), + sqlalchemy.Column("command", sqlalchemy.String, nullable=False), + sqlalchemy.Column( + "command_intent", + sqlalchemy.String, + # nullable=True to match the underlying SQL, which is nullable because of a bug + # in the migration that introduced this column. This is not intended to ever be + # null in practice. + nullable=True, + ), + sqlalchemy.Column("command_error", sqlalchemy.String, nullable=True), + sqlalchemy.Column( + "command_status", + sqlalchemy.Enum( + CommandStatusSQLEnum, + values_callable=lambda obj: [e.value for e in obj], + validate_strings=True, + # nullable=True because it was easier for the migration to add the column + # this way. This is not intended to ever be null in practice. + nullable=True, + # todo(mm, 2024-11-20): We want create_constraint=True here. Something + # about the way we compare SQL in test_tables.py is making that difficult-- + # even when we correctly add the constraint in the migration, the SQL + # doesn't compare equal to what create_constraint=True here would emit. + create_constraint=False, + ), + ), + sqlalchemy.Index( + "ix_run_run_id_command_id", # An arbitrary name for the index. + "run_id", + "command_id", + unique=True, + ), + sqlalchemy.Index( + "ix_run_run_id_index_in_run", # An arbitrary name for the index. + "run_id", + "index_in_run", + unique=True, + ), + sqlalchemy.Index( + "ix_run_run_id_command_status_index_in_run", # An arbitrary name for the index. 
+ "run_id", + "command_status", + "index_in_run", + unique=True, + ), +) + + +data_files_table = sqlalchemy.Table( + "data_files", + metadata, + sqlalchemy.Column( + "id", + sqlalchemy.String, + primary_key=True, + ), + sqlalchemy.Column( + "name", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "file_hash", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "created_at", + UTCDateTime, + nullable=False, + ), + sqlalchemy.Column( + "source", + sqlalchemy.Enum( + DataFileSourceSQLEnum, + values_callable=lambda obj: [e.value for e in obj], + validate_strings=True, + # create_constraint=False to match the underlying SQL, which omits + # the constraint because of a bug in the migration that introduced this + # column. This is not intended to ever have values other than those in + # DataFileSourceSQLEnum. + create_constraint=False, + ), + # nullable=True to match the underlying SQL, which is nullable because of a bug + # in the migration that introduced this column. This is not intended to ever be + # null in practice. + nullable=True, + ), +) + + +run_csv_rtp_table = sqlalchemy.Table( + "run_csv_rtp_table", + metadata, + sqlalchemy.Column( + "row_id", + sqlalchemy.Integer, + primary_key=True, + ), + sqlalchemy.Column( + "run_id", + sqlalchemy.ForeignKey("run.id"), + nullable=False, + ), + sqlalchemy.Column( + "parameter_variable_name", + sqlalchemy.String, + nullable=False, + ), + sqlalchemy.Column( + "file_id", + sqlalchemy.ForeignKey("data_files.id"), + nullable=True, + ), +) + + +class BooleanSettingKey(enum.Enum): + """Keys for boolean settings.""" + + ENABLE_ERROR_RECOVERY = "enable_error_recovery" + + +boolean_setting_table = sqlalchemy.Table( + "boolean_setting", + metadata, + sqlalchemy.Column( + "key", + sqlalchemy.Enum( + BooleanSettingKey, + values_callable=lambda obj: [e.value for e in obj], + validate_strings=True, + create_constraint=True, + ), + primary_key=True, + ), + sqlalchemy.Column( + "value", + sqlalchemy.Boolean, + nullable=False, + ), +) + + +labware_offset_table = sqlalchemy.Table( + "labware_offset_with_sequence", + metadata, + # Numeric row ID for ordering: + sqlalchemy.Column("row_id", sqlalchemy.Integer, primary_key=True), + # String UUID for exposing over HTTP: + sqlalchemy.Column( + "offset_id", sqlalchemy.String, nullable=False, unique=True, index=True + ), + # The URI identifying the labware definition that this offset applies to. + sqlalchemy.Column("definition_uri", sqlalchemy.String, nullable=False), + # The offset itself: + sqlalchemy.Column("vector_x", sqlalchemy.Float, nullable=False), + sqlalchemy.Column("vector_y", sqlalchemy.Float, nullable=False), + sqlalchemy.Column("vector_z", sqlalchemy.Float, nullable=False), + # Whether this record is "active", i.e. 
whether it should be considered as a + # candidate to apply to runs and affect actual robot motion: + sqlalchemy.Column("active", sqlalchemy.Boolean, nullable=False), + # When this record was created: + sqlalchemy.Column("created_at", UTCDateTime, nullable=False), +) + +labware_offset_location_sequence_components_table = sqlalchemy.Table( + "labware_offset_sequence_components", + metadata, + # ID for this row, which largely won't be used + sqlalchemy.Column("row_id", sqlalchemy.Integer, primary_key=True), + # Which offset this belongs to + sqlalchemy.Column( + "offset_id", + sqlalchemy.ForeignKey( + "labware_offset_with_sequence.row_id", + ), + nullable=False, + index=True, + ), + # Its position within the sequence + sqlalchemy.Column("sequence_ordinal", sqlalchemy.Integer, nullable=False), + # An identifier for the component; in practice this will be an enum entry (of the kind values + # of the LabwareOffsetSequenceComponent models) but by keeping that out of the schema we don't + # have to change the schema if we add something new there + sqlalchemy.Column("component_kind", sqlalchemy.String, nullable=False), + # The value of the component, which will differ in kind by what component it is, and would be + # annoying to further schematize without yet more normalization. If we ever add a sequence component + # that has more than one value in it (think twice before doing this), pick a primary value that you'll + # be searching by and put that here. + sqlalchemy.Column("primary_component_value", sqlalchemy.String, nullable=False), + # If the value of the component has more than one thing in it, dump it to json and put it here. + sqlalchemy.Column("component_value_json", sqlalchemy.String, nullable=False), +) diff --git a/robot-server/tests/integration/http_api/test_labware_offsets.tavern.yaml b/robot-server/tests/integration/http_api/test_labware_offsets.tavern.yaml index f84e5b15d56..0745f10a2ae 100644 --- a/robot-server/tests/integration/http_api/test_labware_offsets.tavern.yaml +++ b/robot-server/tests/integration/http_api/test_labware_offsets.tavern.yaml @@ -12,11 +12,17 @@ stages: json: data: definitionUri: definitionUri1 - location: - slotName: A1 - definitionUri: testNamespace/testLoadName/123 - moduleModel: thermocyclerModuleV2 - vector: { x: 1, y: 1, z: 1 } + locationSequence: + - kind: onLabware + labwareUri: testNamespace/testLoadName/123 + - kind: onModule + moduleModel: thermocyclerModuleV2 + - kind: onAddressableArea + addressableAreaName: A1 + vector: + x: 1 + y: 1 + z: 1 response: status_code: 201 json: @@ -24,11 +30,17 @@ stages: id: !anystr createdAt: !anystr definitionUri: definitionUri1 - location: - slotName: A1 - definitionUri: testNamespace/testLoadName/123 - moduleModel: thermocyclerModuleV2 - vector: { x: 1, y: 1, z: 1 } + locationSequence: + - kind: onLabware + labwareUri: testNamespace/testLoadName/123 + - kind: onModule + moduleModel: thermocyclerModuleV2 + - kind: onAddressableArea + addressableAreaName: A1 + vector: + x: 1 + y: 1 + z: 1 save: json: offset_1_data: data @@ -41,9 +53,13 @@ stages: json: data: definitionUri: definitionUri2 - location: - slotName: A2 - vector: { x: 2, y: 2, z: 2 } + locationSequence: + - kind: onAddressableArea + addressableAreaName: A2 + vector: + x: 2 + y: 2 + z: 2 response: status_code: 201 save: @@ -57,9 +73,13 @@ stages: json: data: definitionUri: definitionUri3 - location: - slotName: A3 - vector: { x: 3, y: 3, z: 3 } + locationSequence: + - kind: onAddressableArea + addressableAreaName: A3 + vector: + x: 3 + y: 3 + z: 3 
response: status_code: 201 save: @@ -146,10 +166,11 @@ stages: json: data: definitionUri: testNamespace/loadName1/1 - location: - slotName: A1 - # No moduleModel - # No definitionUri + locationSequence: + - kind: onAddressableArea + addressableAreaName: A1 + # No moduleModel + # No definitionUri vector: x: 1 y: 2 @@ -166,10 +187,12 @@ stages: json: data: definitionUri: testNamespace/loadName2/1 - location: - slotName: A1 - moduleModel: temperatureModuleV2 - # No definitionUri + locationSequence: + - kind: onModule + moduleModel: temperatureModuleV2 + - kind: onAddressableArea + addressableAreaName: A1 + # No definitionUri vector: x: 1 y: 2 @@ -186,10 +209,12 @@ stages: json: data: definitionUri: testNamespace/loadName2/1 - location: - slotName: A1 - # no moduleModel - definitionUri: testNamespace/adapterLoadName/1 + locationSequence: + - kind: onLabware + labwareUri: testNamespace/adapterLoadName/1 + - kind: onAddressableArea + addressableAreaName: A1 + # no moduleModel vector: x: 1 y: 2 @@ -206,10 +231,13 @@ stages: json: data: definitionUri: testNamespace/loadName3/1 - location: - slotName: A1 - moduleModel: temperatureModuleV2 - definitionUri: testNamespace/adapterLoadName/1 + locationSequence: + - kind: onLabware + labwareUri: testNamespace/adapterLoadName/1 + - kind: onModule + moduleModel: temperatureModuleV2 + - kind: onAddressableArea + addressableAreaName: A1 vector: x: 1 y: 2 diff --git a/robot-server/tests/labware_offsets/test_store.py b/robot-server/tests/labware_offsets/test_store.py index 0b6048da86b..ad4648b2b83 100644 --- a/robot-server/tests/labware_offsets/test_store.py +++ b/robot-server/tests/labware_offsets/test_store.py @@ -6,16 +6,25 @@ import sqlalchemy from opentrons.protocol_engine import ( - LabwareOffset, - LegacyLabwareOffsetLocation, LabwareOffsetVector, + OnLabwareOffsetLocationSequenceComponent, + OnModuleOffsetLocationSequenceComponent, + OnAddressableAreaOffsetLocationSequenceComponent, ) from opentrons.protocol_engine.types import ModuleModel -from opentrons.types import DeckSlotName - +from robot_server.persistence.tables import ( + labware_offset_location_sequence_components_table, +) from robot_server.labware_offsets.store import ( LabwareOffsetStore, LabwareOffsetNotFoundError, + IncomingStoredLabwareOffset, +) +from robot_server.labware_offsets.models import ( + StoredLabwareOffset, + DoNotFilterType, + DO_NOT_FILTER, + UnknownLabwareOffsetLocationSequenceComponent, ) @@ -25,64 +34,237 @@ def subject(sql_engine: sqlalchemy.engine.Engine) -> LabwareOffsetStore: return LabwareOffsetStore(sql_engine) -def _get_all(store: LabwareOffsetStore) -> list[LabwareOffset]: +def _get_all(store: LabwareOffsetStore) -> list[StoredLabwareOffset]: return store.search() -def test_filter_fields(subject: LabwareOffsetStore) -> None: +@pytest.mark.parametrize( + argnames=[ + "id_filter", + "definition_uri_filter", + "location_addressable_area_filter", + "location_module_model_filter", + "location_labware_uri_filter", + "returned_ids", + ], + argvalues=[ + pytest.param( + "a", + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + ["a"], + id="id-only", + ), + pytest.param( + DO_NOT_FILTER, + "definitionUri a", + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + ["a", "c", "d", "e"], + id="labware-only", + ), + pytest.param( + "a", + "definitionUri a", + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + ["a"], + id="labware-and-id-matching", + ), + pytest.param( + "a", + "definitionUri b", + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + [], + 
id="labware-and-id-conflicting", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + "A1", + DO_NOT_FILTER, + DO_NOT_FILTER, + ["a", "c", "d", "e"], + id="aa-only", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + "A1", + None, + None, + ["c"], + id="aa-and-not-mod-or-lw", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + "A1", + None, + DO_NOT_FILTER, + ["c", "d"], + id="aa-and-not-module", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + "A1", + DO_NOT_FILTER, + None, + ["c", "e"], + id="aa-and-not-lw", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + ModuleModel.MAGNETIC_BLOCK_V1, + DO_NOT_FILTER, + ["b", "e"], + id="module-only", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + ModuleModel.MAGNETIC_BLOCK_V1, + None, + ["e"], + id="module-and-not-lw", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + "location.definitionUri a", + ["a", "d"], + id="lw-only", + ), + pytest.param( + DO_NOT_FILTER, + DO_NOT_FILTER, + DO_NOT_FILTER, + None, + "location.definitionUri a", + ["d"], + id="lw-and-not-module", + ), + ], +) +def test_filter_fields( + subject: LabwareOffsetStore, + id_filter: str | DoNotFilterType, + definition_uri_filter: str | DoNotFilterType, + location_addressable_area_filter: str | DoNotFilterType, + location_module_model_filter: ModuleModel | None | DoNotFilterType, + location_labware_uri_filter: str | None | DoNotFilterType, + returned_ids: list[str], +) -> None: """Test each filterable field to make sure it returns only matching entries.""" - offset_a = LabwareOffset( - id="a", - createdAt=datetime.now(timezone.utc), - definitionUri="definitionUri a", - location=LegacyLabwareOffsetLocation( - slotName=DeckSlotName.SLOT_A1, - moduleModel=ModuleModel.THERMOCYCLER_MODULE_V1, - definitionUri="location.definitionUri a", + offsets = { + "a": IncomingStoredLabwareOffset( + id="a", + createdAt=datetime.now(timezone.utc), + definitionUri="definitionUri a", + locationSequence=[ + OnLabwareOffsetLocationSequenceComponent( + labwareUri="location.definitionUri a" + ), + OnModuleOffsetLocationSequenceComponent( + moduleModel=ModuleModel.THERMOCYCLER_MODULE_V1 + ), + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="A1" + ), + ], + vector=LabwareOffsetVector(x=1, y=2, z=3), ), - vector=LabwareOffsetVector(x=1, y=2, z=3), - ) - offset_b = LabwareOffset( - id="b", - createdAt=datetime.now(timezone.utc), - definitionUri="definitionUri b", - location=LegacyLabwareOffsetLocation( - slotName=DeckSlotName.SLOT_B1, - moduleModel=ModuleModel.MAGNETIC_BLOCK_V1, - definitionUri="location.definitionUri b", + "b": IncomingStoredLabwareOffset( + id="b", + createdAt=datetime.now(timezone.utc), + definitionUri="definitionUri b", + locationSequence=[ + OnLabwareOffsetLocationSequenceComponent( + labwareUri="location.definitionUri b" + ), + OnModuleOffsetLocationSequenceComponent( + moduleModel=ModuleModel.MAGNETIC_BLOCK_V1 + ), + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="B1" + ), + ], + vector=LabwareOffsetVector(x=2, y=4, z=6), ), - vector=LabwareOffsetVector(x=1, y=2, z=3), + "c": IncomingStoredLabwareOffset( + id="c", + createdAt=datetime.now(timezone.utc), + definitionUri="definitionUri a", + locationSequence=[ + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="A1" + ) + ], + vector=LabwareOffsetVector(x=3, y=6, z=9), + ), + "d": IncomingStoredLabwareOffset( + id="d", + createdAt=datetime.now(timezone.utc), + 
definitionUri="definitionUri a", + locationSequence=[ + OnLabwareOffsetLocationSequenceComponent( + labwareUri="location.definitionUri a" + ), + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="A1" + ), + ], + vector=LabwareOffsetVector(x=4, y=8, z=12), + ), + "e": IncomingStoredLabwareOffset( + id="e", + createdAt=datetime.now(timezone.utc), + definitionUri="definitionUri a", + locationSequence=[ + OnModuleOffsetLocationSequenceComponent( + moduleModel=ModuleModel.MAGNETIC_BLOCK_V1 + ), + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="A1" + ), + ], + vector=LabwareOffsetVector(x=5, y=10, z=15), + ), + } + for offset in offsets.values(): + subject.add(offset) + results = subject.search( + id_filter=id_filter, + definition_uri_filter=definition_uri_filter, + location_addressable_area_filter=location_addressable_area_filter, + location_module_model_filter=location_module_model_filter, + location_definition_uri_filter=location_labware_uri_filter, + ) + assert sorted(results, key=lambda o: o.id,) == sorted( + [ + StoredLabwareOffset( + id=offsets[id_].id, + createdAt=offsets[id_].createdAt, + definitionUri=offsets[id_].definitionUri, + locationSequence=offsets[id_].locationSequence, + vector=offsets[id_].vector, + ) + for id_ in returned_ids + ], + key=lambda o: o.id, ) - - subject.add(offset_a) - subject.add(offset_b) - - assert subject.search(id_filter=offset_a.id) == [offset_a] - assert subject.search(id_filter=offset_b.id) == [offset_b] - - assert subject.search(definition_uri_filter=offset_a.definitionUri) == [offset_a] - assert subject.search(definition_uri_filter=offset_b.definitionUri) == [offset_b] - - assert subject.search(location_slot_name_filter=offset_a.location.slotName) == [ - offset_a - ] - assert subject.search(location_slot_name_filter=offset_b.location.slotName) == [ - offset_b - ] - - assert subject.search( - location_module_model_filter=offset_a.location.moduleModel - ) == [offset_a] - assert subject.search( - location_module_model_filter=offset_b.location.moduleModel - ) == [offset_b] - - assert subject.search( - location_definition_uri_filter=offset_a.location.definitionUri - ) == [offset_a] - assert subject.search( - location_definition_uri_filter=offset_b.location.definitionUri - ) == [offset_b] def test_filter_combinations(subject: LabwareOffsetStore) -> None: @@ -96,27 +278,41 @@ def test_filter_combinations(subject: LabwareOffsetStore) -> None: ("id-6", "definition-uri-b"), ] labware_offsets = [ - LabwareOffset( + IncomingStoredLabwareOffset( id=id, createdAt=datetime.now(timezone.utc), definitionUri=definition_uri, - location=LegacyLabwareOffsetLocation(slotName=DeckSlotName.SLOT_A1), + locationSequence=[ + OnAddressableAreaOffsetLocationSequenceComponent( + addressableAreaName="A1" + ) + ], vector=LabwareOffsetVector(x=1, y=2, z=3), ) for (id, definition_uri) in ids_and_definition_uris ] + outgoing_offsets = [ + StoredLabwareOffset( + id=offset.id, + createdAt=offset.createdAt, + definitionUri=offset.definitionUri, + locationSequence=offset.locationSequence, + vector=offset.vector, + ) + for offset in labware_offsets + ] for labware_offset in labware_offsets: subject.add(labware_offset) # No filters: - assert subject.search() == labware_offsets + assert subject.search() == outgoing_offsets # Filter on one thing: result = subject.search(definition_uri_filter="definition-uri-b") assert len(result) == 3 assert result == [ - entry for entry in labware_offsets if entry.definitionUri == "definition-uri-b" + entry for 
+        entry for entry in outgoing_offsets if entry.definitionUri == "definition-uri-b"
     ]
 
     # Filter on two things:
@@ -124,7 +320,7 @@ def test_filter_combinations(subject: LabwareOffsetStore) -> None:
         id_filter="id-2",
         definition_uri_filter="definition-uri-b",
     )
-    assert result == [labware_offsets[1]]
+    assert result == [outgoing_offsets[1]]
 
     # Filters should be ANDed, not ORed, together:
     result = subject.search(
@@ -136,16 +332,32 @@ def test_filter_combinations(subject: LabwareOffsetStore) -> None:
 
 def test_delete(subject: LabwareOffsetStore) -> None:
     """Test the `delete()` and `delete_all()` methods."""
-    a, b, c = [
-        LabwareOffset(
+    incoming_offsets = [
+        IncomingStoredLabwareOffset(
             id=id,
             createdAt=datetime.now(timezone.utc),
             definitionUri="",
-            location=LegacyLabwareOffsetLocation(slotName=DeckSlotName.SLOT_A1),
+            locationSequence=[
+                OnAddressableAreaOffsetLocationSequenceComponent(
+                    addressableAreaName="A1"
+                )
+            ],
             vector=LabwareOffsetVector(x=1, y=2, z=3),
         )
         for id in ["id-a", "id-b", "id-c"]
     ]
+    outgoing_offsets = [
+        StoredLabwareOffset(
+            id=offset.id,
+            createdAt=offset.createdAt,
+            definitionUri=offset.definitionUri,
+            locationSequence=offset.locationSequence,
+            vector=offset.vector,
+        )
+        for offset in incoming_offsets
+    ]
+    a, b, c = incoming_offsets
+    out_a, out_b, out_c = outgoing_offsets
 
     with pytest.raises(LabwareOffsetNotFoundError):
         subject.delete("b")
@@ -153,10 +365,52 @@ def test_delete(subject: LabwareOffsetStore) -> None:
     subject.add(a)
     subject.add(b)
     subject.add(c)
-    assert subject.delete(b.id) == b
-    assert _get_all(subject) == [a, c]
+
+    assert subject.delete(b.id) == out_b
+    assert _get_all(subject) == [out_a, out_c]
 
     with pytest.raises(LabwareOffsetNotFoundError):
-        subject.delete(b.id)
+        subject.delete(out_b.id)
 
     subject.delete_all()
     assert _get_all(subject) == []
+
+
+def test_handle_unknown(
+    subject: LabwareOffsetStore, sql_engine: sqlalchemy.engine.Engine
+) -> None:
+    """Test that a stored component of an unrecognized kind is returned as an unknown component."""
+    original_location = OnAddressableAreaOffsetLocationSequenceComponent(
+        addressableAreaName="A1"
+    )
+    incoming_valid = IncomingStoredLabwareOffset(
+        id="id-a",
+        createdAt=datetime.now(timezone.utc),
+        definitionUri="",
+        locationSequence=[original_location],
+        vector=LabwareOffsetVector(x=1, y=2, z=3),
+    )
+    outgoing_offset = StoredLabwareOffset(
+        id=incoming_valid.id,
+        createdAt=incoming_valid.createdAt,
+        definitionUri=incoming_valid.definitionUri,
+        locationSequence=[
+            original_location,
+            UnknownLabwareOffsetLocationSequenceComponent(
+                storedKind="asdasdad", primaryValue="ddddddd"
+            ),
+        ],
+        vector=incoming_valid.vector,
+    )
+    subject.add(incoming_valid)
+    # Write a raw row whose component_kind this code does not recognize, as if
+    # a future schema version had stored it.
+    with sql_engine.begin() as transaction:
+        transaction.execute(
+            sqlalchemy.insert(labware_offset_location_sequence_components_table).values(
+                row_id=2,
+                offset_id=1,
+                sequence_ordinal=2,
+                component_kind="asdasdad",
+                primary_component_value="ddddddd",
+                component_value_json='{"asdasda": "dddddd", "kind": "asdasdad"}',
+            )
+        )
+    assert subject.search(id_filter="id-a") == [outgoing_offset]
diff --git a/robot-server/tests/persistence/test_tables.py b/robot-server/tests/persistence/test_tables.py
index 87110d69eb6..bdb463427ce 100644
--- a/robot-server/tests/persistence/test_tables.py
+++ b/robot-server/tests/persistence/test_tables.py
@@ -1,6 +1,5 @@
 """Tests for SQL tables."""
-
 from pathlib import Path
 from typing import List, cast
@@ -20,6 +19,7 @@
     schema_7,
     schema_8,
     schema_9,
+    schema_10,
 )
 
 # The statements that we expect to emit when we create a fresh database.
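For readers skimming the expected DDL in the next hunk: schema 10 splits each labware offset across two tables. The offset's scalar fields live in labware_offset_with_sequence, and each entry of its locationSequence becomes one row in labware_offset_sequence_components, keyed by the parent's row_id and ordered by sequence_ordinal. component_kind is the discriminator, primary_component_value is the single filterable value, and component_value_json holds the full serialized component. A minimal sketch of reading one offset's sequence back (illustrative only: it uses plain sqlite3 rather than the server's SQLAlchemy layer, and the function name is invented):

    import json
    import sqlite3


    def read_location_sequence(db_path: str, offset_row_id: int) -> list[dict]:
        """Return one offset's stored location-sequence components, in order."""
        with sqlite3.connect(db_path) as conn:
            rows = conn.execute(
                "SELECT component_kind, primary_component_value, component_value_json"
                " FROM labware_offset_sequence_components"
                " WHERE offset_id = ?"
                " ORDER BY sequence_ordinal",
                (offset_row_id,),
            ).fetchall()
        # component_value_json round-trips the full model; the other two
        # columns exist so SQL can filter without parsing JSON.
        return [
            {"kind": kind, "primary": primary, "component": json.loads(raw)}
            for (kind, primary, raw) in rows
        ]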
@@ -160,6 +160,167 @@
     )
     """,
     """
+    CREATE TABLE labware_offset_with_sequence (
+        row_id INTEGER NOT NULL,
+        offset_id VARCHAR NOT NULL,
+        definition_uri VARCHAR NOT NULL,
+        vector_x FLOAT NOT NULL,
+        vector_y FLOAT NOT NULL,
+        vector_z FLOAT NOT NULL,
+        active BOOLEAN NOT NULL,
+        created_at DATETIME NOT NULL,
+        PRIMARY KEY (row_id)
+    )
+    """,
+    """
+    CREATE UNIQUE INDEX ix_labware_offset_with_sequence_offset_id ON labware_offset_with_sequence (offset_id)
+    """,
+    """
+    CREATE TABLE labware_offset_sequence_components (
+        row_id INTEGER NOT NULL,
+        offset_id INTEGER NOT NULL,
+        sequence_ordinal INTEGER NOT NULL,
+        component_kind VARCHAR NOT NULL,
+        primary_component_value VARCHAR NOT NULL,
+        component_value_json VARCHAR NOT NULL,
+        PRIMARY KEY (row_id),
+        FOREIGN KEY(offset_id) REFERENCES labware_offset_with_sequence (row_id)
+    )
+    """,
+    """
+    CREATE INDEX ix_labware_offset_sequence_components_offset_id ON labware_offset_sequence_components (offset_id)
+    """,
+]
+
+EXPECTED_STATEMENTS_V10 = EXPECTED_STATEMENTS_LATEST
+
+
+EXPECTED_STATEMENTS_V9 = [
+    """
+    CREATE TABLE protocol (
+        id VARCHAR NOT NULL,
+        created_at DATETIME NOT NULL,
+        protocol_key VARCHAR,
+        protocol_kind VARCHAR(14) NOT NULL,
+        PRIMARY KEY (id),
+        CONSTRAINT protocolkindsqlenum CHECK (protocol_kind IN ('standard', 'quick-transfer'))
+    )
+    """,
+    """
+    CREATE TABLE analysis (
+        id VARCHAR NOT NULL,
+        protocol_id VARCHAR NOT NULL,
+        analyzer_version VARCHAR NOT NULL,
+        completed_analysis VARCHAR NOT NULL,
+        PRIMARY KEY (id),
+        FOREIGN KEY(protocol_id) REFERENCES protocol (id)
+    )
+    """,
+    """
+    CREATE TABLE analysis_primitive_rtp_table (
+        row_id INTEGER NOT NULL,
+        analysis_id VARCHAR NOT NULL,
+        parameter_variable_name VARCHAR NOT NULL,
+        parameter_type VARCHAR(5) NOT NULL,
+        parameter_value VARCHAR NOT NULL,
+        PRIMARY KEY (row_id),
+        FOREIGN KEY(analysis_id) REFERENCES analysis (id),
+        CONSTRAINT primitiveparamsqlenum CHECK (parameter_type IN ('int', 'float', 'bool', 'str'))
+    )
+    """,
+    """
+    CREATE TABLE analysis_csv_rtp_table (
+        row_id INTEGER NOT NULL,
+        analysis_id VARCHAR NOT NULL,
+        parameter_variable_name VARCHAR NOT NULL,
+        file_id VARCHAR,
+        PRIMARY KEY (row_id),
+        FOREIGN KEY(analysis_id) REFERENCES analysis (id),
+        FOREIGN KEY(file_id) REFERENCES data_files (id)
+    )
+    """,
+    """
+    CREATE INDEX ix_analysis_protocol_id ON analysis (protocol_id)
+    """,
+    """
+    CREATE TABLE run (
+        id VARCHAR NOT NULL,
+        created_at DATETIME NOT NULL,
+        protocol_id VARCHAR,
+        state_summary VARCHAR,
+        engine_status VARCHAR,
+        _updated_at DATETIME,
+        run_time_parameters VARCHAR,
+        PRIMARY KEY (id),
+        FOREIGN KEY(protocol_id) REFERENCES protocol (id)
+    )
+    """,
+    """
+    CREATE TABLE action (
+        id VARCHAR NOT NULL,
+        created_at DATETIME NOT NULL,
+        action_type VARCHAR NOT NULL,
+        run_id VARCHAR NOT NULL,
+        PRIMARY KEY (id),
+        FOREIGN KEY(run_id) REFERENCES run (id)
+    )
+    """,
+    """
+    CREATE TABLE run_command (
+        row_id INTEGER NOT NULL,
+        run_id VARCHAR NOT NULL,
+        index_in_run INTEGER NOT NULL,
+        command_id VARCHAR NOT NULL,
+        command VARCHAR NOT NULL,
+        command_intent VARCHAR,
+        command_error VARCHAR,
+        command_status VARCHAR(9),
+        PRIMARY KEY (row_id),
+        FOREIGN KEY(run_id) REFERENCES run (id)
+    )
+    """,
+    """
+    CREATE UNIQUE INDEX ix_run_run_id_command_id ON run_command (run_id, command_id)
+    """,
+    """
+    CREATE UNIQUE INDEX ix_run_run_id_index_in_run ON run_command (run_id, index_in_run)
+    """,
+    """
+    CREATE UNIQUE INDEX ix_run_run_id_command_status_index_in_run ON run_command (run_id, command_status, index_in_run)
+    """,
+    """
+    CREATE INDEX ix_protocol_protocol_kind ON protocol (protocol_kind)
+    """,
+    """
+    CREATE TABLE data_files (
+        id VARCHAR NOT NULL,
+        name VARCHAR NOT NULL,
+        file_hash VARCHAR NOT NULL,
+        created_at DATETIME NOT NULL,
+        source VARCHAR(9),
+        PRIMARY KEY (id)
+    )
+    """,
+    """
+    CREATE TABLE run_csv_rtp_table (
+        row_id INTEGER NOT NULL,
+        run_id VARCHAR NOT NULL,
+        parameter_variable_name VARCHAR NOT NULL,
+        file_id VARCHAR,
+        PRIMARY KEY (row_id),
+        FOREIGN KEY(run_id) REFERENCES run (id),
+        FOREIGN KEY(file_id) REFERENCES data_files (id)
+    )
+    """,
+    """
+    CREATE TABLE boolean_setting (
+        "key" VARCHAR(21) NOT NULL,
+        value BOOLEAN NOT NULL,
+        PRIMARY KEY ("key"),
+        CONSTRAINT booleansettingkey CHECK ("key" IN ('enable_error_recovery'))
+    )
+    """,
+    """
     CREATE TABLE labware_offset (
         row_id INTEGER NOT NULL,
         offset_id VARCHAR NOT NULL,
@@ -181,9 +342,6 @@
 ]
 
 
-EXPECTED_STATEMENTS_V9 = EXPECTED_STATEMENTS_LATEST
-
-
 EXPECTED_STATEMENTS_V8 = [
     """
     CREATE TABLE protocol (
@@ -831,6 +989,7 @@ def _normalize_statement(statement: str) -> str:
     ("metadata", "expected_statements"),
     [
         (latest_metadata, EXPECTED_STATEMENTS_LATEST),
+        (schema_10.metadata, EXPECTED_STATEMENTS_V10),
         (schema_9.metadata, EXPECTED_STATEMENTS_V9),
         (schema_8.metadata, EXPECTED_STATEMENTS_V8),
         (schema_7.metadata, EXPECTED_STATEMENTS_V7),
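The test_handle_unknown case earlier pins down the forward-compatibility behavior these tables enable: when a stored component_kind fails discriminated-union validation, the store surfaces the row as an UnknownLabwareOffsetLocationSequenceComponent instead of erroring out of the whole search. A sketch of the fallback it implies (the helper name is invented; the import paths are assumptions based on the test file's own imports, and the real logic lives inside the robot-server store):

    from typing import Any

    import pydantic

    # Assumed imports: the annotated discriminated union is exported by
    # opentrons.protocol_engine, and the unknown-component model is assumed to
    # live in robot_server.labware_offsets.models.
    from opentrons.protocol_engine import LabwareOffsetLocationSequenceComponents
    from robot_server.labware_offsets.models import (
        UnknownLabwareOffsetLocationSequenceComponent,
    )

    _COMPONENT_ADAPTER: pydantic.TypeAdapter[Any] = pydantic.TypeAdapter(
        LabwareOffsetLocationSequenceComponents
    )


    def reconstruct_component(
        component_kind: str,
        primary_component_value: str,
        component_value_json: str,
    ) -> Any:
        """Parse one stored sequence-component row, tolerating unknown kinds."""
        try:
            # Known kinds parse via the "kind" discriminator embedded in the JSON.
            return _COMPONENT_ADAPTER.validate_json(component_value_json)
        except pydantic.ValidationError:
            # A kind written by newer code: preserve what we know rather than
            # failing the whole search, mirroring test_handle_unknown.
            return UnknownLabwareOffsetLocationSequenceComponent(
                storedKind=component_kind,
                primaryValue=primary_component_value,
            )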