here is a bad server integration
sfoster1 committed Jan 25, 2025
1 parent a34b3ba commit 416b58b
Showing 12 changed files with 722 additions and 45 deletions.
4 changes: 4 additions & 0 deletions api/src/opentrons/protocol_engine/__init__.py
@@ -27,8 +27,10 @@
from .types import (
LabwareOffset,
LabwareOffsetCreate,
LegacyLabwareOffsetCreate,
LabwareOffsetVector,
LegacyLabwareOffsetLocation,
LabwareOffsetLocationSequence,
LabwareMovementStrategy,
AddressableOffsetVector,
DeckPoint,
@@ -95,6 +97,8 @@
# public value interfaces and models
"LabwareOffset",
"LabwareOffsetCreate",
"LegacyLabwareOffsetCreate",
"LabwareOffsetLocationSequence",
"LabwareOffsetVector",
"LegacyLabwareOffsetLocation",
"LabwareMovementStrategy",
@@ -37,9 +37,10 @@ def standardize_labware_offset_create(
)


def _legacy_offset_location_to_offset_location_sequence(
def legacy_offset_location_to_offset_location_sequence(
location: LegacyLabwareOffsetLocation, deck_definition: DeckDefinitionV5
) -> LabwareOffsetLocationSequence:
"""Convert a legacy location to a new-style sequence."""
sequence: LabwareOffsetLocationSequence = []
if location.definitionUri:
sequence.append(
@@ -165,7 +166,7 @@ def _locations_for_create(
}
)
return (
_legacy_offset_location_to_offset_location_sequence(
legacy_offset_location_to_offset_location_sequence(
normalized, deck_definition
),
normalized,
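Since this commit makes the conversion helper public by dropping its leading underscore, a brief usage sketch may help. Only the function signature comes from this diff: the deck-definition loader, the slot/module values, and the contents of the returned sequence are assumptions for illustration.

from opentrons_shared_data.deck import load as load_deck_definition  # assumed loader API
from opentrons.protocol_engine.types import LegacyLabwareOffsetLocation, ModuleModel
from opentrons.types import DeckSlotName

# Assumed deck name and schema version; DeckDefinitionV5 in the signature suggests version 5.
deck_definition = load_deck_definition("ot3_standard", version=5)

legacy_location = LegacyLabwareOffsetLocation(
    slotName=DeckSlotName.SLOT_D1,
    moduleModel=ModuleModel.TEMPERATURE_MODULE_V2,
)

# legacy_offset_location_to_offset_location_sequence is defined in the module patched above
# (its file name is not visible in this diff). It is expected to return a list of
# On*OffsetSequenceComponent entries describing the same place as the legacy
# (slot, module, adapter) triple; the exact components and their order depend on the
# hidden body of the helper and on the deck definition.
sequence = legacy_offset_location_to_offset_location_sequence(
    legacy_location, deck_definition
)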
1 change: 0 additions & 1 deletion api/src/opentrons/protocol_engine/types/labware.py
@@ -20,7 +20,6 @@
class OverlapOffset(Vec3f):
"""Offset representing overlap space of one labware on top of another labware or module."""


class LabwareOffset(BaseModel):
"""An offset that the robot adds to a pipette's position when it moves to a labware.
26 changes: 25 additions & 1 deletion robot-server/robot_server/labware_offsets/models.py
@@ -1,11 +1,35 @@
"""Request/response models for the `/labwareOffsets` endpoints."""


from datetime import datetime
from typing import Literal

from pydantic import BaseModel, Field

from opentrons.protocol_engine import LabwareOffsetVector, LabwareOffsetLocationSequence

from robot_server.errors.error_responses import ErrorDetails


class StoredLabwareOffset(BaseModel):
"""An offset that the robot adds to a pipette's position when it moves to labware."""

# This is defined separately from the model in protocol_engine because, as a new API, it does
# not have to handle legacy locations. There is probably a better way to do this than copying
# the model's contents, but it isn't clear yet what that would be.
id: str = Field(..., description="Unique labware offset record identifier.")
createdAt: datetime = Field(..., description="When this labware offset was added.")
definitionUri: str = Field(..., description="The URI for the labware's definition.")

locationSequence: LabwareOffsetLocationSequence | None = Field(
default=None,
description="Where the labware is located on the robot. Can represent all locations, but may not be present for older runs.",
)
vector: LabwareOffsetVector = Field(
...,
description="The offset applied to matching labware.",
)


class LabwareOffsetNotFound(ErrorDetails):
"""An error returned when a requested labware offset does not exist."""

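To make the shape of this model concrete, here is a hedged construction sketch. The specific id, timestamp, labware URI, addressable-area name, and the component ordering (closest-to-labware first) are illustrative assumptions, not something this commit asserts.

from datetime import datetime, timezone

from opentrons.protocol_engine import LabwareOffsetVector
from opentrons.protocol_engine.types import (
    ModuleModel,
    OnAddressableAreaOffsetSequenceComponent,
    OnModuleOffsetSequenceComponent,
)
from robot_server.labware_offsets.models import StoredLabwareOffset

example = StoredLabwareOffset(
    id="offset-123",  # illustrative id
    createdAt=datetime(2025, 1, 25, tzinfo=timezone.utc),
    definitionUri="opentrons/opentrons_96_wellplate_200ul_pcr_full_skirt/2",
    locationSequence=[
        # Assumed ordering: the plate sits on a Thermocycler, which sits on slot/area "B1".
        OnModuleOffsetSequenceComponent(moduleModel=ModuleModel.THERMOCYCLER_MODULE_V2),
        OnAddressableAreaOffsetSequenceComponent(addressableAreaName="B1"),
    ],
    vector=LabwareOffsetVector(x=0.1, y=-0.2, z=1.0),
)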
16 changes: 8 additions & 8 deletions robot-server/robot_server/labware_offsets/router.py
@@ -1,6 +1,5 @@
"""FastAPI endpoint functions for the `/labwareOffsets` endpoints."""


from datetime import datetime
import textwrap
from typing import Annotated, Literal
@@ -10,7 +9,7 @@
from pydantic.json_schema import SkipJsonSchema
from server_utils.fastapi_utils.light_router import LightRouter

from opentrons.protocol_engine import LabwareOffset, LabwareOffsetCreate, ModuleModel
from opentrons.protocol_engine import LabwareOffsetCreate, ModuleModel
from opentrons.types import DeckSlotName

from robot_server.labware_offsets.models import LabwareOffsetNotFound
@@ -31,6 +30,7 @@
LabwareOffsetStore,
)
from .fastapi_dependencies import get_labware_offset_store
from .models import StoredLabwareOffset


router = LightRouter()
@@ -55,8 +55,8 @@ async def post_labware_offset( # noqa: D103
new_offset_id: Annotated[str, fastapi.Depends(get_unique_id)],
new_offset_created_at: Annotated[datetime, fastapi.Depends(get_current_time)],
request_body: Annotated[RequestModel[LabwareOffsetCreate], fastapi.Body()],
) -> PydanticResponse[SimpleBody[LabwareOffset]]:
new_offset = LabwareOffset.model_construct(
) -> PydanticResponse[SimpleBody[StoredLabwareOffset]]:
new_offset = StoredLabwareOffset.model_construct(
id=new_offset_id,
createdAt=new_offset_created_at,
definitionUri=request_body.data.definitionUri,
@@ -136,7 +136,7 @@ async def get_labware_offsets( # noqa: D103
alias="pageLength", description="The maximum number of entries to return."
),
] = "unlimited",
) -> PydanticResponse[SimpleMultiBody[LabwareOffset]]:
) -> PydanticResponse[SimpleMultiBody[StoredLabwareOffset]]:
if cursor not in (0, None) or page_length != "unlimited":
# todo(mm, 2024-12-06): Support this when LabwareOffsetStore supports it.
raise NotImplementedError(
@@ -158,7 +158,7 @@ async def get_labware_offsets( # noqa: D103
)

return await PydanticResponse.create(
SimpleMultiBody[LabwareOffset].model_construct(
SimpleMultiBody[StoredLabwareOffset].model_construct(
data=result_data,
meta=meta,
)
@@ -177,7 +177,7 @@ async def delete_labware_offset( # noqa: D103
str,
fastapi.Path(description="The `id` field of the offset to delete."),
],
) -> PydanticResponse[SimpleBody[LabwareOffset]]:
) -> PydanticResponse[SimpleBody[StoredLabwareOffset]]:
try:
deleted_offset = store.delete(offset_id=id)
except LabwareOffsetNotFoundError as e:
@@ -194,7 +194,7 @@ async def delete_labware_offset( # noqa: D103
summary="Delete all labware offsets",
)
async def delete_all_labware_offsets( # noqa: D103
store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)]
store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)],
) -> PydanticResponse[SimpleEmptyBody]:
store.delete_all()
return await PydanticResponse.create(SimpleEmptyBody.model_construct())
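As a quick way to exercise the endpoints touched here, a hedged httpx sketch follows. The list path `/labwareOffsets` comes from this file's docstring; the per-id delete path, the port, and the version header are assumptions based on robot-server conventions rather than anything shown in this hunk.

import httpx

BASE = "http://localhost:31950"  # assumed robot-server address/port
HEADERS = {"Opentrons-Version": "*"}  # version header robot-server typically requires

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    # List every stored offset (cursor/pageLength beyond the defaults are not yet supported,
    # per the NotImplementedError in the handler above).
    offsets = client.get("/labwareOffsets").json()["data"]

    # Delete one offset by id; the response body echoes the deleted StoredLabwareOffset.
    # The exact path is assumed from the handler's `id` path parameter.
    if offsets:
        deleted = client.delete(f"/labwareOffsets/{offsets[0]['id']}").json()["data"]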
157 changes: 128 additions & 29 deletions robot-server/robot_server/labware_offsets/store.py
@@ -1,17 +1,23 @@
# noqa: D100

import enum
from typing import Final, Literal, TypeAlias
from typing import Final, Literal, TypeAlias, Iterator

from opentrons.protocol_engine.types import (
LabwareOffset,
LegacyLabwareOffsetLocation,
LabwareOffsetVector,
ModuleModel,
OnAddressableAreaOffsetSequenceComponent,
OnModuleOffsetSequenceComponent,
OnLabwareOffsetSequenceComponent,
LabwareOffsetLocationSequenceComponents,
)
from opentrons.types import DeckSlotName

from robot_server.persistence.tables import labware_offset_table
from robot_server.persistence.tables import (
labware_offset_table,
labware_offset_location_sequence_components_table,
)
from .models import StoredLabwareOffset

import sqlalchemy
import sqlalchemy.exc
@@ -50,11 +56,26 @@ def __init__(self, sql_engine: sqlalchemy.engine.Engine) -> None:
"""
self._sql_engine = sql_engine

def add(self, offset: LabwareOffset) -> None:
def add(self, offset: StoredLabwareOffset) -> None:
"""Store a new labware offset."""
with self._sql_engine.begin() as transaction:
offset_row_id = (
transaction.execute(
sqlalchemy.insert(labware_offset_table)
.values(_pydantic_to_sql_offset(offset))
.returning(labware_offset_table.c.row_id)
)
.one()
.row_id
)
transaction.execute(
sqlalchemy.insert(labware_offset_table).values(_pydantic_to_sql(offset))
sqlalchemy.insert(labware_offset_location_sequence_components_table).values(
list(
_pydantic_to_sql_location_sequence_iterator(
offset, offset_row_id
)
)
)
)

def search(
@@ -70,19 +91,28 @@ def search(
# The logic for that is currently duplicated across several places in
# robot-server and api. We should try to clean that up, or at least avoid
# making it worse.
) -> list[LabwareOffset]:
) -> list[StoredLabwareOffset]:
"""Return all matching labware offsets in order from oldest-added to newest."""
statement = (
sqlalchemy.select(labware_offset_table)
sqlalchemy.select(
labware_offset_table, labware_offset_location_sequence_components_table
)
.order_by(labware_offset_table.c.row_id)
.where(labware_offset_table.c.active == True) # noqa: E712
.where(
labware_offset_location_sequence_components_table.c.offset_id
== labware_offset_table.c.row_id
)
.order_by(
labware_offset_location_sequence_components_table.c.sequence_ordinal
)
)

if id_filter is not DO_NOT_FILTER:
statement = statement.where(labware_offset_table.c.offset_id == id_filter)
if definition_uri_filter is not DO_NOT_FILTER:
statement = statement.where(
labware_offset_table.c.definition_uri == definition_uri_filter
)
if location_slot_name_filter is not DO_NOT_FILTER:
statement = statement.where(
@@ -110,7 +140,7 @@ def search(

return [_sql_to_pydantic(row) for row in result]

def delete(self, offset_id: str) -> LabwareOffset:
def delete(self, offset_id: str) -> StoredLabwareOffset:
"""Delete a labware offset by its ID. Return what was just deleted."""
with self._sql_engine.begin() as transaction:
try:
@@ -131,7 +161,7 @@ def delete(self, offset_id: str) -> LabwareOffset:
.values(active=False)
)

return _sql_to_pydantic(row_to_delete)
location_sequence = transaction.execute(
sqlalchemy.select(labware_offset_location_sequence_components_table)
.where(
labware_offset_location_sequence_components_table.c.offset_id
== row_to_delete.row_id
)
.order_by(
labware_offset_location_sequence_components_table.c.sequence_ordinal
)
).all()

return _sql_to_pydantic(row_to_delete, location_sequence)

def delete_all(self) -> None:
"""Delete all labware offsets."""
@@ -149,36 +190,94 @@ def __init__(self, bad_offset_id: str) -> None:
self.bad_offset_id = bad_offset_id


def _sql_to_pydantic(row: sqlalchemy.engine.Row) -> LabwareOffset:
return LabwareOffset(
id=row.offset_id,
createdAt=row.created_at,
definitionUri=row.definition_uri,
location=LegacyLabwareOffsetLocation(
slotName=DeckSlotName(row.location_slot_name),
moduleModel=row.location_module_model,
definitionUri=row.location_definition_uri,
def _sql_sequence_component_to_pydantic_sequence_component(
component_row: sqlalchemy.engine.Row,
) -> LabwareOffsetLocationSequenceComponents:
if component_row.component_kind == "onLabware":
return OnLabwareOffsetSequenceComponent(
labwareUri=component_row.primary_component_value
)
elif component_row.component_kind == "onModule":
return OnModuleOffsetSequenceComponent(
moduleModel=ModuleModel(component_row.primary_component_value)
)
elif component_row.component_kind == "onAddressableArea":
return OnAddressableAreaOffsetSequenceComponent(
addressableAreaName=component_row.primary_component_value
)
else:
raise KeyError(component_row.component_kind)


def _sql_sequence_to_pydantic_sequence_iterator(
component_rows: list[sqlalchemy.engine.Row],
) -> Iterator[LabwareOffsetLocationSequenceComponents]:
for row in component_rows:
try:
yield _sql_sequence_component_to_pydantic_sequence_component(row)
except Exception:
# Skip any row that cannot be converted (for example, an unrecognized component_kind).
pass


def _sql_to_pydantic(
offset_row: sqlalchemy.engine.Row,
location_sequence_rows: list[sqlalchemy.engine.Row],
) -> StoredLabwareOffset:
return StoredLabwareOffset(
id=offset_row.offset_id,
createdAt=offset_row.created_at,
definitionUri=offset_row.definition_uri,
locationSequence=list(
_sql_sequence_to_pydantic_sequence_iterator(location_sequence_rows)
),
vector=LabwareOffsetVector(
x=row.vector_x,
y=row.vector_y,
z=row.vector_z,
x=offset_row.vector_x,
y=offset_row.vector_y,
z=offset_row.vector_z,
),
)


def _pydantic_to_sql(labware_offset: LabwareOffset) -> dict[str, object]:
def _pydantic_to_sql_offset(labware_offset: StoredLabwareOffset) -> dict[str, object]:
return dict(
offset_id=labware_offset.id,
definition_uri=labware_offset.definitionUri,
location_slot_name=labware_offset.location.slotName.value,
location_module_model=labware_offset.location.moduleModel.value
if labware_offset.location.moduleModel is not None
else None,
location_definition_uri=labware_offset.location.definitionUri,
vector_x=labware_offset.vector.x,
vector_y=labware_offset.vector.y,
vector_z=labware_offset.vector.z,
created_at=labware_offset.createdAt,
active=True,
)


def _pydantic_to_sql_location_sequence_iterator(
labware_offset: StoredLabwareOffset, offset_row_id: int
) -> Iterator[dict[str, object]]:
for index, component in enumerate(labware_offset.locationSequence or []):
if isinstance(component, OnLabwareOffsetSequenceComponent):
yield dict(
offset_id=offset_row_id,
sequence_ordinal=index,
component_kind=component.kind,
primary_component_value=component.labwareUri,
component_value_json=component.model_dump(),
)
elif isinstance(component, OnModuleOffsetSequenceComponent):
yield dict(
offset_id=offset_row_id,
sequence_ordinal=index,
component_kind=component.kind,
primary_component_value=component.moduleModel.value,
component_value_json=component.model_dump(),
)
elif isinstance(component, OnAddressableAreaOffsetSequenceComponent):
yield dict(
offset_id=offset_row_id,
sequence_ordinal=index,
component_kind=component.kind,
primary_component_value=component.addressableAreaName,
component_value_json=component.model_dump(),
)
else:
# TODO: log here
pass
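To show what the row-building helper above produces, here is a hedged sketch. `example` is the hypothetical StoredLabwareOffset from the models.py note earlier, `offset_row_id=7` is arbitrary, and the dict in the comment is approximate (in particular, how model_dump() renders the module-model enum is not verified here).

rows = list(_pydantic_to_sql_location_sequence_iterator(example, offset_row_id=7))

# rows[0] would look roughly like:
# {
#     "offset_id": 7,
#     "sequence_ordinal": 0,
#     "component_kind": "onModule",
#     "primary_component_value": "thermocyclerModuleV2",
#     "component_value_json": {"kind": "onModule", "moduleModel": "thermocyclerModuleV2"},
# }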