From 2410782491fbbe435aca50f9e646a6d9739245a4 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 14 Mar 2023 15:08:27 -0400 Subject: [PATCH 01/34] WIP: created an abstract interface for ProtocolRunner. started JsonRunner --- .../protocol_runner/protocol_runner.py | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 4f2e13dcd74..8b65dd7bde4 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -2,6 +2,8 @@ import asyncio from typing import Iterable, List, NamedTuple, Optional +from typing_extensions import Protocol as TypingProtocol + import anyio from opentrons_shared_data.labware.labware_definition import LabwareDefinition @@ -34,9 +36,7 @@ class ProtocolRunResult(NamedTuple): state_summary: StateSummary -# TODO(mc, 2022-01-11): this class has become bloated. Split into an abstract -# interfaces and several concrete implementations per protocol type -class ProtocolRunner: +class ProtocolRunner(TypingProtocol): """An interface to manage and control a protocol run. The ProtocolRunner is primarily responsible for feeding a ProtocolEngine @@ -48,6 +48,30 @@ class ProtocolRunner: you will need a new ProtocolRunner to do another run. """ + def was_started(self) -> bool: + """Whether the runner has been started. + + This value is latched; once it is True, it will never become False. + """ + + async def load(self, protocol_source: ProtocolSource) -> None: + """Load a ProtocolSource into managed ProtocolEngine. + + Calling this method is only necessary if the runner will be used + to control the run of a file-based protocol. + """ + + async def stop(self) -> None: + """Stop (cancel) the run.""" + + async def run( + self, + protocol_source: Optional[ProtocolSource] = None, + ) -> ProtocolRunResult: + """Run a given protocol to completion.""" + + +class JsonRunner(ProtocolRunner): def __init__( self, protocol_engine: ProtocolEngine, From 290e8600c035194bee034ceef13eed3bc8906c1b Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Thu, 16 Mar 2023 16:57:43 -0400 Subject: [PATCH 02/34] WIP create_protocol_runner --- api/src/opentrons/protocol_runner/__init__.py | 4 +- .../protocol_runner/protocol_runner.py | 107 +++++++++++------- .../protocol_runner/test_protocol_runner.py | 48 ++++++-- 3 files changed, 108 insertions(+), 51 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index a86c10b84ba..5284cfc9386 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -3,11 +3,13 @@ The main export of this module is the ProtocolRunner class. See protocol_runner.py for more details. 
""" -from .protocol_runner import ProtocolRunner, ProtocolRunResult +from .protocol_runner import ProtocolRunner, ProtocolRunResult, create_protocol_runner, JsonRunner from .create_simulating_runner import create_simulating_runner __all__ = [ "ProtocolRunner", "ProtocolRunResult", "create_simulating_runner", + "create_protocol_runner", + "JsonRunner" ] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 8b65dd7bde4..f4757c9b053 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -79,21 +79,18 @@ def __init__( task_queue: Optional[TaskQueue] = None, json_file_reader: Optional[JsonFileReader] = None, json_translator: Optional[JsonTranslator] = None, - legacy_file_reader: Optional[LegacyFileReader] = None, - legacy_context_creator: Optional[LegacyContextCreator] = None, - legacy_executor: Optional[LegacyExecutor] = None, ) -> None: - """Initialize the ProtocolRunner with its dependencies.""" + """Initialize the JsonRunner with its dependencies.""" self._protocol_engine = protocol_engine self._hardware_api = hardware_api self._json_file_reader = json_file_reader or JsonFileReader() self._json_translator = json_translator or JsonTranslator() - self._legacy_file_reader = legacy_file_reader or LegacyFileReader() - self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( - hardware_api=hardware_api, - protocol_engine=protocol_engine, - ) - self._legacy_executor = legacy_executor or LegacyExecutor() + # self._legacy_file_reader = legacy_file_reader or LegacyFileReader() + # self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( + # hardware_api=hardware_api, + # protocol_engine=protocol_engine, + # ) + # self._legacy_executor = legacy_executor or LegacyExecutor() # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) @@ -203,36 +200,68 @@ async def _load_json(self, protocol_source: ProtocolSource) -> None: self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) - def _load_python_or_legacy_json( - self, - protocol_source: ProtocolSource, - labware_definitions: Iterable[LabwareDefinition], - ) -> None: - # fixme(mm, 2022-12-23): This does I/O and compute-bound parsing that will block - # the event loop. Jira RSS-165. - protocol = self._legacy_file_reader.read(protocol_source, labware_definitions) - broker = None - equipment_broker = None - - if protocol.api_level < LEGACY_PYTHON_API_VERSION_CUTOFF: - broker = Broker() - equipment_broker = EquipmentBroker[LegacyLoadInfo]() - - self._protocol_engine.add_plugin( - LegacyContextPlugin(broker=broker, equipment_broker=equipment_broker) - ) - - context = self._legacy_context_creator.create( - protocol=protocol, - broker=broker, - equipment_broker=equipment_broker, - ) - - self._task_queue.set_run_func( - func=self._legacy_executor.execute, - protocol=protocol, - context=context, + # def _load_python_or_legacy_json( + # self, + # protocol_source: ProtocolSource, + # labware_definitions: Iterable[LabwareDefinition], + # ) -> None: + # # fixme(mm, 2022-12-23): This does I/O and compute-bound parsing that will block + # # the event loop. Jira RSS-165. 
+ # protocol = self._legacy_file_reader.read(protocol_source, labware_definitions) + # broker = None + # equipment_broker = None + # + # if protocol.api_level < LEGACY_PYTHON_API_VERSION_CUTOFF: + # broker = Broker() + # equipment_broker = EquipmentBroker[LegacyLoadInfo]() + # + # self._protocol_engine.add_plugin( + # LegacyContextPlugin(broker=broker, equipment_broker=equipment_broker) + # ) + # + # context = self._legacy_context_creator.create( + # protocol=protocol, + # broker=broker, + # equipment_broker=equipment_broker, + # ) + # + # self._task_queue.set_run_func( + # func=self._legacy_executor.execute, + # protocol=protocol, + # context=context, + # ) + + +def create_protocol_runner( + protocol_source: ProtocolSource, + protocol_engine: ProtocolEngine, + hardware_api: HardwareControlAPI, + task_queue: Optional[TaskQueue] = None, + json_file_reader: Optional[JsonFileReader] = None, + json_translator: Optional[JsonTranslator] = None, + legacy_file_reader: Optional[LegacyFileReader] = None, + legacy_context_creator: Optional[LegacyContextCreator] = None, + legacy_executor: Optional[LegacyExecutor] = None, +) -> ProtocolRunner: + """Create a protocol runner.""" + config = protocol_source.config + if ( + isinstance(config, JsonProtocolConfig) + and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF + ): + return JsonRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + json_file_reader=json_file_reader, + json_translator=json_translator, ) + # else: + # self._load_python_or_legacy_json(protocol_source, labware_definitions) + # return ( + # HardwarePipettingHandler(state_view=state_view, hardware_api=hardware_api) + # if state_view.config.use_virtual_pipettes is False + # else VirtualPipettingHandler(state_view=state_view) + # ) async def _yield() -> None: diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 86c814f910d..b4b4d854ae9 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -20,7 +20,7 @@ JsonProtocolConfig, PythonProtocolConfig, ) -from opentrons.protocol_runner import ProtocolRunner +from opentrons.protocol_runner import ProtocolRunner, create_protocol_runner, JsonRunner from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader from opentrons.protocol_runner.json_translator import JsonTranslator @@ -97,26 +97,52 @@ def use_mock_extract_labware_definitions( @pytest.fixture -def subject( +def json_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, json_file_reader: JsonFileReader, json_translator: JsonTranslator, - legacy_file_reader: LegacyFileReader, - legacy_context_creator: LegacyContextCreator, - legacy_executor: LegacyExecutor, -) -> ProtocolRunner: - """Get a ProtocolRunner test subject with mocked dependencies.""" - return ProtocolRunner( +) -> JsonRunner: + """Get a JsonRunner test subject with mocked dependencies.""" + return JsonRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, task_queue=task_queue, json_file_reader=json_file_reader, json_translator=json_translator, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, + ) + + +async def test_create_protocol_runner( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, + 
json_file_reader: JsonFileReader, + json_translator: JsonTranslator, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + legacy_executor: LegacyExecutor, +) -> None: + """It should return protocol runner type depending on the config.""" + json_protocol_source = ProtocolSource( + directory=Path("/dev/null"), + main_file=Path("/dev/null/abc.json"), + files=[], + metadata={}, + robot_type="OT-2 Standard", + config=JsonProtocolConfig(schema_version=6), + ) + + assert isinstance( + create_protocol_runner( + protocol_source=json_protocol_source, + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator), + JsonRunner, ) From 842a99ef1daed34986b3f4db644ebe85c8361c2d Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Fri, 17 Mar 2023 15:18:16 -0400 Subject: [PATCH 03/34] test json_runner and finished logic for json_runner --- api/src/opentrons/protocol_runner/__init__.py | 9 +- .../protocol_runner/protocol_runner.py | 87 ++-- .../protocol_runner/test_protocol_runner.py | 409 +++++++++--------- 3 files changed, 258 insertions(+), 247 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 5284cfc9386..1ecf70ed587 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -3,7 +3,12 @@ The main export of this module is the ProtocolRunner class. See protocol_runner.py for more details. """ -from .protocol_runner import ProtocolRunner, ProtocolRunResult, create_protocol_runner, JsonRunner +from .protocol_runner import ( + ProtocolRunner, + ProtocolRunResult, + create_protocol_runner, + JsonRunner, +) from .create_simulating_runner import create_simulating_runner __all__ = [ @@ -11,5 +16,5 @@ "ProtocolRunResult", "create_simulating_runner", "create_protocol_runner", - "JsonRunner" + "JsonRunner", ] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index f4757c9b053..899fd06418b 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -118,13 +118,49 @@ async def load(self, protocol_source: ProtocolSource) -> None: # definitions, so we don't need to yield here. self._protocol_engine.add_labware_definition(definition) - if ( - isinstance(config, JsonProtocolConfig) - and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF - ): - await self._load_json(protocol_source) - else: - self._load_python_or_legacy_json(protocol_source, labware_definitions) + protocol = await anyio.to_thread.run_sync( + self._json_file_reader.read, + protocol_source, + ) + + commands = await anyio.to_thread.run_sync( + self._json_translator.translate_commands, + protocol, + ) + + # Add commands and liquids to the ProtocolEngine. + # + # We yield on every iteration so that loading large protocols doesn't block the + # event loop. With a 24-step 10k-command protocol (See RQA-443), adding all the + # commands can take 3 to 7 seconds. + # + # It wouldn't be safe to do this in a worker thread because each addition + # invokes the ProtocolEngine's ChangeNotifier machinery, which is not + # thread-safe. 
+ liquids = await anyio.to_thread.run_sync( + self._json_translator.translate_liquids, protocol + ) + for liquid in liquids: + self._protocol_engine.add_liquid( + id=liquid.id, + name=liquid.displayName, + description=liquid.description, + color=liquid.displayColor, + ) + await _yield() + for command in commands: + self._protocol_engine.add_command(request=command) + await _yield() + + self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) + + # if ( + # isinstance(config, JsonProtocolConfig) + # and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF + # ): + # await self._load_json(protocol_source) + # else: + # self._load_python_or_legacy_json(protocol_source, labware_definitions) def play(self) -> None: """Start or resume the run.""" @@ -163,43 +199,6 @@ async def run( commands = self._protocol_engine.state_view.commands.get_all() return ProtocolRunResult(commands=commands, state_summary=run_data) - async def _load_json(self, protocol_source: ProtocolSource) -> None: - protocol = await anyio.to_thread.run_sync( - self._json_file_reader.read, - protocol_source, - ) - - commands = await anyio.to_thread.run_sync( - self._json_translator.translate_commands, - protocol, - ) - - # Add commands and liquids to the ProtocolEngine. - # - # We yield on every iteration so that loading large protocols doesn't block the - # event loop. With a 24-step 10k-command protocol (See RQA-443), adding all the - # commands can take 3 to 7 seconds. - # - # It wouldn't be safe to do this in a worker thread because each addition - # invokes the ProtocolEngine's ChangeNotifier machinery, which is not - # thread-safe. - liquids = await anyio.to_thread.run_sync( - self._json_translator.translate_liquids, protocol - ) - for liquid in liquids: - self._protocol_engine.add_liquid( - id=liquid.id, - name=liquid.displayName, - description=liquid.description, - color=liquid.displayColor, - ) - await _yield() - for command in commands: - self._protocol_engine.add_command(request=command) - await _yield() - - self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) - # def _load_python_or_legacy_json( # self, # protocol_source: ProtocolSource, diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index b4b4d854ae9..1e6223545df 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -141,59 +141,66 @@ async def test_create_protocol_runner( hardware_api=hardware_api, task_queue=task_queue, json_file_reader=json_file_reader, - json_translator=json_translator), + json_translator=json_translator, + ), JsonRunner, ) -async def test_play_starts_run( +# @pytest.mark.parametrize( +# argnames=["subject"], +# argvalues=[ +# (pytest.lazy_fixture("json_subject")), +# ], +# ) +async def test_play_starts_run_json_runner( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should start a protocol run with play.""" - subject.play() + json_subject.play() decoy.verify(protocol_engine.play(), times=1) -async def test_pause( +async def test_pause_json_runner( decoy: Decoy, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should pause a protocol run with pause.""" - subject.pause() + json_subject.pause() decoy.verify(protocol_engine.pause(), times=1) -async def test_stop( +async def 
test_stop_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should halt a protocol run with stop.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) - subject.play() - await subject.stop() + json_subject.play() + await json_subject.stop() decoy.verify(await protocol_engine.stop(), times=1) -async def test_stop_never_started( +async def test_stop_never_started_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + json_subject: ProtocolRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) - await subject.stop() + await json_subject.stop() decoy.verify( await protocol_engine.finish(drop_tips_and_home=False, set_run_status=False), @@ -201,21 +208,21 @@ async def test_stop_never_started( ) -async def test_run( +async def test_run_json_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_subject: ProtocolRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert subject.was_started() is False - await subject.run() - assert subject.was_started() is True + assert json_subject.was_started() is False + await json_subject.run() + assert json_subject.was_started() is True decoy.verify( await hardware_api.home(), @@ -225,13 +232,13 @@ async def test_run( ) -async def test_load_json( +async def test_load_json_runner( decoy: Decoy, json_file_reader: JsonFileReader, json_translator: JsonTranslator, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_subject: ProtocolRunner, ) -> None: """It should load a JSON protocol file.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -272,7 +279,7 @@ async def test_load_json( decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) - await subject.load(json_protocol_source) + await json_subject.load(json_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -303,7 +310,7 @@ async def test_load_json_liquids_ff_on( json_translator: JsonTranslator, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_subject: ProtocolRunner, ) -> None: """It should load a JSON protocol file.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -338,7 +345,7 @@ async def test_load_json_liquids_ff_on( decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) - await subject.load(json_protocol_source) + await json_subject.load(json_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -356,180 +363,180 @@ async def test_load_json_liquids_ff_on( ) -async def test_load_legacy_python( - decoy: Decoy, - legacy_file_reader: LegacyFileReader, - legacy_context_creator: LegacyContextCreator, - legacy_executor: LegacyExecutor, - task_queue: TaskQueue, - protocol_engine: ProtocolEngine, - subject: ProtocolRunner, -) -> None: - """It should load a legacy 
context-based Python protocol.""" - labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] - - legacy_protocol_source = ProtocolSource( - directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.py"), - files=[], - metadata={}, - robot_type="OT-2 Standard", - config=PythonProtocolConfig(api_version=APIVersion(2, 11)), - ) - - extra_labware = {"definition-uri": cast(LegacyLabwareDefinition, {})} - - legacy_protocol = LegacyPythonProtocol( - text="", - contents="", - filename="protocol.py", - api_level=APIVersion(2, 11), - metadata={"foo": "bar"}, - bundled_labware=None, - bundled_data=None, - bundled_python=None, - extra_labware=extra_labware, - ) - - legacy_context = decoy.mock(cls=LegacyProtocolContext) - - decoy.when( - await protocol_reader.extract_labware_definitions(legacy_protocol_source) - ).then_return([labware_definition]) - decoy.when( - legacy_file_reader.read( - protocol_source=legacy_protocol_source, - labware_definitions=[labware_definition], - ) - ).then_return(legacy_protocol) - decoy.when( - legacy_context_creator.create( - protocol=legacy_protocol, - broker=matchers.IsA(Broker), - equipment_broker=matchers.IsA(EquipmentBroker), - ) - ).then_return(legacy_context) - - await subject.load(legacy_protocol_source) - - decoy.verify( - protocol_engine.add_labware_definition(labware_definition), - protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), - task_queue.set_run_func( - func=legacy_executor.execute, - protocol=legacy_protocol, - context=legacy_context, - ), - ) - - -async def test_load_python_with_pe_papi_core( - decoy: Decoy, - legacy_file_reader: LegacyFileReader, - legacy_context_creator: LegacyContextCreator, - protocol_engine: ProtocolEngine, - subject: ProtocolRunner, -) -> None: - """It should load a legacy context-based Python protocol.""" - legacy_protocol_source = ProtocolSource( - directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.py"), - files=[], - metadata={}, - robot_type="OT-2 Standard", - config=PythonProtocolConfig(api_version=APIVersion(2, 14)), - ) - - legacy_protocol = LegacyPythonProtocol( - text="", - contents="", - filename="protocol.py", - api_level=APIVersion(2, 14), - metadata={"foo": "bar"}, - bundled_labware=None, - bundled_data=None, - bundled_python=None, - extra_labware=None, - ) - - legacy_context = decoy.mock(cls=LegacyProtocolContext) - - decoy.when( - await protocol_reader.extract_labware_definitions(legacy_protocol_source) - ).then_return([]) - decoy.when( - legacy_file_reader.read( - protocol_source=legacy_protocol_source, labware_definitions=[] - ) - ).then_return(legacy_protocol) - decoy.when( - legacy_context_creator.create( - protocol=legacy_protocol, broker=None, equipment_broker=None - ) - ).then_return(legacy_context) - - await subject.load(legacy_protocol_source) - - decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) - - -async def test_load_legacy_json( - decoy: Decoy, - legacy_file_reader: LegacyFileReader, - legacy_context_creator: LegacyContextCreator, - legacy_executor: LegacyExecutor, - task_queue: TaskQueue, - protocol_engine: ProtocolEngine, - subject: ProtocolRunner, -) -> None: - """It should load a legacy context-based JSON protocol.""" - labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] - - legacy_protocol_source = ProtocolSource( - directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.json"), - files=[], - metadata={}, - robot_type="OT-2 Standard", - config=JsonProtocolConfig(schema_version=5), - ) - - 
legacy_protocol = LegacyJsonProtocol( - text="{}", - contents=cast(LegacyJsonProtocolDict, {}), - filename="protocol.json", - api_level=APIVersion(2, 11), - schema_version=5, - metadata={"protocolName": "A Very Impressive Protocol"}, - ) - - legacy_context = decoy.mock(cls=LegacyProtocolContext) - - decoy.when( - await protocol_reader.extract_labware_definitions(legacy_protocol_source) - ).then_return([labware_definition]) - decoy.when( - legacy_file_reader.read( - protocol_source=legacy_protocol_source, - labware_definitions=[labware_definition], - ) - ).then_return(legacy_protocol) - decoy.when( - legacy_context_creator.create( - legacy_protocol, - broker=matchers.IsA(Broker), - equipment_broker=matchers.IsA(EquipmentBroker), - ) - ).then_return(legacy_context) - - await subject.load(legacy_protocol_source) - - decoy.verify( - protocol_engine.add_labware_definition(labware_definition), - protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), - task_queue.set_run_func( - func=legacy_executor.execute, - protocol=legacy_protocol, - context=legacy_context, - ), - ) +# async def test_load_legacy_python( +# decoy: Decoy, +# legacy_file_reader: LegacyFileReader, +# legacy_context_creator: LegacyContextCreator, +# legacy_executor: LegacyExecutor, +# task_queue: TaskQueue, +# protocol_engine: ProtocolEngine, +# subject: ProtocolRunner, +# ) -> None: +# """It should load a legacy context-based Python protocol.""" +# labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] +# +# legacy_protocol_source = ProtocolSource( +# directory=Path("/dev/null"), +# main_file=Path("/dev/null/abc.py"), +# files=[], +# metadata={}, +# robot_type="OT-2 Standard", +# config=PythonProtocolConfig(api_version=APIVersion(2, 11)), +# ) +# +# extra_labware = {"definition-uri": cast(LegacyLabwareDefinition, {})} +# +# legacy_protocol = LegacyPythonProtocol( +# text="", +# contents="", +# filename="protocol.py", +# api_level=APIVersion(2, 11), +# metadata={"foo": "bar"}, +# bundled_labware=None, +# bundled_data=None, +# bundled_python=None, +# extra_labware=extra_labware, +# ) +# +# legacy_context = decoy.mock(cls=LegacyProtocolContext) +# +# decoy.when( +# await protocol_reader.extract_labware_definitions(legacy_protocol_source) +# ).then_return([labware_definition]) +# decoy.when( +# legacy_file_reader.read( +# protocol_source=legacy_protocol_source, +# labware_definitions=[labware_definition], +# ) +# ).then_return(legacy_protocol) +# decoy.when( +# legacy_context_creator.create( +# protocol=legacy_protocol, +# broker=matchers.IsA(Broker), +# equipment_broker=matchers.IsA(EquipmentBroker), +# ) +# ).then_return(legacy_context) +# +# await subject.load(legacy_protocol_source) +# +# decoy.verify( +# protocol_engine.add_labware_definition(labware_definition), +# protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), +# task_queue.set_run_func( +# func=legacy_executor.execute, +# protocol=legacy_protocol, +# context=legacy_context, +# ), +# ) +# +# +# async def test_load_python_with_pe_papi_core( +# decoy: Decoy, +# legacy_file_reader: LegacyFileReader, +# legacy_context_creator: LegacyContextCreator, +# protocol_engine: ProtocolEngine, +# subject: ProtocolRunner, +# ) -> None: +# """It should load a legacy context-based Python protocol.""" +# legacy_protocol_source = ProtocolSource( +# directory=Path("/dev/null"), +# main_file=Path("/dev/null/abc.py"), +# files=[], +# metadata={}, +# robot_type="OT-2 Standard", +# config=PythonProtocolConfig(api_version=APIVersion(2, 14)), +# ) +# +# 
legacy_protocol = LegacyPythonProtocol( +# text="", +# contents="", +# filename="protocol.py", +# api_level=APIVersion(2, 14), +# metadata={"foo": "bar"}, +# bundled_labware=None, +# bundled_data=None, +# bundled_python=None, +# extra_labware=None, +# ) +# +# legacy_context = decoy.mock(cls=LegacyProtocolContext) +# +# decoy.when( +# await protocol_reader.extract_labware_definitions(legacy_protocol_source) +# ).then_return([]) +# decoy.when( +# legacy_file_reader.read( +# protocol_source=legacy_protocol_source, labware_definitions=[] +# ) +# ).then_return(legacy_protocol) +# decoy.when( +# legacy_context_creator.create( +# protocol=legacy_protocol, broker=None, equipment_broker=None +# ) +# ).then_return(legacy_context) +# +# await subject.load(legacy_protocol_source) +# +# decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) +# +# +# async def test_load_legacy_json( +# decoy: Decoy, +# legacy_file_reader: LegacyFileReader, +# legacy_context_creator: LegacyContextCreator, +# legacy_executor: LegacyExecutor, +# task_queue: TaskQueue, +# protocol_engine: ProtocolEngine, +# subject: ProtocolRunner, +# ) -> None: +# """It should load a legacy context-based JSON protocol.""" +# labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] +# +# legacy_protocol_source = ProtocolSource( +# directory=Path("/dev/null"), +# main_file=Path("/dev/null/abc.json"), +# files=[], +# metadata={}, +# robot_type="OT-2 Standard", +# config=JsonProtocolConfig(schema_version=5), +# ) +# +# legacy_protocol = LegacyJsonProtocol( +# text="{}", +# contents=cast(LegacyJsonProtocolDict, {}), +# filename="protocol.json", +# api_level=APIVersion(2, 11), +# schema_version=5, +# metadata={"protocolName": "A Very Impressive Protocol"}, +# ) +# +# legacy_context = decoy.mock(cls=LegacyProtocolContext) +# +# decoy.when( +# await protocol_reader.extract_labware_definitions(legacy_protocol_source) +# ).then_return([labware_definition]) +# decoy.when( +# legacy_file_reader.read( +# protocol_source=legacy_protocol_source, +# labware_definitions=[labware_definition], +# ) +# ).then_return(legacy_protocol) +# decoy.when( +# legacy_context_creator.create( +# legacy_protocol, +# broker=matchers.IsA(Broker), +# equipment_broker=matchers.IsA(EquipmentBroker), +# ) +# ).then_return(legacy_context) +# +# await subject.load(legacy_protocol_source) +# +# decoy.verify( +# protocol_engine.add_labware_definition(labware_definition), +# protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), +# task_queue.set_run_func( +# func=legacy_executor.execute, +# protocol=legacy_protocol, +# context=legacy_context, +# ), +# ) From f5b532560693750a4dd840989c2eb875046f7593 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Fri, 17 Mar 2023 16:54:01 -0400 Subject: [PATCH 04/34] WIP python and legacy runner --- api/src/opentrons/protocol_runner/__init__.py | 2 + .../protocol_runner/protocol_runner.py | 66 +++++++++------ .../protocol_runner/test_protocol_runner.py | 81 ++++++++++++++++--- 3 files changed, 116 insertions(+), 33 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 1ecf70ed587..4b5362f3bc6 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -8,6 +8,7 @@ ProtocolRunResult, create_protocol_runner, JsonRunner, + PythonAndLegacyRunner, ) from .create_simulating_runner import create_simulating_runner @@ -17,4 +18,5 @@ "create_simulating_runner", 
"create_protocol_runner", "JsonRunner", + "PythonAndLegacyRunner", ] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 899fd06418b..4f810ff3f03 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -1,6 +1,6 @@ """Protocol run control and management.""" import asyncio -from typing import Iterable, List, NamedTuple, Optional +from typing import Iterable, List, NamedTuple, Optional, Union from typing_extensions import Protocol as TypingProtocol @@ -12,7 +12,11 @@ from opentrons.equipment_broker import EquipmentBroker from opentrons.hardware_control import HardwareControlAPI from opentrons import protocol_reader -from opentrons.protocol_reader import ProtocolSource, JsonProtocolConfig +from opentrons.protocol_reader import ( + ProtocolSource, + JsonProtocolConfig, + PythonProtocolConfig, +) from opentrons.protocol_engine import ProtocolEngine, StateSummary, Command from .task_queue import TaskQueue @@ -71,6 +75,30 @@ async def run( """Run a given protocol to completion.""" +class PythonAndLegacyRunner(ProtocolRunner): + def __init__( + self, + protocol_engine: ProtocolEngine, + hardware_api: HardwareControlAPI, + task_queue: Optional[TaskQueue] = None, + legacy_file_reader: Optional[LegacyFileReader] = None, + legacy_context_creator: Optional[LegacyContextCreator] = None, + legacy_executor: Optional[LegacyExecutor] = None, + ) -> None: + """Initialize the JsonRunner with its dependencies.""" + self._protocol_engine = protocol_engine + self._hardware_api = hardware_api + self._legacy_file_reader = legacy_file_reader or LegacyFileReader() + self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( + hardware_api=hardware_api, + protocol_engine=protocol_engine, + ) + self._legacy_executor = legacy_executor or LegacyExecutor() + # TODO(mc, 2022-01-11): replace task queue with specific implementations + # of runner interface + self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) + + class JsonRunner(ProtocolRunner): def __init__( self, @@ -108,8 +136,6 @@ async def load(self, protocol_source: ProtocolSource) -> None: Calling this method is only necessary if the runner will be used to control the run of a file-based protocol. 
""" - config = protocol_source.config - labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -154,14 +180,6 @@ async def load(self, protocol_source: ProtocolSource) -> None: self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) - # if ( - # isinstance(config, JsonProtocolConfig) - # and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF - # ): - # await self._load_json(protocol_source) - # else: - # self._load_python_or_legacy_json(protocol_source, labware_definitions) - def play(self) -> None: """Start or resume the run.""" self._protocol_engine.play() @@ -232,7 +250,7 @@ async def run( def create_protocol_runner( - protocol_source: ProtocolSource, + protocol_config: Union[JsonProtocolConfig, PythonProtocolConfig], protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI, task_queue: Optional[TaskQueue] = None, @@ -243,24 +261,26 @@ def create_protocol_runner( legacy_executor: Optional[LegacyExecutor] = None, ) -> ProtocolRunner: """Create a protocol runner.""" - config = protocol_source.config if ( - isinstance(config, JsonProtocolConfig) - and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF + isinstance(protocol_config, JsonProtocolConfig) + and protocol_config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF ): return JsonRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, json_file_reader=json_file_reader, json_translator=json_translator, + task_queue=task_queue, + ) + else: + return PythonAndLegacyRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, ) - # else: - # self._load_python_or_legacy_json(protocol_source, labware_definitions) - # return ( - # HardwarePipettingHandler(state_view=state_view, hardware_api=hardware_api) - # if state_view.config.use_virtual_pipettes is False - # else VirtualPipettingHandler(state_view=state_view) - # ) async def _yield() -> None: diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 1e6223545df..86010314643 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -20,7 +20,12 @@ JsonProtocolConfig, PythonProtocolConfig, ) -from opentrons.protocol_runner import ProtocolRunner, create_protocol_runner, JsonRunner +from opentrons.protocol_runner import ( + ProtocolRunner, + create_protocol_runner, + JsonRunner, + PythonAndLegacyRunner, +) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader from opentrons.protocol_runner.json_translator import JsonTranslator @@ -114,6 +119,26 @@ def json_subject( ) +@pytest.fixture +def legacy_subject( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + legacy_executor: LegacyExecutor, +) -> PythonAndLegacyRunner: + """Get a JsonRunner test subject with mocked dependencies.""" + return PythonAndLegacyRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, + ) + + async def 
test_create_protocol_runner( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, @@ -125,25 +150,61 @@ async def test_create_protocol_runner( legacy_executor: LegacyExecutor, ) -> None: """It should return protocol runner type depending on the config.""" - json_protocol_source = ProtocolSource( - directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.json"), - files=[], - metadata={}, - robot_type="OT-2 Standard", - config=JsonProtocolConfig(schema_version=6), + assert isinstance( + create_protocol_runner( + protocol_config=JsonProtocolConfig(schema_version=6), + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + ), + JsonRunner, ) assert isinstance( create_protocol_runner( - protocol_source=json_protocol_source, + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 14)), protocol_engine=protocol_engine, hardware_api=hardware_api, task_queue=task_queue, json_file_reader=json_file_reader, json_translator=json_translator, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, ), - JsonRunner, + PythonAndLegacyRunner, + ) + + assert isinstance( + create_protocol_runner( + protocol_config=JsonProtocolConfig(schema_version=5), + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, + ), + PythonAndLegacyRunner, + ) + + assert isinstance( + create_protocol_runner( + protocol_config=JsonProtocolConfig(schema_version=5), + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, + ), + PythonAndLegacyRunner, ) From 7de1daeb3a047bb6f10aeda762abc94d8cac4047 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Mon, 20 Mar 2023 14:56:00 -0400 Subject: [PATCH 05/34] python and legacy runner implementation --- .../protocol_runner/protocol_runner.py | 84 ++++ .../protocol_runner/test_protocol_runner.py | 372 ++++++++---------- 2 files changed, 242 insertions(+), 214 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 4f810ff3f03..34639d4ed65 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -98,6 +98,90 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) + def was_started(self) -> bool: + """Whether the runner has been started. + + This value is latched; once it is True, it will never become False. + """ + return self._protocol_engine.state_view.commands.has_been_played() + + async def load(self, protocol_source: ProtocolSource) -> None: + """Load a ProtocolSource into managed ProtocolEngine. + + Calling this method is only necessary if the runner will be used + to control the run of a file-based protocol. 
+ """ + labware_definitions = await protocol_reader.extract_labware_definitions( + protocol_source=protocol_source + ) + for definition in labware_definitions: + # Assume adding a labware definition is fast and there are not many labware + # definitions, so we don't need to yield here. + self._protocol_engine.add_labware_definition(definition) + + # fixme(mm, 2022-12-23): This does I/O and compute-bound parsing that will block + # the event loop. Jira RSS-165. + protocol = self._legacy_file_reader.read(protocol_source, labware_definitions) + broker = None + equipment_broker = None + + if protocol.api_level < LEGACY_PYTHON_API_VERSION_CUTOFF: + broker = Broker() + equipment_broker = EquipmentBroker[LegacyLoadInfo]() + + self._protocol_engine.add_plugin( + LegacyContextPlugin(broker=broker, equipment_broker=equipment_broker) + ) + + context = self._legacy_context_creator.create( + protocol=protocol, + broker=broker, + equipment_broker=equipment_broker, + ) + + self._task_queue.set_run_func( + func=self._legacy_executor.execute, + protocol=protocol, + context=context, + ) + + def play(self) -> None: + """Start or resume the run.""" + self._protocol_engine.play() + + def pause(self) -> None: + """Pause the run.""" + self._protocol_engine.pause() + + async def stop(self) -> None: + """Stop (cancel) the run.""" + if self.was_started(): + await self._protocol_engine.stop() + else: + await self._protocol_engine.finish( + drop_tips_and_home=False, + set_run_status=False, + ) + + async def run( + self, + protocol_source: Optional[ProtocolSource] = None, + ) -> ProtocolRunResult: + """Run a given protocol to completion.""" + # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` + # currently `protocol_source` arg is only used by tests + if protocol_source: + await self.load(protocol_source) + + await self._hardware_api.home() + self.play() + self._task_queue.start() + await self._task_queue.join() + + run_data = self._protocol_engine.state_view.get_summary() + commands = self._protocol_engine.state_view.commands.get_all() + return ProtocolRunResult(commands=commands, state_summary=run_data) + class JsonRunner(ProtocolRunner): def __init__( diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 86010314643..ff646bd7910 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -330,7 +330,7 @@ async def test_load_json_runner( ] liquids: List[Liquid] = [ - Liquid(id="water-id", displayName="water", description=" water desc") + Liquid(id="water-id", displayName="water", description="water desc") ] decoy.when( @@ -344,6 +344,9 @@ async def test_load_json_runner( decoy.verify( protocol_engine.add_labware_definition(labware_definition), + protocol_engine.add_liquid( + id="water-id", name="water", description="water desc", color=None + ), protocol_engine.add_command( request=pe_commands.WaitForResumeCreate( params=pe_commands.WaitForResumeParams(message="hello") @@ -365,239 +368,180 @@ async def test_load_json_runner( ) -async def test_load_json_liquids_ff_on( +async def test_load_legacy_python( decoy: Decoy, - json_file_reader: JsonFileReader, - json_translator: JsonTranslator, - protocol_engine: ProtocolEngine, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + legacy_executor: LegacyExecutor, task_queue: TaskQueue, - json_subject: ProtocolRunner, + protocol_engine: ProtocolEngine, + 
legacy_subject: ProtocolRunner, ) -> None: - """It should load a JSON protocol file.""" + """It should load a legacy context-based Python protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] - json_protocol_source = ProtocolSource( + legacy_protocol_source = ProtocolSource( directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.json"), + main_file=Path("/dev/null/abc.py"), files=[], metadata={}, robot_type="OT-2 Standard", - config=JsonProtocolConfig(schema_version=6), + config=PythonProtocolConfig(api_version=APIVersion(2, 11)), ) - json_protocol = ProtocolSchemaV6.construct() # type: ignore[call-arg] - - commands: List[pe_commands.CommandCreate] = [ - pe_commands.LoadLiquidCreate( - params=pe_commands.LoadLiquidParams( - liquidId="water-id", labwareId="labware-id", volumeByWell={"A1": 30} - ) - ), - ] + extra_labware = {"definition-uri": cast(LegacyLabwareDefinition, {})} + + legacy_protocol = LegacyPythonProtocol( + text="", + contents="", + filename="protocol.py", + api_level=APIVersion(2, 11), + metadata={"foo": "bar"}, + bundled_labware=None, + bundled_data=None, + bundled_python=None, + extra_labware=extra_labware, + ) - liquids: List[Liquid] = [ - Liquid(id="water-id", displayName="water", description="water desc") - ] + legacy_context = decoy.mock(cls=LegacyProtocolContext) decoy.when( - await protocol_reader.extract_labware_definitions(json_protocol_source) + await protocol_reader.extract_labware_definitions(legacy_protocol_source) ).then_return([labware_definition]) - decoy.when(json_file_reader.read(json_protocol_source)).then_return(json_protocol) - decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) - decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) + decoy.when( + legacy_file_reader.read( + protocol_source=legacy_protocol_source, + labware_definitions=[labware_definition], + ) + ).then_return(legacy_protocol) + decoy.when( + legacy_context_creator.create( + protocol=legacy_protocol, + broker=matchers.IsA(Broker), + equipment_broker=matchers.IsA(EquipmentBroker), + ) + ).then_return(legacy_context) - await json_subject.load(json_protocol_source) + await legacy_subject.load(legacy_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), - protocol_engine.add_liquid( - id="water-id", name="water", description="water desc", color=None - ), - protocol_engine.add_command( - request=pe_commands.LoadLiquidCreate( - params=pe_commands.LoadLiquidParams( - liquidId="water-id", labwareId="labware-id", volumeByWell={"A1": 30} - ) - ), + protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), + task_queue.set_run_func( + func=legacy_executor.execute, + protocol=legacy_protocol, + context=legacy_context, ), - task_queue.set_run_func(func=protocol_engine.wait_until_complete), ) -# async def test_load_legacy_python( -# decoy: Decoy, -# legacy_file_reader: LegacyFileReader, -# legacy_context_creator: LegacyContextCreator, -# legacy_executor: LegacyExecutor, -# task_queue: TaskQueue, -# protocol_engine: ProtocolEngine, -# subject: ProtocolRunner, -# ) -> None: -# """It should load a legacy context-based Python protocol.""" -# labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] -# -# legacy_protocol_source = ProtocolSource( -# directory=Path("/dev/null"), -# main_file=Path("/dev/null/abc.py"), -# files=[], -# metadata={}, -# robot_type="OT-2 Standard", -# config=PythonProtocolConfig(api_version=APIVersion(2, 11)), -# ) -# -# 
extra_labware = {"definition-uri": cast(LegacyLabwareDefinition, {})} -# -# legacy_protocol = LegacyPythonProtocol( -# text="", -# contents="", -# filename="protocol.py", -# api_level=APIVersion(2, 11), -# metadata={"foo": "bar"}, -# bundled_labware=None, -# bundled_data=None, -# bundled_python=None, -# extra_labware=extra_labware, -# ) -# -# legacy_context = decoy.mock(cls=LegacyProtocolContext) -# -# decoy.when( -# await protocol_reader.extract_labware_definitions(legacy_protocol_source) -# ).then_return([labware_definition]) -# decoy.when( -# legacy_file_reader.read( -# protocol_source=legacy_protocol_source, -# labware_definitions=[labware_definition], -# ) -# ).then_return(legacy_protocol) -# decoy.when( -# legacy_context_creator.create( -# protocol=legacy_protocol, -# broker=matchers.IsA(Broker), -# equipment_broker=matchers.IsA(EquipmentBroker), -# ) -# ).then_return(legacy_context) -# -# await subject.load(legacy_protocol_source) -# -# decoy.verify( -# protocol_engine.add_labware_definition(labware_definition), -# protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), -# task_queue.set_run_func( -# func=legacy_executor.execute, -# protocol=legacy_protocol, -# context=legacy_context, -# ), -# ) -# -# -# async def test_load_python_with_pe_papi_core( -# decoy: Decoy, -# legacy_file_reader: LegacyFileReader, -# legacy_context_creator: LegacyContextCreator, -# protocol_engine: ProtocolEngine, -# subject: ProtocolRunner, -# ) -> None: -# """It should load a legacy context-based Python protocol.""" -# legacy_protocol_source = ProtocolSource( -# directory=Path("/dev/null"), -# main_file=Path("/dev/null/abc.py"), -# files=[], -# metadata={}, -# robot_type="OT-2 Standard", -# config=PythonProtocolConfig(api_version=APIVersion(2, 14)), -# ) -# -# legacy_protocol = LegacyPythonProtocol( -# text="", -# contents="", -# filename="protocol.py", -# api_level=APIVersion(2, 14), -# metadata={"foo": "bar"}, -# bundled_labware=None, -# bundled_data=None, -# bundled_python=None, -# extra_labware=None, -# ) -# -# legacy_context = decoy.mock(cls=LegacyProtocolContext) -# -# decoy.when( -# await protocol_reader.extract_labware_definitions(legacy_protocol_source) -# ).then_return([]) -# decoy.when( -# legacy_file_reader.read( -# protocol_source=legacy_protocol_source, labware_definitions=[] -# ) -# ).then_return(legacy_protocol) -# decoy.when( -# legacy_context_creator.create( -# protocol=legacy_protocol, broker=None, equipment_broker=None -# ) -# ).then_return(legacy_context) -# -# await subject.load(legacy_protocol_source) -# -# decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) -# -# -# async def test_load_legacy_json( -# decoy: Decoy, -# legacy_file_reader: LegacyFileReader, -# legacy_context_creator: LegacyContextCreator, -# legacy_executor: LegacyExecutor, -# task_queue: TaskQueue, -# protocol_engine: ProtocolEngine, -# subject: ProtocolRunner, -# ) -> None: -# """It should load a legacy context-based JSON protocol.""" -# labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] -# -# legacy_protocol_source = ProtocolSource( -# directory=Path("/dev/null"), -# main_file=Path("/dev/null/abc.json"), -# files=[], -# metadata={}, -# robot_type="OT-2 Standard", -# config=JsonProtocolConfig(schema_version=5), -# ) -# -# legacy_protocol = LegacyJsonProtocol( -# text="{}", -# contents=cast(LegacyJsonProtocolDict, {}), -# filename="protocol.json", -# api_level=APIVersion(2, 11), -# schema_version=5, -# metadata={"protocolName": "A Very Impressive 
Protocol"}, -# ) -# -# legacy_context = decoy.mock(cls=LegacyProtocolContext) -# -# decoy.when( -# await protocol_reader.extract_labware_definitions(legacy_protocol_source) -# ).then_return([labware_definition]) -# decoy.when( -# legacy_file_reader.read( -# protocol_source=legacy_protocol_source, -# labware_definitions=[labware_definition], -# ) -# ).then_return(legacy_protocol) -# decoy.when( -# legacy_context_creator.create( -# legacy_protocol, -# broker=matchers.IsA(Broker), -# equipment_broker=matchers.IsA(EquipmentBroker), -# ) -# ).then_return(legacy_context) -# -# await subject.load(legacy_protocol_source) -# -# decoy.verify( -# protocol_engine.add_labware_definition(labware_definition), -# protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), -# task_queue.set_run_func( -# func=legacy_executor.execute, -# protocol=legacy_protocol, -# context=legacy_context, -# ), -# ) +async def test_load_python_with_pe_papi_core( + decoy: Decoy, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + protocol_engine: ProtocolEngine, + legacy_subject: ProtocolRunner, +) -> None: + """It should load a legacy context-based Python protocol.""" + legacy_protocol_source = ProtocolSource( + directory=Path("/dev/null"), + main_file=Path("/dev/null/abc.py"), + files=[], + metadata={}, + robot_type="OT-2 Standard", + config=PythonProtocolConfig(api_version=APIVersion(2, 14)), + ) + + legacy_protocol = LegacyPythonProtocol( + text="", + contents="", + filename="protocol.py", + api_level=APIVersion(2, 14), + metadata={"foo": "bar"}, + bundled_labware=None, + bundled_data=None, + bundled_python=None, + extra_labware=None, + ) + + legacy_context = decoy.mock(cls=LegacyProtocolContext) + + decoy.when( + await protocol_reader.extract_labware_definitions(legacy_protocol_source) + ).then_return([]) + decoy.when( + legacy_file_reader.read( + protocol_source=legacy_protocol_source, labware_definitions=[] + ) + ).then_return(legacy_protocol) + decoy.when( + legacy_context_creator.create( + protocol=legacy_protocol, broker=None, equipment_broker=None + ) + ).then_return(legacy_context) + + await legacy_subject.load(legacy_protocol_source) + + decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) + + +async def test_load_legacy_json( + decoy: Decoy, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + legacy_executor: LegacyExecutor, + task_queue: TaskQueue, + protocol_engine: ProtocolEngine, + legacy_subject: ProtocolRunner, +) -> None: + """It should load a legacy context-based JSON protocol.""" + labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] + + legacy_protocol_source = ProtocolSource( + directory=Path("/dev/null"), + main_file=Path("/dev/null/abc.json"), + files=[], + metadata={}, + robot_type="OT-2 Standard", + config=JsonProtocolConfig(schema_version=5), + ) + + legacy_protocol = LegacyJsonProtocol( + text="{}", + contents=cast(LegacyJsonProtocolDict, {}), + filename="protocol.json", + api_level=APIVersion(2, 11), + schema_version=5, + metadata={"protocolName": "A Very Impressive Protocol"}, + ) + + legacy_context = decoy.mock(cls=LegacyProtocolContext) + + decoy.when( + await protocol_reader.extract_labware_definitions(legacy_protocol_source) + ).then_return([labware_definition]) + decoy.when( + legacy_file_reader.read( + protocol_source=legacy_protocol_source, + labware_definitions=[labware_definition], + ) + ).then_return(legacy_protocol) + decoy.when( + 
legacy_context_creator.create( + legacy_protocol, + broker=matchers.IsA(Broker), + equipment_broker=matchers.IsA(EquipmentBroker), + ) + ).then_return(legacy_context) + + await legacy_subject.load(legacy_protocol_source) + + decoy.verify( + protocol_engine.add_labware_definition(labware_definition), + protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), + task_queue.set_run_func( + func=legacy_executor.execute, + protocol=legacy_protocol, + context=legacy_context, + ), + ) From 8f30fa79630cc35c3eb02e4de77349ca77ac1d10 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Mon, 20 Mar 2023 15:26:26 -0400 Subject: [PATCH 06/34] create_simulating_runner.py --- api/src/opentrons/cli/analyze.py | 4 +- .../create_simulating_runner.py | 10 +++-- .../protocol_runner/protocol_runner.py | 39 +++---------------- .../smoke_tests/test_legacy_command_mapper.py | 8 +++- .../smoke_tests/test_legacy_custom_labware.py | 8 +++- .../test_legacy_module_commands.py | 11 +++++- .../smoke_tests/test_protocol_runner.py | 25 +++++++++--- 7 files changed, 56 insertions(+), 49 deletions(-) diff --git a/api/src/opentrons/cli/analyze.py b/api/src/opentrons/cli/analyze.py index 0b7e6776764..931025e15d7 100644 --- a/api/src/opentrons/cli/analyze.py +++ b/api/src/opentrons/cli/analyze.py @@ -76,7 +76,9 @@ async def _analyze( except ProtocolFilesInvalidError as error: raise click.ClickException(str(error)) - runner = await create_simulating_runner(robot_type=protocol_source.robot_type) + runner = await create_simulating_runner( + robot_type=protocol_source.robot_type, protocol_config=protocol_source.config + ) analysis = await runner.run(protocol_source) if json_output: diff --git a/api/src/opentrons/protocol_runner/create_simulating_runner.py b/api/src/opentrons/protocol_runner/create_simulating_runner.py index 9e9061c73fb..fc35422b80e 100644 --- a/api/src/opentrons/protocol_runner/create_simulating_runner.py +++ b/api/src/opentrons/protocol_runner/create_simulating_runner.py @@ -6,14 +6,17 @@ Config as ProtocolEngineConfig, create_protocol_engine, ) +from opentrons.protocol_reader.protocol_source import ProtocolConfig from opentrons_shared_data.robot.dev_types import RobotType from .legacy_wrappers import LegacySimulatingContextCreator -from .protocol_runner import ProtocolRunner +from .protocol_runner import create_protocol_runner, ProtocolRunner -async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner: +async def create_simulating_runner( + robot_type: RobotType, protocol_config: ProtocolConfig +) -> ProtocolRunner: """Create a ProtocolRunner wired to a simulating HardwareControlAPI. 
Example: @@ -62,7 +65,8 @@ async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner: protocol_engine=protocol_engine, ) - return ProtocolRunner( + return create_protocol_runner( + protocol_config=protocol_config, protocol_engine=protocol_engine, hardware_api=simulating_hardware_api, legacy_context_creator=simulating_legacy_context_creator, diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 34639d4ed65..1b68e6db8c9 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -1,13 +1,11 @@ """Protocol run control and management.""" import asyncio -from typing import Iterable, List, NamedTuple, Optional, Union +from typing import List, NamedTuple, Optional, Union from typing_extensions import Protocol as TypingProtocol import anyio -from opentrons_shared_data.labware.labware_definition import LabwareDefinition - from opentrons.broker import Broker from opentrons.equipment_broker import EquipmentBroker from opentrons.hardware_control import HardwareControlAPI @@ -76,6 +74,8 @@ async def run( class PythonAndLegacyRunner(ProtocolRunner): + """Protocol runner implementation for python protocols and legacy protocols.""" + def __init__( self, protocol_engine: ProtocolEngine, @@ -184,6 +184,8 @@ async def run( class JsonRunner(ProtocolRunner): + """Protocol runner implementation for python protocols and legacy protocols.""" + def __init__( self, protocol_engine: ProtocolEngine, @@ -301,37 +303,6 @@ async def run( commands = self._protocol_engine.state_view.commands.get_all() return ProtocolRunResult(commands=commands, state_summary=run_data) - # def _load_python_or_legacy_json( - # self, - # protocol_source: ProtocolSource, - # labware_definitions: Iterable[LabwareDefinition], - # ) -> None: - # # fixme(mm, 2022-12-23): This does I/O and compute-bound parsing that will block - # # the event loop. Jira RSS-165. 
- # protocol = self._legacy_file_reader.read(protocol_source, labware_definitions) - # broker = None - # equipment_broker = None - # - # if protocol.api_level < LEGACY_PYTHON_API_VERSION_CUTOFF: - # broker = Broker() - # equipment_broker = EquipmentBroker[LegacyLoadInfo]() - # - # self._protocol_engine.add_plugin( - # LegacyContextPlugin(broker=broker, equipment_broker=equipment_broker) - # ) - # - # context = self._legacy_context_creator.create( - # protocol=protocol, - # broker=broker, - # equipment_broker=equipment_broker, - # ) - # - # self._task_queue.set_run_func( - # func=self._legacy_executor.execute, - # protocol=protocol, - # context=context, - # ) - def create_protocol_runner( protocol_config: Union[JsonProtocolConfig, PythonProtocolConfig], diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py index 56298e2fe3c..51acb12a9e1 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py @@ -18,11 +18,12 @@ ModuleLocation, DeckPoint, ) -from opentrons.protocol_reader import ProtocolReader +from opentrons.protocol_reader import ProtocolReader, PythonProtocolConfig from opentrons.protocol_runner import create_simulating_runner from opentrons.protocol_runner.legacy_command_mapper import LegacyCommandParams from opentrons.types import MountType, DeckSlotName from opentrons_shared_data.pipette.dev_types import PipetteNameType +from opentrons.protocols.api_support.types import APIVersion async def simulate_and_get_commands(protocol_file: Path) -> List[commands.Command]: @@ -32,7 +33,10 @@ async def simulate_and_get_commands(protocol_file: Path) -> List[commands.Comman files=[protocol_file], directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)), + ) result = await subject.run(protocol_source) assert result.state_summary.errors == [] assert result.state_summary.status == EngineStatus.SUCCEEDED diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py index 613c336c047..be75fac1b4a 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py @@ -12,8 +12,9 @@ from opentrons_shared_data import load_shared_data from opentrons.types import DeckSlotName from opentrons.protocol_engine import DeckSlotLocation, LoadedLabware -from opentrons.protocol_reader import ProtocolReader +from opentrons.protocol_reader import ProtocolReader, PythonProtocolConfig from opentrons.protocol_runner import create_simulating_runner +from opentrons.protocols.api_support.types import APIVersion FIXTURE_LABWARE_DEF = load_shared_data("labware/fixtures/2/fixture_96_plate.json") @@ -52,7 +53,10 @@ async def test_legacy_custom_labware(custom_labware_protocol_files: List[Path]) directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)), + ) result = await subject.run(protocol_source) expected_labware = LoadedLabware.construct( 
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py index e2b51095e7c..905d6c064e2 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py @@ -8,8 +8,12 @@ from opentrons.protocol_engine import ModuleModel, commands -from opentrons.protocol_reader import ProtocolReader +from opentrons.protocol_reader import ( + ProtocolReader, + PythonProtocolConfig, +) from opentrons.protocol_runner import create_simulating_runner +from opentrons.protocols.api_support.types import APIVersion @pytest.fixture() @@ -60,7 +64,10 @@ async def test_runner_with_modules_in_legacy_python( directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)), + ) result = await subject.run(protocol_source) commands_result = result.commands diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index a036463c534..13703e8b0ac 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -27,8 +27,13 @@ commands, DeckPoint, ) -from opentrons.protocol_reader import ProtocolReader +from opentrons.protocol_reader import ( + ProtocolReader, + JsonProtocolConfig, + PythonProtocolConfig, +) from opentrons.protocol_runner import create_simulating_runner +from opentrons.protocols.api_support.types import APIVersion # TODO (tz, 6-17-22): API version 3.x in-development. 
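For reference, every caller of create_simulating_runner() now has to pass the protocol's config alongside the robot type, as the updated smoke tests in this patch show. A minimal sketch of the new call pattern (not copied from any one diff; only names that already appear in these patches are used, and the schema version is illustrative):

from opentrons.protocol_reader import JsonProtocolConfig, ProtocolSource
from opentrons.protocol_runner import ProtocolRunResult, create_simulating_runner


async def simulate_json_protocol(protocol_source: ProtocolSource) -> ProtocolRunResult:
    # Pick the config that matches the protocol being simulated; schema_version=6
    # mirrors the v6 JSON smoke test in this series.
    runner = await create_simulating_runner(
        robot_type="OT-2 Standard",
        protocol_config=JsonProtocolConfig(schema_version=6),
    )
    return await runner.run(protocol_source)
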
@@ -45,7 +50,10 @@ async def test_runner_with_python( directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 14)), + ) result = await subject.run(protocol_source) commands_result = result.commands pipettes_result = result.state_summary.pipettes @@ -111,7 +119,9 @@ async def test_runner_with_json(json_protocol_file: Path) -> None: directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", protocol_config=JsonProtocolConfig(schema_version=6) + ) result = await subject.run(protocol_source) commands_result = result.commands @@ -170,7 +180,10 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)), + ) result = await subject.run(protocol_source) commands_result = result.commands @@ -227,7 +240,9 @@ async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", protocol_config=JsonProtocolConfig(schema_version=5) + ) result = await subject.run(protocol_source) commands_result = result.commands From c73922c996b1e84bba299ddcb5ec6b74d60e3170 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 21 Mar 2023 14:57:46 -0400 Subject: [PATCH 07/34] fixed robot server dependencies.py --- .../robot_server/protocols/dependencies.py | 22 +++++++++---------- robot-server/robot_server/protocols/router.py | 10 ++++++--- .../robot_server/runs/engine_store.py | 12 ++++++++-- .../tests/protocols/test_protocols_router.py | 1 - robot-server/tests/runs/test_engine_store.py | 2 +- 5 files changed, 29 insertions(+), 18 deletions(-) diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index 3159414e6a6..37434f6cd0a 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ b/robot-server/robot_server/protocols/dependencies.py @@ -96,17 +96,17 @@ async def get_analysis_store( return analysis_store -async def get_protocol_analyzer( - analysis_store: AnalysisStore = Depends(get_analysis_store), - analysis_robot_type: RobotType = Depends(get_robot_type), -) -> ProtocolAnalyzer: - """Construct a ProtocolAnalyzer for a single request.""" - protocol_runner = await create_simulating_runner(robot_type=analysis_robot_type) - - return ProtocolAnalyzer( - protocol_runner=protocol_runner, - analysis_store=analysis_store, - ) +# async def get_protocol_analyzer( +# analysis_store: AnalysisStore = Depends(get_analysis_store), +# analysis_robot_type: RobotType = Depends(get_robot_type), +# ) -> ProtocolAnalyzer: +# """Construct a ProtocolAnalyzer for a single request.""" +# protocol_runner = await create_simulating_runner(robot_type=analysis_robot_type) +# +# return ProtocolAnalyzer( +# protocol_runner=protocol_runner, +# analysis_store=analysis_store, +# ) async def get_protocol_auto_deleter( diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index ab087270a36..ee4d90cb788 100644 --- 
a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -9,6 +9,7 @@ from typing_extensions import Literal from opentrons.protocol_reader import ProtocolReader, ProtocolFilesInvalidError +from opentrons.protocol_runner import create_simulating_runner from opentrons_shared_data.robot.dev_types import RobotType from robot_server.errors import ErrorDetails, ErrorBody @@ -39,7 +40,6 @@ get_protocol_reader, get_protocol_store, get_analysis_store, - get_protocol_analyzer, get_protocol_directory, ) @@ -121,7 +121,6 @@ async def create_protocol( protocol_store: ProtocolStore = Depends(get_protocol_store), analysis_store: AnalysisStore = Depends(get_analysis_store), protocol_reader: ProtocolReader = Depends(get_protocol_reader), - protocol_analyzer: ProtocolAnalyzer = Depends(get_protocol_analyzer), task_runner: TaskRunner = Depends(get_task_runner), protocol_auto_deleter: ProtocolAutoDeleter = Depends(get_protocol_auto_deleter), robot_type: RobotType = Depends(get_robot_type), @@ -176,7 +175,12 @@ async def create_protocol( protocol_auto_deleter.make_room_for_new_protocol() protocol_store.insert(protocol_resource) - + protocol_runner = await create_simulating_runner( + robot_type=robot_type, protocol_config=source.config + ) + protocol_analyzer = ProtocolAnalyzer( + protocol_runner=protocol_runner, analysis_store=analysis_store + ) task_runner.run( protocol_analyzer.analyze, protocol_resource=protocol_resource, diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 04425a73775..6a18d203ee5 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -5,7 +5,11 @@ from opentrons.config import feature_flags from opentrons.hardware_control import HardwareControlAPI -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +from opentrons.protocol_runner import ( + ProtocolRunner, + ProtocolRunResult, + create_protocol_runner, +) from opentrons.protocol_engine import ( ProtocolEngine, Config as ProtocolEngineConfig, @@ -132,7 +136,11 @@ async def create( block_on_door_open=feature_flags.enable_door_safety_switch(), ), ) - runner = ProtocolRunner(protocol_engine=engine, hardware_api=self._hardware_api) + runner = create_protocol_runner( + protocol_engine=engine, + hardware_api=self._hardware_api, + protocol_config=protocol.source.config if protocol else None, + ) if self._runner_engine_pair is not None: raise EngineConflictError("Another run is currently active.") diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 6f5d9df89b4..713273afa5f 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -309,7 +309,6 @@ async def test_create_protocol( protocol_store=protocol_store, analysis_store=analysis_store, protocol_reader=protocol_reader, - protocol_analyzer=protocol_analyzer, task_runner=task_runner, protocol_auto_deleter=protocol_auto_deleter, robot_type="OT-2 Standard", diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index e494810abd0..b7628cdeded 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -47,7 +47,7 @@ async def test_create_engine(subject: EngineStore) -> None: assert subject.current_run_id == "run-id" assert isinstance(result, StateSummary) - assert 
isinstance(subject.runner, ProtocolRunner) + assert isinstance(subject.runner, ProtocolRunner) # type: ignore[misc] assert isinstance(subject.engine, ProtocolEngine) From 33c260a045a4c0f4d3831a60e36a355a6ab4f9dd Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 21 Mar 2023 16:02:47 -0400 Subject: [PATCH 08/34] MaintenanceRunner --- api/src/opentrons/protocol_runner/__init__.py | 2 + .../protocol_runner/protocol_runner.py | 133 +++++++++--- .../protocol_runner/test_protocol_runner.py | 200 +++++++++++++++++- 3 files changed, 301 insertions(+), 34 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 4b5362f3bc6..74a539124ac 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -9,6 +9,7 @@ create_protocol_runner, JsonRunner, PythonAndLegacyRunner, + MaintenanceRunner, ) from .create_simulating_runner import create_simulating_runner @@ -19,4 +20,5 @@ "create_protocol_runner", "JsonRunner", "PythonAndLegacyRunner", + "MaintenanceRunner", ] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 1b68e6db8c9..a353e460d21 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -63,6 +63,12 @@ async def load(self, protocol_source: ProtocolSource) -> None: to control the run of a file-based protocol. """ + def play(self) -> None: + """Start or resume the run.""" + + def pause(self) -> None: + """Pause the run.""" + async def stop(self) -> None: """Stop (cancel) the run.""" @@ -199,12 +205,6 @@ def __init__( self._hardware_api = hardware_api self._json_file_reader = json_file_reader or JsonFileReader() self._json_translator = json_translator or JsonTranslator() - # self._legacy_file_reader = legacy_file_reader or LegacyFileReader() - # self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( - # hardware_api=hardware_api, - # protocol_engine=protocol_engine, - # ) - # self._legacy_executor = legacy_executor or LegacyExecutor() # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) @@ -304,8 +304,80 @@ async def run( return ProtocolRunResult(commands=commands, state_summary=run_data) +class MaintenanceRunner(ProtocolRunner): + """Protocol runner implementation for setup commands.""" + + def __init__( + self, + protocol_engine: ProtocolEngine, + hardware_api: HardwareControlAPI, + task_queue: Optional[TaskQueue] = None, + ) -> None: + """Initialize the JsonRunner with its dependencies.""" + self._protocol_engine = protocol_engine + # TODO(mc, 2022-01-11): replace task queue with specific implementations + # of runner interface + self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) + self._hardware_api = hardware_api + + def was_started(self) -> bool: + """Whether the runner has been started. + + This value is latched; once it is True, it will never become False. + """ + return self._protocol_engine.state_view.commands.has_been_played() + + async def load(self, protocol_source: ProtocolSource) -> None: + """Load a ProtocolSource into managed ProtocolEngine. + + Calling this method is only necessary if the runner will be used + to control the run of a file-based protocol. 
+ """ + raise NotImplementedError( + "MaintenanceRunner.load() not supported for setup commands. no protocol associated." + ) + + def play(self) -> None: + """Start or resume the run.""" + raise NotImplementedError( + "MaintenanceRunner.play() not supported for setup commands." + ) + + def pause(self) -> None: + """Pause the run.""" + raise NotImplementedError( + "MaintenanceRunner.pause() not supported for setup commands." + ) + + async def stop(self) -> None: + """Stop (cancel) the run.""" + if self.was_started(): + await self._protocol_engine.stop() + else: + await self._protocol_engine.finish( + drop_tips_and_home=False, + set_run_status=False, + ) + + async def run( + self, + protocol_source: Optional[ProtocolSource] = None, + ) -> ProtocolRunResult: + """Run a given protocol to completion.""" + if protocol_source: + self.load(protocol_source) + + await self._hardware_api.home() + self._task_queue.start() + await self._task_queue.join() + + run_data = self._protocol_engine.state_view.get_summary() + commands = self._protocol_engine.state_view.commands.get_all() + return ProtocolRunResult(commands=commands, state_summary=run_data) + + def create_protocol_runner( - protocol_config: Union[JsonProtocolConfig, PythonProtocolConfig], + protocol_config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI, task_queue: Optional[TaskQueue] = None, @@ -316,26 +388,33 @@ def create_protocol_runner( legacy_executor: Optional[LegacyExecutor] = None, ) -> ProtocolRunner: """Create a protocol runner.""" - if ( - isinstance(protocol_config, JsonProtocolConfig) - and protocol_config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF - ): - return JsonRunner( - protocol_engine=protocol_engine, - hardware_api=hardware_api, - json_file_reader=json_file_reader, - json_translator=json_translator, - task_queue=task_queue, - ) - else: - return PythonAndLegacyRunner( - protocol_engine=protocol_engine, - hardware_api=hardware_api, - task_queue=task_queue, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, - ) + if protocol_config: + if ( + isinstance(protocol_config, JsonProtocolConfig) + and protocol_config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF + ): + return JsonRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + json_file_reader=json_file_reader, + json_translator=json_translator, + task_queue=task_queue, + ) + else: + return PythonAndLegacyRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, + ) + + return MaintenanceRunner( + protocol_engine=protocol_engine, + task_queue=task_queue, + hardware_api=hardware_api, + ) async def _yield() -> None: diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index ff646bd7910..51a62f356c4 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -21,10 +21,10 @@ PythonProtocolConfig, ) from opentrons.protocol_runner import ( - ProtocolRunner, create_protocol_runner, JsonRunner, PythonAndLegacyRunner, + MaintenanceRunner, ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ -139,6 
+139,20 @@ def legacy_subject( ) +@pytest.fixture +def maintenance_subject( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, +) -> MaintenanceRunner: + """Get a MaintenanceRunner test subject with mocked dependencies.""" + return MaintenanceRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + ) + + async def test_create_protocol_runner( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, @@ -207,6 +221,21 @@ async def test_create_protocol_runner( PythonAndLegacyRunner, ) + assert isinstance( + create_protocol_runner( + protocol_config=None, + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + legacy_file_reader=legacy_file_reader, + legacy_context_creator=legacy_context_creator, + legacy_executor=legacy_executor, + ), + MaintenanceRunner, + ) + # @pytest.mark.parametrize( # argnames=["subject"], @@ -256,7 +285,7 @@ async def test_stop_never_started_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - json_subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) @@ -274,7 +303,7 @@ async def test_run_json_runner( hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - json_subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( @@ -299,7 +328,7 @@ async def test_load_json_runner( json_translator: JsonTranslator, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - json_subject: ProtocolRunner, + json_subject: JsonRunner, ) -> None: """It should load a JSON protocol file.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -368,6 +397,61 @@ async def test_load_json_runner( ) +async def test_play_starts_run_legacy_runner( + decoy: Decoy, + protocol_engine: ProtocolEngine, + task_queue: TaskQueue, + legacy_subject: PythonAndLegacyRunner, +) -> None: + """It should start a protocol run with play.""" + legacy_subject.play() + + decoy.verify(protocol_engine.play(), times=1) + + +async def test_pause_legacy_runner( + decoy: Decoy, + protocol_engine: ProtocolEngine, + legacy_subject: PythonAndLegacyRunner, +) -> None: + """It should pause a protocol run with pause.""" + legacy_subject.pause() + + decoy.verify(protocol_engine.pause(), times=1) + + +async def test_stop_legacy_runner( + decoy: Decoy, + task_queue: TaskQueue, + protocol_engine: ProtocolEngine, + legacy_subject: PythonAndLegacyRunner, +) -> None: + """It should halt a protocol run with stop.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) + + legacy_subject.play() + await legacy_subject.stop() + + decoy.verify(await protocol_engine.stop(), times=1) + + +async def test_stop_never_started_legacy_runner( + decoy: Decoy, + task_queue: TaskQueue, + protocol_engine: ProtocolEngine, + legacy_subject: PythonAndLegacyRunner, +) -> None: + """It should clean up rather than halt if the runner was never started.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) + + await legacy_subject.stop() + + decoy.verify( + await protocol_engine.finish(drop_tips_and_home=False, 
set_run_status=False), + times=1, + ) + + async def test_load_legacy_python( decoy: Decoy, legacy_file_reader: LegacyFileReader, @@ -375,7 +459,7 @@ async def test_load_legacy_python( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - legacy_subject: ProtocolRunner, + legacy_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -440,7 +524,7 @@ async def test_load_python_with_pe_papi_core( legacy_file_reader: LegacyFileReader, legacy_context_creator: LegacyContextCreator, protocol_engine: ProtocolEngine, - legacy_subject: ProtocolRunner, + legacy_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" legacy_protocol_source = ProtocolSource( @@ -492,7 +576,7 @@ async def test_load_legacy_json( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - legacy_subject: ProtocolRunner, + legacy_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based JSON protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -545,3 +629,105 @@ async def test_load_legacy_json( context=legacy_context, ), ) + + +async def test_run_python_runner( + decoy: Decoy, + hardware_api: HardwareAPI, + protocol_engine: ProtocolEngine, + task_queue: TaskQueue, + legacy_subject: PythonAndLegacyRunner, +) -> None: + """It should run a protocol to completion.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( + False, True + ) + + assert legacy_subject.was_started() is False + await legacy_subject.run() + assert legacy_subject.was_started() is True + + decoy.verify( + await hardware_api.home(), + protocol_engine.play(), + task_queue.start(), + await task_queue.join(), + ) + + +async def test_stop_maintenance_runner( + decoy: Decoy, + task_queue: TaskQueue, + protocol_engine: ProtocolEngine, + maintenance_subject: MaintenanceRunner, +) -> None: + """It should halt a protocol run with stop.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) + + await maintenance_subject.stop() + + decoy.verify(await protocol_engine.stop(), times=1) + + +async def test_stop_never_started_maintenance_runner( + decoy: Decoy, + task_queue: TaskQueue, + protocol_engine: ProtocolEngine, + maintenance_subject: MaintenanceRunner, +) -> None: + """It should clean up rather than halt if the runner was never started.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) + + await maintenance_subject.stop() + + decoy.verify( + await protocol_engine.finish(drop_tips_and_home=False, set_run_status=False), + times=1, + ) + + +async def test_run_maintanence_runner( + decoy: Decoy, + hardware_api: HardwareAPI, + protocol_engine: ProtocolEngine, + task_queue: TaskQueue, + maintenance_subject: MaintenanceRunner, +) -> None: + """It should run a protocol to completion.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( + False, True + ) + + assert maintenance_subject.was_started() is False + await maintenance_subject.run() + assert maintenance_subject.was_started() is True + + decoy.verify( + await hardware_api.home(), + task_queue.start(), + await task_queue.join(), + ) + + +def test_play_maintanence_runner_raises( + maintenance_subject: MaintenanceRunner, +) -> None: + """Should raise a not supported error.""" + with 
pytest.raises(NotImplementedError): + maintenance_subject.play() + + +def test_pause_maintanence_runner_raises( + maintenance_subject: MaintenanceRunner, +) -> None: + """Should raise a not supported error.""" + with pytest.raises(NotImplementedError): + maintenance_subject.pause() + + +async def test_load_maintanence_runner_raises( + maintenance_subject: MaintenanceRunner, +) -> None: + """Should raise a not supported error.""" + with pytest.raises(NotImplementedError): + await maintenance_subject.load(None) # type: ignore[arg-type] From b7278040c9ae23aa4e40b2c301244d2e18110c77 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 21 Mar 2023 16:58:21 -0400 Subject: [PATCH 09/34] wip robot-server --- .../protocol_engine/protocol_engine.py | 8 +++++++ .../protocol_engine/state/commands.py | 1 + .../protocol_runner/protocol_runner.py | 1 + .../robot_server/protocols/dependencies.py | 18 --------------- .../http_api/runs/test_persistence.py | 2 +- robot-server/tests/runs/test_engine_store.py | 22 +++++++++++++++---- 6 files changed, 29 insertions(+), 23 deletions(-) diff --git a/api/src/opentrons/protocol_engine/protocol_engine.py b/api/src/opentrons/protocol_engine/protocol_engine.py index e1283d5e308..a8de66493e4 100644 --- a/api/src/opentrons/protocol_engine/protocol_engine.py +++ b/api/src/opentrons/protocol_engine/protocol_engine.py @@ -109,6 +109,14 @@ def add_plugin(self, plugin: AbstractPlugin) -> None: """Add a plugin to the engine to customize behavior.""" self._plugin_starter.start(plugin) + def set_run_started_at(self): + """Set run_started_at for maintenance runs""" + requested_at = self._model_utils.get_timestamp() + if not self.state_view.commands.state.run_result: + self.state_view.commands.state.run_started_at = ( + self.state_view.commands.state.run_started_at or requested_at + ) + def play(self) -> None: """Start or resume executing commands in the queue.""" requested_at = self._model_utils.get_timestamp() diff --git a/api/src/opentrons/protocol_engine/state/commands.py b/api/src/opentrons/protocol_engine/state/commands.py index 330012fb3f9..91a6fe8c51f 100644 --- a/api/src/opentrons/protocol_engine/state/commands.py +++ b/api/src/opentrons/protocol_engine/state/commands.py @@ -182,6 +182,7 @@ def __init__( latest_command_hash=None, ) + def handle_action(self, action: Action) -> None: # noqa: C901 """Modify state in reaction to an action.""" errors_by_id: Mapping[str, ErrorOccurrence] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index a353e460d21..6005058191e 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -368,6 +368,7 @@ async def run( self.load(protocol_source) await self._hardware_api.home() + self._protocol_engine.set_run_started_at() self._task_queue.start() await self._task_queue.join() diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index 37434f6cd0a..264bb16eba2 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ b/robot-server/robot_server/protocols/dependencies.py @@ -11,13 +11,9 @@ from sqlalchemy.engine import Engine as SQLEngine from opentrons.protocol_reader import ProtocolReader -from opentrons.protocol_runner import create_simulating_runner - -from opentrons_shared_data.robot.dev_types import RobotType from robot_server.app_state import AppState, AppStateAccessor, get_app_state from robot_server.deletion_planner 
import ProtocolDeletionPlanner -from robot_server.hardware import get_robot_type from robot_server.persistence import get_sql_engine, get_persistence_directory from robot_server.settings import get_settings @@ -25,7 +21,6 @@ from .protocol_store import ( ProtocolStore, ) -from .protocol_analyzer import ProtocolAnalyzer from .analysis_store import AnalysisStore @@ -96,19 +91,6 @@ async def get_analysis_store( return analysis_store -# async def get_protocol_analyzer( -# analysis_store: AnalysisStore = Depends(get_analysis_store), -# analysis_robot_type: RobotType = Depends(get_robot_type), -# ) -> ProtocolAnalyzer: -# """Construct a ProtocolAnalyzer for a single request.""" -# protocol_runner = await create_simulating_runner(robot_type=analysis_robot_type) -# -# return ProtocolAnalyzer( -# protocol_runner=protocol_runner, -# analysis_store=analysis_store, -# ) - - async def get_protocol_auto_deleter( protocol_store: ProtocolStore = Depends(get_protocol_store), ) -> ProtocolAutoDeleter: diff --git a/robot-server/tests/integration/http_api/runs/test_persistence.py b/robot-server/tests/integration/http_api/runs/test_persistence.py index 99f30896357..16287ae0506 100644 --- a/robot-server/tests/integration/http_api/runs/test_persistence.py +++ b/robot-server/tests/integration/http_api/runs/test_persistence.py @@ -249,7 +249,7 @@ async def test_runs_completed_started_at_persist_via_actions_router( # fetch the updated run, which we expect to be persisted get_run_response = await client.get_run(run_id=run_id) run_data = get_run_response.json()["data"] - + print(run_data) assert datetime.fromisoformat(run_data["startedAt"]).timestamp() == pytest.approx( expected_started_at.timestamp(), abs=2 ) diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index b7628cdeded..f3334c88020 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -11,7 +11,11 @@ from opentrons.types import DeckSlotName from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +from opentrons.protocol_runner import ( + ProtocolRunner, + ProtocolRunResult, + MaintenanceRunner, +) from opentrons.protocol_reader import ProtocolReader, ProtocolSource from robot_server.protocols import ProtocolResource @@ -47,7 +51,7 @@ async def test_create_engine(subject: EngineStore) -> None: assert subject.current_run_id == "run-id" assert isinstance(result, StateSummary) - assert isinstance(subject.runner, ProtocolRunner) # type: ignore[misc] + assert isinstance(subject.runner, MaintenanceRunner) assert isinstance(subject.engine, ProtocolEngine) @@ -140,9 +144,19 @@ async def test_clear_engine(subject: EngineStore) -> None: subject.runner -async def test_clear_engine_not_stopped_or_idle(subject: EngineStore) -> None: +async def test_clear_engine_not_stopped_or_idle( + subject: EngineStore, protocol_source: ProtocolSource +) -> None: """It should raise a conflict if the engine is not stopped.""" - await subject.create(run_id="run-id", labware_offsets=[], protocol=None) + protocol_resource = ProtocolResource( + protocol_id="123", + created_at=datetime.now(), + source=protocol_source, + protocol_key=None, + ) + await subject.create( + run_id="run-id", labware_offsets=[], protocol=protocol_resource + ) subject.runner.play() with pytest.raises(EngineConflictError): From 
5ecd3cb408fdfe01c8030ef411e414bf29f9510b Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Wed, 22 Mar 2023 11:24:06 -0400 Subject: [PATCH 10/34] changed create_simulating_runner to monkeypatch --- robot-server/robot_server/protocols/router.py | 8 ++++--- .../tests/protocols/test_protocols_router.py | 23 ++++++++++++++++++- robot-server/tests/runs/test_engine_store.py | 1 - 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index ee4d90cb788..4e22714c43b 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -9,7 +9,7 @@ from typing_extensions import Literal from opentrons.protocol_reader import ProtocolReader, ProtocolFilesInvalidError -from opentrons.protocol_runner import create_simulating_runner +import opentrons.protocol_runner as protocol_runner from opentrons_shared_data.robot.dev_types import RobotType from robot_server.errors import ErrorDetails, ErrorBody @@ -175,11 +175,13 @@ async def create_protocol( protocol_auto_deleter.make_room_for_new_protocol() protocol_store.insert(protocol_resource) - protocol_runner = await create_simulating_runner( + print(robot_type) + print(source.config) + simulating_runner = await protocol_runner.create_simulating_runner( robot_type=robot_type, protocol_config=source.config ) protocol_analyzer = ProtocolAnalyzer( - protocol_runner=protocol_runner, analysis_store=analysis_store + protocol_runner=simulating_runner, analysis_store=analysis_store ) task_runner.run( protocol_analyzer.analyze, diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 713273afa5f..196f4d57324 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -16,6 +16,7 @@ PythonProtocolConfig, ProtocolFilesInvalidError, ) +import opentrons.protocol_runner as protocol_runner from robot_server.errors import ApiError from robot_server.service.json_api import SimpleEmptyBody, MultiBodyMeta @@ -54,6 +55,15 @@ ) +@pytest.fixture(autouse=True) +def patch_mock_create_simulating_runner( + decoy: Decoy, monkeypatch: pytest.MonkeyPatch +) -> None: + """Replace protocol_runner.create_simulating_runner() with a mock.""" + mock = decoy.mock(func=protocol_runner.create_simulating_runner) + monkeypatch.setattr(protocol_runner, "create_simulating_runner", mock) + + @pytest.fixture def protocol_store(decoy: Decoy) -> ProtocolStore: """Get a mocked out ProtocolStore interface.""" @@ -256,7 +266,6 @@ async def test_create_protocol( protocol_store: ProtocolStore, analysis_store: AnalysisStore, protocol_reader: ProtocolReader, - protocol_analyzer: ProtocolAnalyzer, task_runner: TaskRunner, protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: @@ -286,6 +295,18 @@ async def test_create_protocol( protocol_key="dummy-key-111", ) + json_runner = decoy.mock(cls=protocol_runner.JsonRunner) + decoy.when( + await protocol_runner.create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=JsonProtocolConfig(schema_version=123), + ) + ).then_return(json_runner) + + protocol_analyzer = ProtocolAnalyzer( + protocol_runner=json_runner, analysis_store=analysis_store + ) + pending_analysis = AnalysisSummary( id="analysis-id", status=AnalysisStatus.PENDING, diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index f3334c88020..be4426fbba8 100644 
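For reference while reading the remaining robot-server changes: the class returned by create_protocol_runner() depends only on the protocol config, mirroring the assertions in test_create_protocol_runner earlier in this series. A rough sketch of that dispatch, assuming (as the smoke tests suggest) that the legacy cutoff is JSON schema v6; the protocol_engine and hardware_api arguments stand in for real instances:

from opentrons.hardware_control import HardwareControlAPI
from opentrons.protocol_engine import ProtocolEngine
from opentrons.protocol_reader import JsonProtocolConfig
from opentrons.protocol_runner import (
    JsonRunner,
    MaintenanceRunner,
    PythonAndLegacyRunner,
    create_protocol_runner,
)


def check_runner_dispatch(
    protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI
) -> None:
    # JSON protocols at schema v6 or newer get the new JsonRunner.
    assert isinstance(
        create_protocol_runner(
            protocol_config=JsonProtocolConfig(schema_version=6),
            protocol_engine=protocol_engine,
            hardware_api=hardware_api,
        ),
        JsonRunner,
    )
    # Older JSON schemas (and Python protocols of any API version) fall back to
    # PythonAndLegacyRunner.
    assert isinstance(
        create_protocol_runner(
            protocol_config=JsonProtocolConfig(schema_version=5),
            protocol_engine=protocol_engine,
            hardware_api=hardware_api,
        ),
        PythonAndLegacyRunner,
    )
    # No protocol config at all (a run created without a protocol) gets the
    # setup-only MaintenanceRunner.
    assert isinstance(
        create_protocol_runner(
            protocol_config=None,
            protocol_engine=protocol_engine,
            hardware_api=hardware_api,
        ),
        MaintenanceRunner,
    )
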
--- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -12,7 +12,6 @@ from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types from opentrons.protocol_runner import ( - ProtocolRunner, ProtocolRunResult, MaintenanceRunner, ) From 037f3224749531c46b75281a5357b48762d48359 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Wed, 22 Mar 2023 14:24:44 -0400 Subject: [PATCH 11/34] removed set_run_started_at and fixed test failing on started_at --- .../protocol_engine/protocol_engine.py | 8 ------ .../protocol_runner/protocol_runner.py | 1 - .../protocol_runner/test_protocol_runner.py | 17 +++++------ .../test_pause_run_not_started.tavern.yaml | 28 +++++++++++++++++-- .../http_api/runs/test_persistence.py | 10 +++++-- 5 files changed, 43 insertions(+), 21 deletions(-) diff --git a/api/src/opentrons/protocol_engine/protocol_engine.py b/api/src/opentrons/protocol_engine/protocol_engine.py index a8de66493e4..e1283d5e308 100644 --- a/api/src/opentrons/protocol_engine/protocol_engine.py +++ b/api/src/opentrons/protocol_engine/protocol_engine.py @@ -109,14 +109,6 @@ def add_plugin(self, plugin: AbstractPlugin) -> None: """Add a plugin to the engine to customize behavior.""" self._plugin_starter.start(plugin) - def set_run_started_at(self): - """Set run_started_at for maintenance runs""" - requested_at = self._model_utils.get_timestamp() - if not self.state_view.commands.state.run_result: - self.state_view.commands.state.run_started_at = ( - self.state_view.commands.state.run_started_at or requested_at - ) - def play(self) -> None: """Start or resume executing commands in the queue.""" requested_at = self._model_utils.get_timestamp() diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 6005058191e..a353e460d21 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -368,7 +368,6 @@ async def run( self.load(protocol_source) await self._hardware_api.home() - self._protocol_engine.set_run_started_at() self._task_queue.start() await self._task_queue.join() diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 51a62f356c4..9470bf5dd01 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -25,6 +25,7 @@ JsonRunner, PythonAndLegacyRunner, MaintenanceRunner, + ProtocolRunner ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ -237,20 +238,20 @@ async def test_create_protocol_runner( ) -# @pytest.mark.parametrize( -# argnames=["subject"], -# argvalues=[ -# (pytest.lazy_fixture("json_subject")), -# ], -# ) +@pytest.mark.parametrize( + argnames=["subject"], + argvalues=[ + [pytest.lazy_fixture("json_subject")], + ], +) async def test_play_starts_run_json_runner( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - json_subject: JsonRunner, + subject: ProtocolRunner, ) -> None: """It should start a protocol run with play.""" - json_subject.play() + subject.play() decoy.verify(protocol_engine.play(), times=1) diff --git a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml 
b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml index 0ff14461e6e..9162ff6b212 100644 --- a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml @@ -5,11 +5,24 @@ marks: - run_server stages: - - name: Create Empty Run + - name: Upload a protocol + request: + url: '{host:s}:{port:d}/protocols' + method: POST + files: + files: 'tests/integration/protocols/load_one_labware.py' + response: + status_code: 201 + save: + json: + protocol_id: data.id + + - name: Create a run request: url: '{host:s}:{port:d}/runs' json: - data: {} + data: + protocolId: '{protocol_id}' method: POST response: strict: @@ -23,6 +36,7 @@ stages: save: json: run_id: data.id + - name: Issue pause to a none active run request: url: '{host:s}:{port:d}/runs/{run_id}/actions' @@ -37,3 +51,13 @@ stages: - id: 'RunActionNotAllowed' title: 'Run Action Not Allowed' detail: 'Cannot pause a run that is not running.' + + - name: Stop the run + request: + url: '{host:s}:{port:d}/runs/{run_id}/actions' + method: POST + json: + data: + actionType: stop + response: + status_code: 201 diff --git a/robot-server/tests/integration/http_api/runs/test_persistence.py b/robot-server/tests/integration/http_api/runs/test_persistence.py index 16287ae0506..4971b37cf06 100644 --- a/robot-server/tests/integration/http_api/runs/test_persistence.py +++ b/robot-server/tests/integration/http_api/runs/test_persistence.py @@ -1,5 +1,6 @@ from typing import Any, AsyncGenerator, Dict, NamedTuple, cast from datetime import datetime +from pathlib import Path import anyio import pytest @@ -226,9 +227,14 @@ async def test_runs_completed_started_at_persist_via_actions_router( ) -> None: """Test that startedAt and completedAt are be persisted via play action.""" client, server = client_and_server + await client.post_protocol([Path("./tests/integration/protocols/simple.py")]) + + protocols = (await client.get_protocols()).json()["data"] # create a run - create_run_response = await client.post_run(req_body={"data": {}}) + create_run_response = await client.post_run( + req_body={"data": {"protocolId": protocols[0]["id"]}} + ) run_id = create_run_response.json()["data"]["id"] # persist the run by hitting the actions router @@ -249,7 +255,7 @@ async def test_runs_completed_started_at_persist_via_actions_router( # fetch the updated run, which we expect to be persisted get_run_response = await client.get_run(run_id=run_id) run_data = get_run_response.json()["data"] - print(run_data) + assert datetime.fromisoformat(run_data["startedAt"]).timestamp() == pytest.approx( expected_started_at.timestamp(), abs=2 ) From d54c1ecbf1d0f6d1a3b23d22304c59c5f6ceabb5 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Wed, 22 Mar 2023 15:26:43 -0400 Subject: [PATCH 12/34] rollback depends for protocol_analyzer.py and moved runner creation into analyze --- .../robot_server/protocols/dependencies.py | 10 +++++++ .../protocols/protocol_analyzer.py | 10 ++++--- robot-server/robot_server/protocols/router.py | 12 ++------ .../tests/protocols/test_protocol_analyzer.py | 29 ++++++++++++------- .../tests/protocols/test_protocols_router.py | 24 ++------------- 5 files changed, 40 insertions(+), 45 deletions(-) diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index 264bb16eba2..3f547c89489 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ 
b/robot-server/robot_server/protocols/dependencies.py @@ -21,6 +21,7 @@ from .protocol_store import ( ProtocolStore, ) +from .protocol_analyzer import ProtocolAnalyzer from .analysis_store import AnalysisStore @@ -91,6 +92,15 @@ async def get_analysis_store( return analysis_store +async def get_protocol_analyzer( + analysis_store: AnalysisStore = Depends(get_analysis_store), +) -> ProtocolAnalyzer: + """Construct a ProtocolAnalyzer for a single request.""" + return ProtocolAnalyzer( + analysis_store=analysis_store, + ) + + async def get_protocol_auto_deleter( protocol_store: ProtocolStore = Depends(get_protocol_store), ) -> ProtocolAutoDeleter: diff --git a/robot-server/robot_server/protocols/protocol_analyzer.py b/robot-server/robot_server/protocols/protocol_analyzer.py index 6af3069f8bd..6659ffbb7dd 100644 --- a/robot-server/robot_server/protocols/protocol_analyzer.py +++ b/robot-server/robot_server/protocols/protocol_analyzer.py @@ -1,7 +1,7 @@ """Protocol analysis module.""" import logging -from opentrons.protocol_runner import ProtocolRunner +import opentrons.protocol_runner as protocol_runner from .protocol_store import ProtocolResource from .analysis_store import AnalysisStore @@ -15,11 +15,9 @@ class ProtocolAnalyzer: def __init__( self, - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, ) -> None: """Initialize the analyzer and its dependencies.""" - self._protocol_runner = protocol_runner self._analysis_store = analysis_store async def analyze( @@ -28,7 +26,11 @@ async def analyze( analysis_id: str, ) -> None: """Analyze a given protocol, storing the analysis when complete.""" - result = await self._protocol_runner.run(protocol_resource.source) + runner = await protocol_runner.create_simulating_runner( + robot_type=protocol_resource.source.robot_type, + protocol_config=protocol_resource.source.config, + ) + result = await runner.run(protocol_resource.source) log.info(f'Completed analysis "{analysis_id}".') diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index 4e22714c43b..e5933c6515a 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -9,7 +9,6 @@ from typing_extensions import Literal from opentrons.protocol_reader import ProtocolReader, ProtocolFilesInvalidError -import opentrons.protocol_runner as protocol_runner from opentrons_shared_data.robot.dev_types import RobotType from robot_server.errors import ErrorDetails, ErrorBody @@ -41,6 +40,7 @@ get_protocol_store, get_analysis_store, get_protocol_directory, + get_protocol_analyzer, ) @@ -121,6 +121,7 @@ async def create_protocol( protocol_store: ProtocolStore = Depends(get_protocol_store), analysis_store: AnalysisStore = Depends(get_analysis_store), protocol_reader: ProtocolReader = Depends(get_protocol_reader), + protocol_analyzer: ProtocolAnalyzer = Depends(get_protocol_analyzer), task_runner: TaskRunner = Depends(get_task_runner), protocol_auto_deleter: ProtocolAutoDeleter = Depends(get_protocol_auto_deleter), robot_type: RobotType = Depends(get_robot_type), @@ -175,14 +176,7 @@ async def create_protocol( protocol_auto_deleter.make_room_for_new_protocol() protocol_store.insert(protocol_resource) - print(robot_type) - print(source.config) - simulating_runner = await protocol_runner.create_simulating_runner( - robot_type=robot_type, protocol_config=source.config - ) - protocol_analyzer = ProtocolAnalyzer( - protocol_runner=simulating_runner, analysis_store=analysis_store - ) + task_runner.run( 
protocol_analyzer.analyze, protocol_resource=protocol_resource, diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 9fefb0d0080..ab2c77af6a9 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -14,7 +14,7 @@ errors as pe_errors, types as pe_types, ) -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +import opentrons.protocol_runner as protocol_runner from opentrons.protocol_reader import ProtocolSource, JsonProtocolConfig from robot_server.protocols.analysis_store import AnalysisStore @@ -22,10 +22,13 @@ from robot_server.protocols.protocol_analyzer import ProtocolAnalyzer -@pytest.fixture -def protocol_runner(decoy: Decoy) -> ProtocolRunner: - """Get a mocked out ProtocolRunner.""" - return decoy.mock(cls=ProtocolRunner) +@pytest.fixture(autouse=True) +def patch_mock_create_simulating_runner( + decoy: Decoy, monkeypatch: pytest.MonkeyPatch +) -> None: + """Replace protocol_runner.check() with a mock.""" + mock = decoy.mock(func=protocol_runner.create_simulating_runner) + monkeypatch.setattr(protocol_runner, "create_simulating_runner", mock) @pytest.fixture @@ -36,19 +39,16 @@ def analysis_store(decoy: Decoy) -> AnalysisStore: @pytest.fixture def subject( - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, ) -> ProtocolAnalyzer: """Get a ProtocolAnalyzer test subject.""" return ProtocolAnalyzer( - protocol_runner=protocol_runner, analysis_store=analysis_store, ) async def test_analyze( decoy: Decoy, - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, subject: ProtocolAnalyzer, ) -> None: @@ -96,8 +96,17 @@ async def test_analyze( mount=MountType.LEFT, ) - decoy.when(await protocol_runner.run(protocol_resource.source)).then_return( - ProtocolRunResult( + json_runner = decoy.mock(cls=protocol_runner.JsonRunner) + + decoy.when( + await protocol_runner.create_simulating_runner( + robot_type="OT-3 Standard", + protocol_config=JsonProtocolConfig(schema_version=123), + ) + ).then_return(json_runner) + + decoy.when(await json_runner.run(protocol_resource.source)).then_return( + protocol_runner.ProtocolRunResult( commands=[analysis_command], state_summary=StateSummary( status=EngineStatus.SUCCEEDED, diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 196f4d57324..6f5d9df89b4 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -16,7 +16,6 @@ PythonProtocolConfig, ProtocolFilesInvalidError, ) -import opentrons.protocol_runner as protocol_runner from robot_server.errors import ApiError from robot_server.service.json_api import SimpleEmptyBody, MultiBodyMeta @@ -55,15 +54,6 @@ ) -@pytest.fixture(autouse=True) -def patch_mock_create_simulating_runner( - decoy: Decoy, monkeypatch: pytest.MonkeyPatch -) -> None: - """Replace protocol_runner.create_simulating_runner() with a mock.""" - mock = decoy.mock(func=protocol_runner.create_simulating_runner) - monkeypatch.setattr(protocol_runner, "create_simulating_runner", mock) - - @pytest.fixture def protocol_store(decoy: Decoy) -> ProtocolStore: """Get a mocked out ProtocolStore interface.""" @@ -266,6 +256,7 @@ async def test_create_protocol( protocol_store: ProtocolStore, analysis_store: AnalysisStore, protocol_reader: ProtocolReader, + protocol_analyzer: ProtocolAnalyzer, task_runner: TaskRunner, 
protocol_auto_deleter: ProtocolAutoDeleter, ) -> None: @@ -295,18 +286,6 @@ async def test_create_protocol( protocol_key="dummy-key-111", ) - json_runner = decoy.mock(cls=protocol_runner.JsonRunner) - decoy.when( - await protocol_runner.create_simulating_runner( - robot_type="OT-2 Standard", - protocol_config=JsonProtocolConfig(schema_version=123), - ) - ).then_return(json_runner) - - protocol_analyzer = ProtocolAnalyzer( - protocol_runner=json_runner, analysis_store=analysis_store - ) - pending_analysis = AnalysisSummary( id="analysis-id", status=AnalysisStatus.PENDING, @@ -330,6 +309,7 @@ async def test_create_protocol( protocol_store=protocol_store, analysis_store=analysis_store, protocol_reader=protocol_reader, + protocol_analyzer=protocol_analyzer, task_runner=task_runner, protocol_auto_deleter=protocol_auto_deleter, robot_type="OT-2 Standard", From af5aaac0b34e8d3f002a482534922f36adc4b915 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Thu, 23 Mar 2023 10:38:29 -0400 Subject: [PATCH 13/34] reorganize prtocol_runner tests and text fixes --- .../protocol_engine/state/commands.py | 1 - .../protocol_runner/protocol_runner.py | 6 +- .../protocol_runner/test_protocol_runner.py | 104 ++++++------------ 3 files changed, 39 insertions(+), 72 deletions(-) diff --git a/api/src/opentrons/protocol_engine/state/commands.py b/api/src/opentrons/protocol_engine/state/commands.py index 91a6fe8c51f..330012fb3f9 100644 --- a/api/src/opentrons/protocol_engine/state/commands.py +++ b/api/src/opentrons/protocol_engine/state/commands.py @@ -182,7 +182,6 @@ def __init__( latest_command_hash=None, ) - def handle_action(self, action: Action) -> None: # noqa: C901 """Modify state in reaction to an action.""" errors_by_id: Mapping[str, ErrorOccurrence] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index a353e460d21..9d09272563c 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -91,7 +91,7 @@ def __init__( legacy_context_creator: Optional[LegacyContextCreator] = None, legacy_executor: Optional[LegacyExecutor] = None, ) -> None: - """Initialize the JsonRunner with its dependencies.""" + """Initialize the PythonAndLegacyRunner with its dependencies.""" self._protocol_engine = protocol_engine self._hardware_api = hardware_api self._legacy_file_reader = legacy_file_reader or LegacyFileReader() @@ -190,7 +190,7 @@ async def run( class JsonRunner(ProtocolRunner): - """Protocol runner implementation for python protocols and legacy protocols.""" + """Protocol runner implementation for json protocols.""" def __init__( self, @@ -313,7 +313,7 @@ def __init__( hardware_api: HardwareControlAPI, task_queue: Optional[TaskQueue] = None, ) -> None: - """Initialize the JsonRunner with its dependencies.""" + """Initialize the MaintenanceRunner with its dependencies.""" self._protocol_engine = protocol_engine # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 9470bf5dd01..486e955ec4b 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -1,5 +1,6 @@ """Tests for the ProtocolRunner class.""" import pytest +from pytest_lazyfixture import lazy_fixture # type: ignore[import] from decoy import Decoy, matchers from 
pathlib import Path from typing import List, cast @@ -25,7 +26,7 @@ JsonRunner, PythonAndLegacyRunner, MaintenanceRunner, - ProtocolRunner + ProtocolRunner, ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ -239,12 +240,13 @@ async def test_create_protocol_runner( @pytest.mark.parametrize( - argnames=["subject"], - argvalues=[ - [pytest.lazy_fixture("json_subject")], + "subject", + [ + (lazy_fixture("json_subject")), + (lazy_fixture("legacy_subject")), ], ) -async def test_play_starts_run_json_runner( +async def test_play_starts_run( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, @@ -256,42 +258,63 @@ async def test_play_starts_run_json_runner( decoy.verify(protocol_engine.play(), times=1) -async def test_pause_json_runner( +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_subject")), + (lazy_fixture("legacy_subject")), + ], +) +async def test_pause( decoy: Decoy, protocol_engine: ProtocolEngine, - json_subject: JsonRunner, + subject: ProtocolRunner, ) -> None: """It should pause a protocol run with pause.""" - json_subject.pause() + subject.pause() decoy.verify(protocol_engine.pause(), times=1) +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_subject")), + (lazy_fixture("legacy_subject")), + ], +) async def test_stop_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - json_subject: JsonRunner, + subject: ProtocolRunner, ) -> None: """It should halt a protocol run with stop.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) - json_subject.play() - await json_subject.stop() + subject.play() + await subject.stop() decoy.verify(await protocol_engine.stop(), times=1) +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_subject")), + (lazy_fixture("legacy_subject")), + ], +) async def test_stop_never_started_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - json_subject: JsonRunner, + subject: ProtocolRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) - await json_subject.stop() + await subject.stop() decoy.verify( await protocol_engine.finish(drop_tips_and_home=False, set_run_status=False), @@ -398,61 +421,6 @@ async def test_load_json_runner( ) -async def test_play_starts_run_legacy_runner( - decoy: Decoy, - protocol_engine: ProtocolEngine, - task_queue: TaskQueue, - legacy_subject: PythonAndLegacyRunner, -) -> None: - """It should start a protocol run with play.""" - legacy_subject.play() - - decoy.verify(protocol_engine.play(), times=1) - - -async def test_pause_legacy_runner( - decoy: Decoy, - protocol_engine: ProtocolEngine, - legacy_subject: PythonAndLegacyRunner, -) -> None: - """It should pause a protocol run with pause.""" - legacy_subject.pause() - - decoy.verify(protocol_engine.pause(), times=1) - - -async def test_stop_legacy_runner( - decoy: Decoy, - task_queue: TaskQueue, - protocol_engine: ProtocolEngine, - legacy_subject: PythonAndLegacyRunner, -) -> None: - """It should halt a protocol run with stop.""" - decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) - - legacy_subject.play() - await legacy_subject.stop() - - decoy.verify(await protocol_engine.stop(), times=1) - - -async def test_stop_never_started_legacy_runner( - decoy: Decoy, - task_queue: TaskQueue, - 
protocol_engine: ProtocolEngine, - legacy_subject: PythonAndLegacyRunner, -) -> None: - """It should clean up rather than halt if the runner was never started.""" - decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) - - await legacy_subject.stop() - - decoy.verify( - await protocol_engine.finish(drop_tips_and_home=False, set_run_status=False), - times=1, - ) - - async def test_load_legacy_python( decoy: Decoy, legacy_file_reader: LegacyFileReader, From 3a870b08d2f2f563dd5e232e9478eac092cc3da2 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Thu, 23 Mar 2023 11:18:06 -0400 Subject: [PATCH 14/34] removed task_queue from maintenance runner --- api/src/opentrons/protocol_runner/protocol_runner.py | 5 ----- api/tests/opentrons/protocol_runner/test_protocol_runner.py | 1 - 2 files changed, 6 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 9d09272563c..7f7244925a9 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -311,13 +311,11 @@ def __init__( self, protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI, - task_queue: Optional[TaskQueue] = None, ) -> None: """Initialize the MaintenanceRunner with its dependencies.""" self._protocol_engine = protocol_engine # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface - self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) self._hardware_api = hardware_api def was_started(self) -> bool: @@ -368,8 +366,6 @@ async def run( self.load(protocol_source) await self._hardware_api.home() - self._task_queue.start() - await self._task_queue.join() run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() @@ -412,7 +408,6 @@ def create_protocol_runner( return MaintenanceRunner( protocol_engine=protocol_engine, - task_queue=task_queue, hardware_api=hardware_api, ) diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 486e955ec4b..8f56cabe891 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -151,7 +151,6 @@ def maintenance_subject( return MaintenanceRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, - task_queue=task_queue, ) From 50978e33e29b611fc5a1f87f484ca30f7241dd2b Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Thu, 23 Mar 2023 13:30:42 -0400 Subject: [PATCH 15/34] linting --- robot-server/robot_server/protocols/dependencies.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index b1070ef6229..292109e1a23 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ b/robot-server/robot_server/protocols/dependencies.py @@ -11,9 +11,6 @@ from sqlalchemy.engine import Engine as SQLEngine from opentrons.protocol_reader import ProtocolReader, FileReaderWriter, FileHasher -from opentrons.protocol_runner import create_simulating_runner - -from opentrons_shared_data.robot.dev_types import RobotType from robot_server.app_state import AppState, AppStateAccessor, get_app_state from robot_server.deletion_planner import ProtocolDeletionPlanner From 20d3c3f4bcff57d893864824be75feceeec6a2e1 Mon Sep 17 00:00:00 2001 From: 
tamarzanzouri Date: Thu, 23 Mar 2023 14:09:13 -0400 Subject: [PATCH 16/34] fixed failing test in actions router --- .../tests/integration/http_api/runs/test_persistence.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/robot-server/tests/integration/http_api/runs/test_persistence.py b/robot-server/tests/integration/http_api/runs/test_persistence.py index 4971b37cf06..3f008f43674 100644 --- a/robot-server/tests/integration/http_api/runs/test_persistence.py +++ b/robot-server/tests/integration/http_api/runs/test_persistence.py @@ -102,9 +102,14 @@ async def test_runs_persist_via_actions_router( """Test that runs commands and state are persisted when calling play action through dev server restart.""" client, server = client_and_server + await client.post_protocol([Path("./tests/integration/protocols/simple.py")]) + protocols = (await client.get_protocols()).json()["data"] + protocol_id = protocols[0]["id"] # create a run - create_run_response = await client.post_run(req_body={"data": {}}) + create_run_response = await client.post_run( + req_body={"data": {"protocolId": protocol_id}} + ) run_id = create_run_response.json()["data"]["id"] # persist the run by hitting the actions router From 08524fd8a26d24e36d3244e5eac322ee1ed2f7f4 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Thu, 23 Mar 2023 14:11:25 -0400 Subject: [PATCH 17/34] fixed maintenance test with task runner --- api/tests/opentrons/protocol_runner/test_protocol_runner.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 159dda0c42a..7d46ce96c8c 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -662,7 +662,6 @@ async def test_run_maintanence_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, - task_queue: TaskQueue, maintenance_subject: MaintenanceRunner, ) -> None: """It should run a protocol to completion.""" @@ -676,8 +675,6 @@ async def test_run_maintanence_runner( decoy.verify( await hardware_api.home(), - task_queue.start(), - await task_queue.join(), ) From da0fdbabe855a880f261037655c2b92d68cf788a Mon Sep 17 00:00:00 2001 From: TamarZanzouri Date: Mon, 27 Mar 2023 11:16:32 -0400 Subject: [PATCH 18/34] Update api/src/opentrons/protocol_runner/protocol_runner.py Co-authored-by: Max Marrone --- api/src/opentrons/protocol_runner/protocol_runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 7f7244925a9..41c39bd193b 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -80,7 +80,7 @@ async def run( class PythonAndLegacyRunner(ProtocolRunner): - """Protocol runner implementation for python protocols and legacy protocols.""" + """Protocol runner implementation for Python protocols, and JSON protocols ≤v5.""" def __init__( self, From b38aaf742afb83893334142cd9a73b07f49c5001 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Mon, 27 Mar 2023 15:58:25 -0400 Subject: [PATCH 19/34] pr feedback --- api/src/opentrons/protocol_runner/__init__.py | 10 +- .../create_simulating_runner.py | 12 +- .../protocol_runner/protocol_runner.py | 79 +++------ .../smoke_tests/test_protocol_runner.py | 12 +- .../test_legacy_command_mapper.py | 2 +- 
.../test_legacy_context_plugin.py | 2 +- .../protocol_runner/test_protocol_runner.py | 165 ++++-------------- 7 files changed, 77 insertions(+), 205 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 74a539124ac..de028004429 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -1,24 +1,24 @@ """Protocol run control and management. -The main export of this module is the ProtocolRunner class. See +The main export of this module is the AbstractRunner class. See protocol_runner.py for more details. """ from .protocol_runner import ( - ProtocolRunner, + AbstractRunner, ProtocolRunResult, create_protocol_runner, JsonRunner, PythonAndLegacyRunner, - MaintenanceRunner, + LiveRunner, ) from .create_simulating_runner import create_simulating_runner __all__ = [ - "ProtocolRunner", + "AbstractRunner", "ProtocolRunResult", "create_simulating_runner", "create_protocol_runner", "JsonRunner", "PythonAndLegacyRunner", - "MaintenanceRunner", + "LiveRunner", ] diff --git a/api/src/opentrons/protocol_runner/create_simulating_runner.py b/api/src/opentrons/protocol_runner/create_simulating_runner.py index fc35422b80e..0760845933a 100644 --- a/api/src/opentrons/protocol_runner/create_simulating_runner.py +++ b/api/src/opentrons/protocol_runner/create_simulating_runner.py @@ -1,4 +1,4 @@ -"""Simulating ProtocolRunner factory.""" +"""Simulating AbstractRunner factory.""" from opentrons.config import feature_flags from opentrons.hardware_control import API as OT2API, HardwareControlAPI @@ -11,13 +11,13 @@ from opentrons_shared_data.robot.dev_types import RobotType from .legacy_wrappers import LegacySimulatingContextCreator -from .protocol_runner import create_protocol_runner, ProtocolRunner +from .protocol_runner import create_protocol_runner, AbstractRunner async def create_simulating_runner( robot_type: RobotType, protocol_config: ProtocolConfig -) -> ProtocolRunner: - """Create a ProtocolRunner wired to a simulating HardwareControlAPI. +) -> AbstractRunner: + """Create a AbstractRunner wired to a simulating HardwareControlAPI. Example: ```python @@ -27,7 +27,7 @@ async def create_simulating_runner( from opentrons.protocol_runner import ( ProtocolType, ProtocolFile, - ProtocolRunner, + AbstractRunner, create_simulating_runner, ) @@ -35,7 +35,7 @@ async def create_simulating_runner( protocol_type=ProtocolType.PYTHON, files=[Path("/path/to/protocol.py")], ) - runner: ProtocolRunner = await create_simulating_runner() + runner: AbstractRunner = await create_simulating_runner() commands: List[Command] = await runner.run(protocol) ``` """ diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 41c39bd193b..224ad4def50 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -2,7 +2,7 @@ import asyncio from typing import List, NamedTuple, Optional, Union -from typing_extensions import Protocol as TypingProtocol +from abc import ABC, abstractmethod import anyio @@ -38,24 +38,26 @@ class ProtocolRunResult(NamedTuple): state_summary: StateSummary -class ProtocolRunner(TypingProtocol): +class AbstractRunner(ABC): """An interface to manage and control a protocol run. - The ProtocolRunner is primarily responsible for feeding a ProtocolEngine + The AbstractRunner is primarily responsible for feeding a ProtocolEngine with commands and control signals. 
These commands and signals are generated by protocol files, hardware signals, or externally via the HTTP robot-server. - A ProtocolRunner controls a single run. Once the run is finished, - you will need a new ProtocolRunner to do another run. + A AbstractRunner controls a single run. Once the run is finished, + you will need a new AbstractRunner to do another run. """ + @abstractmethod def was_started(self) -> bool: """Whether the runner has been started. This value is latched; once it is True, it will never become False. """ + @abstractmethod async def load(self, protocol_source: ProtocolSource) -> None: """Load a ProtocolSource into managed ProtocolEngine. @@ -63,15 +65,19 @@ async def load(self, protocol_source: ProtocolSource) -> None: to control the run of a file-based protocol. """ + @abstractmethod def play(self) -> None: """Start or resume the run.""" + @abstractmethod def pause(self) -> None: """Pause the run.""" + @abstractmethod async def stop(self) -> None: """Stop (cancel) the run.""" + @abstractmethod async def run( self, protocol_source: Optional[ProtocolSource] = None, @@ -79,7 +85,7 @@ async def run( """Run a given protocol to completion.""" -class PythonAndLegacyRunner(ProtocolRunner): +class PythonAndLegacyRunner(AbstractRunner): """Protocol runner implementation for Python protocols, and JSON protocols ≤v5.""" def __init__( @@ -105,18 +111,9 @@ def __init__( self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) def was_started(self) -> bool: - """Whether the runner has been started. - - This value is latched; once it is True, it will never become False. - """ return self._protocol_engine.state_view.commands.has_been_played() async def load(self, protocol_source: ProtocolSource) -> None: - """Load a ProtocolSource into managed ProtocolEngine. - - Calling this method is only necessary if the runner will be used - to control the run of a file-based protocol. - """ labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -152,15 +149,12 @@ async def load(self, protocol_source: ProtocolSource) -> None: ) def play(self) -> None: - """Start or resume the run.""" self._protocol_engine.play() def pause(self) -> None: - """Pause the run.""" self._protocol_engine.pause() async def stop(self) -> None: - """Stop (cancel) the run.""" if self.was_started(): await self._protocol_engine.stop() else: @@ -173,11 +167,10 @@ async def run( self, protocol_source: Optional[ProtocolSource] = None, ) -> ProtocolRunResult: - """Run a given protocol to completion.""" # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: - await self.load(protocol_source) + await self.load(protocol_source=protocol_source) await self._hardware_api.home() self.play() @@ -189,7 +182,7 @@ async def run( return ProtocolRunResult(commands=commands, state_summary=run_data) -class JsonRunner(ProtocolRunner): +class JsonRunner(AbstractRunner): """Protocol runner implementation for json protocols.""" def __init__( @@ -210,18 +203,9 @@ def __init__( self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) def was_started(self) -> bool: - """Whether the runner has been started. - - This value is latched; once it is True, it will never become False. 
- """ return self._protocol_engine.state_view.commands.has_been_played() async def load(self, protocol_source: ProtocolSource) -> None: - """Load a ProtocolSource into managed ProtocolEngine. - - Calling this method is only necessary if the runner will be used - to control the run of a file-based protocol. - """ labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -267,15 +251,12 @@ async def load(self, protocol_source: ProtocolSource) -> None: self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) def play(self) -> None: - """Start or resume the run.""" self._protocol_engine.play() def pause(self) -> None: - """Pause the run.""" self._protocol_engine.pause() async def stop(self) -> None: - """Stop (cancel) the run.""" if self.was_started(): await self._protocol_engine.stop() else: @@ -288,7 +269,6 @@ async def run( self, protocol_source: Optional[ProtocolSource] = None, ) -> ProtocolRunResult: - """Run a given protocol to completion.""" # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: @@ -304,7 +284,7 @@ async def run( return ProtocolRunResult(commands=commands, state_summary=run_data) -class MaintenanceRunner(ProtocolRunner): +class LiveRunner(AbstractRunner): """Protocol runner implementation for setup commands.""" def __init__( @@ -312,43 +292,27 @@ def __init__( protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI, ) -> None: - """Initialize the MaintenanceRunner with its dependencies.""" + """Initialize the LiveRunner with its dependencies.""" self._protocol_engine = protocol_engine # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface self._hardware_api = hardware_api def was_started(self) -> bool: - """Whether the runner has been started. - - This value is latched; once it is True, it will never become False. - """ return self._protocol_engine.state_view.commands.has_been_played() async def load(self, protocol_source: ProtocolSource) -> None: - """Load a ProtocolSource into managed ProtocolEngine. - - Calling this method is only necessary if the runner will be used - to control the run of a file-based protocol. - """ raise NotImplementedError( - "MaintenanceRunner.load() not supported for setup commands. no protocol associated." + "LiveRunner.load() not supported for setup commands. no protocol associated." ) def play(self) -> None: - """Start or resume the run.""" - raise NotImplementedError( - "MaintenanceRunner.play() not supported for setup commands." - ) + self._protocol_engine.play() def pause(self) -> None: - """Pause the run.""" - raise NotImplementedError( - "MaintenanceRunner.pause() not supported for setup commands." 
- ) + self._protocol_engine.pause() async def stop(self) -> None: - """Stop (cancel) the run.""" if self.was_started(): await self._protocol_engine.stop() else: @@ -361,7 +325,6 @@ async def run( self, protocol_source: Optional[ProtocolSource] = None, ) -> ProtocolRunResult: - """Run a given protocol to completion.""" if protocol_source: self.load(protocol_source) @@ -382,7 +345,7 @@ def create_protocol_runner( legacy_file_reader: Optional[LegacyFileReader] = None, legacy_context_creator: Optional[LegacyContextCreator] = None, legacy_executor: Optional[LegacyExecutor] = None, -) -> ProtocolRunner: +) -> AbstractRunner: """Create a protocol runner.""" if protocol_config: if ( @@ -406,7 +369,7 @@ def create_protocol_runner( legacy_executor=legacy_executor, ) - return MaintenanceRunner( + return LiveRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, ) diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index 13703e8b0ac..62ebf5d79c4 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -1,6 +1,6 @@ -"""Smoke tests for the ProtocolRunner and ProtocolEngine classes. +"""Smoke tests for the AbstractRunner and ProtocolEngine classes. -These tests construct a ProtocolRunner with a real ProtocolEngine +These tests construct a AbstractRunner with a real ProtocolEngine hooked to a simulating HardwareAPI. Minimal, but valid and complete, protocol files are then loaded from @@ -43,7 +43,7 @@ async def test_runner_with_python( python_protocol_file: Path, tempdeck_v1_def: ModuleDefinition, ) -> None: - """It should run a Python protocol on the ProtocolRunner.""" + """It should run a Python protocol on the AbstractRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[python_protocol_file], @@ -112,7 +112,7 @@ async def test_runner_with_python( async def test_runner_with_json(json_protocol_file: Path) -> None: - """It should run a JSON protocol on the ProtocolRunner.""" + """It should run a JSON protocol on the AbstractRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[json_protocol_file], @@ -173,7 +173,7 @@ async def test_runner_with_json(json_protocol_file: Path) -> None: async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> None: - """It should run a Python protocol on the ProtocolRunner.""" + """It should run a Python protocol on the AbstractRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_python_protocol_file], @@ -233,7 +233,7 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: - """It should run a Python protocol on the ProtocolRunner.""" + """It should run a Python protocol on the AbstractRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_json_protocol_file], diff --git a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py index 531ff4a8401..27f0ce212af 100644 --- a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py @@ -1,4 +1,4 @@ 
-"""Tests for the ProtocolRunner's LegacyContextPlugin.""" +"""Tests for the AbstractRunner's LegacyContextPlugin.""" import inspect from datetime import datetime from typing import cast diff --git a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py index fb2c475fb93..96003564813 100644 --- a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py +++ b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py @@ -1,4 +1,4 @@ -"""Tests for the ProtocolRunner's LegacyContextPlugin.""" +"""Tests for the AbstractRunner's LegacyContextPlugin.""" import pytest from anyio import to_thread from decoy import Decoy, matchers diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 7d46ce96c8c..1f7c75c370c 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -1,9 +1,9 @@ -"""Tests for the ProtocolRunner class.""" +"""Tests for the AbstractRunner class.""" import pytest from pytest_lazyfixture import lazy_fixture # type: ignore[import] from decoy import Decoy, matchers from pathlib import Path -from typing import List, cast +from typing import List, cast, Optional, Union from opentrons_shared_data.protocol.dev_types import ( JsonProtocol as LegacyJsonProtocolDict, @@ -25,8 +25,8 @@ create_protocol_runner, JsonRunner, PythonAndLegacyRunner, - MaintenanceRunner, - ProtocolRunner, + LiveRunner, + AbstractRunner, ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ -142,18 +142,28 @@ def legacy_subject( @pytest.fixture -def maintenance_subject( +def live_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, -) -> MaintenanceRunner: - """Get a MaintenanceRunner test subject with mocked dependencies.""" - return MaintenanceRunner( +) -> LiveRunner: + """Get a LiveRunner test subject with mocked dependencies.""" + return LiveRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, ) +@pytest.mark.parametrize( + "config, runner_type", + [ + (JsonProtocolConfig(schema_version=6), JsonRunner), + (PythonProtocolConfig(api_version=APIVersion(2, 14)), PythonAndLegacyRunner), + (JsonProtocolConfig(schema_version=5), PythonAndLegacyRunner), + (PythonProtocolConfig(api_version=APIVersion(2, 13)), PythonAndLegacyRunner), + (None, LiveRunner), + ], +) async def test_create_protocol_runner( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, @@ -163,78 +173,20 @@ async def test_create_protocol_runner( legacy_file_reader: LegacyFileReader, legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, + config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], + runner_type: AbstractRunner, ) -> None: """It should return protocol runner type depending on the config.""" assert isinstance( create_protocol_runner( - protocol_config=JsonProtocolConfig(schema_version=6), - protocol_engine=protocol_engine, - hardware_api=hardware_api, - task_queue=task_queue, - json_file_reader=json_file_reader, - json_translator=json_translator, - ), - JsonRunner, - ) - - assert isinstance( - create_protocol_runner( - protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 14)), - protocol_engine=protocol_engine, - hardware_api=hardware_api, - task_queue=task_queue, - json_file_reader=json_file_reader, - 
json_translator=json_translator, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, - ), - PythonAndLegacyRunner, - ) - - assert isinstance( - create_protocol_runner( - protocol_config=JsonProtocolConfig(schema_version=5), + protocol_config=config, protocol_engine=protocol_engine, hardware_api=hardware_api, task_queue=task_queue, json_file_reader=json_file_reader, json_translator=json_translator, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, ), - PythonAndLegacyRunner, - ) - - assert isinstance( - create_protocol_runner( - protocol_config=JsonProtocolConfig(schema_version=5), - protocol_engine=protocol_engine, - hardware_api=hardware_api, - task_queue=task_queue, - json_file_reader=json_file_reader, - json_translator=json_translator, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, - ), - PythonAndLegacyRunner, - ) - - assert isinstance( - create_protocol_runner( - protocol_config=None, - protocol_engine=protocol_engine, - hardware_api=hardware_api, - task_queue=task_queue, - json_file_reader=json_file_reader, - json_translator=json_translator, - legacy_file_reader=legacy_file_reader, - legacy_context_creator=legacy_context_creator, - legacy_executor=legacy_executor, - ), - MaintenanceRunner, + type(runner_type), ) @@ -243,13 +195,14 @@ async def test_create_protocol_runner( [ (lazy_fixture("json_subject")), (lazy_fixture("legacy_subject")), + (lazy_fixture("live_subject")), ], ) async def test_play_starts_run( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + subject: AbstractRunner, ) -> None: """It should start a protocol run with play.""" subject.play() @@ -262,12 +215,13 @@ async def test_play_starts_run( [ (lazy_fixture("json_subject")), (lazy_fixture("legacy_subject")), + (lazy_fixture("live_subject")), ], ) async def test_pause( decoy: Decoy, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + subject: AbstractRunner, ) -> None: """It should pause a protocol run with pause.""" subject.pause() @@ -280,13 +234,14 @@ async def test_pause( [ (lazy_fixture("json_subject")), (lazy_fixture("legacy_subject")), + (lazy_fixture("live_subject")), ], ) -async def test_stop_json_runner( +async def test_stop( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + subject: AbstractRunner, ) -> None: """It should halt a protocol run with stop.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) @@ -302,13 +257,14 @@ async def test_stop_json_runner( [ (lazy_fixture("json_subject")), (lazy_fixture("legacy_subject")), + (lazy_fixture("live_subject")), ], ) async def test_stop_never_started_json_runner( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + subject: AbstractRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) @@ -627,76 +583,29 @@ async def test_run_python_runner( ) -async def test_stop_maintenance_runner( - decoy: Decoy, - task_queue: TaskQueue, - protocol_engine: ProtocolEngine, - maintenance_subject: MaintenanceRunner, -) -> None: - """It should halt a protocol run with stop.""" - 
decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) - - await maintenance_subject.stop() - - decoy.verify(await protocol_engine.stop(), times=1) - - -async def test_stop_never_started_maintenance_runner( - decoy: Decoy, - task_queue: TaskQueue, - protocol_engine: ProtocolEngine, - maintenance_subject: MaintenanceRunner, -) -> None: - """It should clean up rather than halt if the runner was never started.""" - decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) - - await maintenance_subject.stop() - - decoy.verify( - await protocol_engine.finish(drop_tips_and_home=False, set_run_status=False), - times=1, - ) - - async def test_run_maintanence_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, - maintenance_subject: MaintenanceRunner, + live_subject: LiveRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert maintenance_subject.was_started() is False - await maintenance_subject.run() - assert maintenance_subject.was_started() is True + assert live_subject.was_started() is False + await live_subject.run() + assert live_subject.was_started() is True decoy.verify( await hardware_api.home(), ) -def test_play_maintanence_runner_raises( - maintenance_subject: MaintenanceRunner, -) -> None: - """Should raise a not supported error.""" - with pytest.raises(NotImplementedError): - maintenance_subject.play() - - -def test_pause_maintanence_runner_raises( - maintenance_subject: MaintenanceRunner, -) -> None: - """Should raise a not supported error.""" - with pytest.raises(NotImplementedError): - maintenance_subject.pause() - - async def test_load_maintanence_runner_raises( - maintenance_subject: MaintenanceRunner, + live_subject: LiveRunner, ) -> None: """Should raise a not supported error.""" with pytest.raises(NotImplementedError): - await maintenance_subject.load(None) # type: ignore[arg-type] + await live_subject.load(None) # type: ignore[arg-type] From 14e2fa0fef3f248c0a5422721485ec2a3ffa8d4f Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Mon, 27 Mar 2023 16:41:02 -0400 Subject: [PATCH 20/34] changed ProtocolRunResult to RunnerRunResult --- api/src/opentrons/protocol_runner/__init__.py | 4 ++-- .../protocol_runner/protocol_runner.py | 19 ++++++++----------- .../protocol_runner/test_protocol_runner.py | 2 +- .../robot_server/runs/engine_store.py | 12 ++++++------ .../tests/protocols/test_protocol_analyzer.py | 2 +- robot-server/tests/runs/test_engine_store.py | 8 ++++---- .../tests/runs/test_run_controller.py | 4 ++-- .../tests/runs/test_run_data_manager.py | 6 +++--- 8 files changed, 27 insertions(+), 30 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index de028004429..9cb607ace1e 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -5,7 +5,7 @@ """ from .protocol_runner import ( AbstractRunner, - ProtocolRunResult, + RunnerRunResult, create_protocol_runner, JsonRunner, PythonAndLegacyRunner, @@ -15,7 +15,7 @@ __all__ = [ "AbstractRunner", - "ProtocolRunResult", + "RunnerRunResult", "create_simulating_runner", "create_protocol_runner", "JsonRunner", diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 224ad4def50..de303f7bb76 100644 --- 
a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -31,7 +31,7 @@ ) -class ProtocolRunResult(NamedTuple): +class RunnerRunResult(NamedTuple): """Result data from a run, pulled from the ProtocolEngine.""" commands: List[Command] @@ -81,7 +81,7 @@ async def stop(self) -> None: async def run( self, protocol_source: Optional[ProtocolSource] = None, - ) -> ProtocolRunResult: + ) -> RunnerRunResult: """Run a given protocol to completion.""" @@ -166,7 +166,7 @@ async def stop(self) -> None: async def run( self, protocol_source: Optional[ProtocolSource] = None, - ) -> ProtocolRunResult: + ) -> RunnerRunResult: # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: @@ -179,7 +179,7 @@ async def run( run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return ProtocolRunResult(commands=commands, state_summary=run_data) + return RunnerRunResult(commands=commands, state_summary=run_data) class JsonRunner(AbstractRunner): @@ -268,7 +268,7 @@ async def stop(self) -> None: async def run( self, protocol_source: Optional[ProtocolSource] = None, - ) -> ProtocolRunResult: + ) -> RunnerRunResult: # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: @@ -281,7 +281,7 @@ async def run( run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return ProtocolRunResult(commands=commands, state_summary=run_data) + return RunnerRunResult(commands=commands, state_summary=run_data) class LiveRunner(AbstractRunner): @@ -324,15 +324,12 @@ async def stop(self) -> None: async def run( self, protocol_source: Optional[ProtocolSource] = None, - ) -> ProtocolRunResult: - if protocol_source: - self.load(protocol_source) - + ) -> RunnerRunResult: await self._hardware_api.home() run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return ProtocolRunResult(commands=commands, state_summary=run_data) + return RunnerRunResult(commands=commands, state_summary=run_data) def create_protocol_runner( diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 1f7c75c370c..1f456c9aa30 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -186,7 +186,7 @@ async def test_create_protocol_runner( json_file_reader=json_file_reader, json_translator=json_translator, ), - type(runner_type), + runner_type, ) diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 6a18d203ee5..ea885471975 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -6,8 +6,8 @@ from opentrons.config import feature_flags from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_runner import ( - ProtocolRunner, - ProtocolRunResult, + AbstractRunner, + RunnerRunResult, create_protocol_runner, ) from opentrons.protocol_engine import ( @@ -34,7 +34,7 @@ class RunnerEnginePair(NamedTuple): """A stored ProtocolRunner/ProtocolEngine pair.""" run_id: str - runner: ProtocolRunner + runner: AbstractRunner engine: 
ProtocolEngine @@ -65,7 +65,7 @@ def engine(self) -> ProtocolEngine: return self._runner_engine_pair.engine @property - def runner(self) -> ProtocolRunner: + def runner(self) -> AbstractRunner: """Get the "current" persisted ProtocolRunner.""" assert self._runner_engine_pair is not None, "Runner not yet created." return self._runner_engine_pair.runner @@ -162,7 +162,7 @@ async def create( return engine.state_view.get_summary() - async def clear(self) -> ProtocolRunResult: + async def clear(self) -> RunnerRunResult: """Remove the persisted ProtocolEngine. Raises: @@ -181,4 +181,4 @@ async def clear(self) -> ProtocolRunResult: commands = state_view.commands.get_all() self._runner_engine_pair = None - return ProtocolRunResult(state_summary=run_data, commands=commands) + return RunnerRunResult(state_summary=run_data, commands=commands) diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index abc9840ad3f..2b3144a3659 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -107,7 +107,7 @@ async def test_analyze( ).then_return(json_runner) decoy.when(await json_runner.run(protocol_resource.source)).then_return( - protocol_runner.ProtocolRunResult( + protocol_runner.RunnerRunResult( commands=[analysis_command], state_summary=StateSummary( status=EngineStatus.SUCCEEDED, diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index be4426fbba8..7975e1a9373 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -12,8 +12,8 @@ from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types from opentrons.protocol_runner import ( - ProtocolRunResult, - MaintenanceRunner, + RunnerRunResult, + LiveRunner, ) from opentrons.protocol_reader import ProtocolReader, ProtocolSource @@ -50,7 +50,7 @@ async def test_create_engine(subject: EngineStore) -> None: assert subject.current_run_id == "run-id" assert isinstance(result, StateSummary) - assert isinstance(subject.runner, MaintenanceRunner) + assert isinstance(subject.runner, LiveRunner) assert isinstance(subject.engine, ProtocolEngine) @@ -134,7 +134,7 @@ async def test_clear_engine(subject: EngineStore) -> None: result = await subject.clear() assert subject.current_run_id is None - assert isinstance(result, ProtocolRunResult) + assert isinstance(result, RunnerRunResult) with pytest.raises(AssertionError): subject.engine diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index b31b2b1553e..db94414bad6 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -11,7 +11,7 @@ commands as pe_commands, errors as pe_errors, ) -from opentrons.protocol_runner import ProtocolRunResult +from opentrons.protocol_runner import RunnerRunResult from robot_server.service.task_runner import TaskRunner from robot_server.runs.action_models import RunAction, RunActionType @@ -144,7 +144,7 @@ async def test_create_play_action_to_start( decoy.verify(mock_task_runner.run(background_task_captor)) decoy.when(await mock_engine_store.runner.run()).then_return( - ProtocolRunResult( + RunnerRunResult( commands=protocol_commands, state_summary=engine_state_summary, ) diff --git a/robot-server/tests/runs/test_run_data_manager.py 
b/robot-server/tests/runs/test_run_data_manager.py index 1ecdf9e5896..59f2de963df 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -6,7 +6,7 @@ from decoy import Decoy, matchers from opentrons.types import DeckSlotName -from opentrons.protocol_runner import ProtocolRunResult +from opentrons.protocol_runner import RunnerRunResult from opentrons.protocol_engine import ( EngineStatus, StateSummary, @@ -485,7 +485,7 @@ async def test_update_current( run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(await mock_engine_store.clear()).then_return( - ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) + RunnerRunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( @@ -593,7 +593,7 @@ async def test_create_archives_existing( decoy.when(mock_engine_store.current_run_id).then_return(run_id_old) decoy.when(await mock_engine_store.clear()).then_return( - ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) + RunnerRunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( From 5aa78999bf3f5e2a1bd67d70888ec2b2ae020c7a Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 10:42:46 -0400 Subject: [PATCH 21/34] added linting ignore --- api/tests/opentrons/protocol_runner/test_protocol_runner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 1f456c9aa30..0a0e931ee24 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -174,7 +174,7 @@ async def test_create_protocol_runner( legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], - runner_type: AbstractRunner, + runner_type: Union[JsonRunner, PythonAndLegacyRunner, LiveRunner], ) -> None: """It should return protocol runner type depending on the config.""" assert isinstance( @@ -186,7 +186,7 @@ async def test_create_protocol_runner( json_file_reader=json_file_reader, json_translator=json_translator, ), - runner_type, + runner_type, # type: ignore[arg-type] ) From f36be3377273cf93916ab83481521bf1f956556d Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 10:58:02 -0400 Subject: [PATCH 22/34] fixed g-code testing --- g-code-testing/g_code_parsing/g_code_engine.py | 7 ++++--- robot-server/robot_server/runs/engine_store.py | 5 ++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/g-code-testing/g_code_parsing/g_code_engine.py b/g-code-testing/g_code_parsing/g_code_engine.py index 2416a370550..6eb481b072f 100644 --- a/g-code-testing/g_code_parsing/g_code_engine.py +++ b/g-code-testing/g_code_parsing/g_code_engine.py @@ -7,7 +7,7 @@ from opentrons import APIVersion from opentrons.hardware_control.emulation.settings import Settings -from opentrons.protocol_engine.create_protocol_engine import create_protocol_engine +from opentrons.protocol_engine.create_protocol_engine import create_protocol_engine, AbstractRunner from opentrons.protocol_engine.state.config import Config from opentrons.protocol_reader.protocol_source import ( JsonProtocolConfig, @@ -15,7 +15,7 @@ ProtocolSource, PythonProtocolConfig, ) -from opentrons.protocol_runner.protocol_runner import ProtocolRunner +from 
opentrons.protocol_runner.protocol_runner import create_protocol_runner from opentrons.protocols.parse import parse from opentrons.protocols.execution import execute from contextlib import asynccontextmanager, contextmanager @@ -156,7 +156,8 @@ async def run_protocol( content_hash=path, ) - protocol_runner: ProtocolRunner = ProtocolRunner( + protocol_runner: AbstractRunner = create_protocol_runner( + protocol_config=config, protocol_engine=await create_protocol_engine( hardware_api=hardware, # type: ignore config=Config(robot_type=robot_type), diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index ea885471975..37887b2ade9 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -31,7 +31,10 @@ class EngineConflictError(RuntimeError): class RunnerEnginePair(NamedTuple): - """A stored ProtocolRunner/ProtocolEngine pair.""" + """A stored + + + ProtocolRunner/ProtocolEngine pair.""" run_id: str runner: AbstractRunner From a81df1db8ad1d2c0bb7cf2907a2f1f6d6547edab Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 11:03:04 -0400 Subject: [PATCH 23/34] g-code testing fix --- g-code-testing/g_code_parsing/g_code_engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/g-code-testing/g_code_parsing/g_code_engine.py b/g-code-testing/g_code_parsing/g_code_engine.py index 6eb481b072f..fcc56371caa 100644 --- a/g-code-testing/g_code_parsing/g_code_engine.py +++ b/g-code-testing/g_code_parsing/g_code_engine.py @@ -7,7 +7,7 @@ from opentrons import APIVersion from opentrons.hardware_control.emulation.settings import Settings -from opentrons.protocol_engine.create_protocol_engine import create_protocol_engine, AbstractRunner +from opentrons.protocol_engine.create_protocol_engine import create_protocol_engine from opentrons.protocol_engine.state.config import Config from opentrons.protocol_reader.protocol_source import ( JsonProtocolConfig, @@ -156,7 +156,7 @@ async def run_protocol( content_hash=path, ) - protocol_runner: AbstractRunner = create_protocol_runner( + protocol_runner = create_protocol_runner( protocol_config=config, protocol_engine=await create_protocol_engine( hardware_api=hardware, # type: ignore From b64731518b6e40744589101a2c6b36ca16fe0add Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 11:10:52 -0400 Subject: [PATCH 24/34] linting --- api/src/opentrons/protocol_runner/protocol_runner.py | 1 + robot-server/robot_server/runs/engine_store.py | 5 +---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index de303f7bb76..c0cfd4b2ecf 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -57,6 +57,7 @@ def was_started(self) -> bool: This value is latched; once it is True, it will never become False. """ + # TODO (tz, 03-28-23): Change this to a @classmethod that does not accept protocol_source for live runs and returns the runner instance. @abstractmethod async def load(self, protocol_source: ProtocolSource) -> None: """Load a ProtocolSource into managed ProtocolEngine. 
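
The TODO added in the hunk above proposes replacing `load()` with a classmethod-style constructor, so that live (setup-command) runs never have to carry a file-loading method in the shared interface. A minimal sketch of that shape, under assumptions: `RunnerSketch` and `from_source` are hypothetical names not defined anywhere in this changeset, and the constructor is trimmed down (the real runners also take a hardware API, task queue, and file readers).

```python
"""Illustrative sketch only; not part of this changeset."""
from abc import ABC, abstractmethod
from typing import Optional

from opentrons.protocol_engine import ProtocolEngine
from opentrons.protocol_reader import ProtocolSource


class RunnerSketch(ABC):
    """Trimmed stand-in for AbstractRunner, reduced to what the sketch needs."""

    def __init__(self, protocol_engine: ProtocolEngine) -> None:
        self._protocol_engine = protocol_engine

    @abstractmethod
    async def load(self, protocol_source: ProtocolSource) -> None:
        """Feed the managed ProtocolEngine, as JsonRunner and PythonAndLegacyRunner do."""

    @classmethod
    async def from_source(
        cls,
        protocol_engine: ProtocolEngine,
        protocol_source: Optional[ProtocolSource] = None,
    ) -> "RunnerSketch":
        """Build a runner and, for file-based runs, load the protocol up front.

        A live runner would be created with no protocol_source, so it would
        never need to implement or expose load().
        """
        runner = cls(protocol_engine)
        if protocol_source is not None:
            await runner.load(protocol_source)
        return runner
```

Because each concrete runner has different constructor dependencies, the real factory would more likely remain in `create_protocol_runner()` than move onto the base class; the sketch only shows how `load()` could drop out of the shared interface for live runs.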
diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 37887b2ade9..c7845214745 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -31,10 +31,7 @@ class EngineConflictError(RuntimeError): class RunnerEnginePair(NamedTuple): - """A stored - - - ProtocolRunner/ProtocolEngine pair.""" + """A stored AbstractRunner/ProtocolEngine pair.""" run_id: str runner: AbstractRunner From e802d654710a7c194ad463fc7bf611df6cf30ea6 Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 12:03:46 -0400 Subject: [PATCH 25/34] linting --- .../protocol_runner/protocol_runner.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index c0cfd4b2ecf..5bc24b8ffaf 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -111,10 +111,10 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: + def was_started(self) -> bool: # noqa: D102 return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: + async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -149,13 +149,13 @@ async def load(self, protocol_source: ProtocolSource) -> None: context=context, ) - def play(self) -> None: + def play(self) -> None: # noqa: D102 self._protocol_engine.play() - def pause(self) -> None: + def pause(self) -> None: # noqa: D102 self._protocol_engine.pause() - async def stop(self) -> None: + async def stop(self) -> None: # noqa: D102 if self.was_started(): await self._protocol_engine.stop() else: @@ -164,7 +164,7 @@ async def stop(self) -> None: set_run_status=False, ) - async def run( + async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, ) -> RunnerRunResult: @@ -203,10 +203,10 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: + def was_started(self) -> bool: # noqa: D102 return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: + async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -251,13 +251,13 @@ async def load(self, protocol_source: ProtocolSource) -> None: self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) - def play(self) -> None: + def play(self) -> None: # noqa: D102 self._protocol_engine.play() - def pause(self) -> None: + def pause(self) -> None: # noqa: D102 self._protocol_engine.pause() - async def stop(self) -> None: + async def stop(self) -> None: # noqa: D102 if self.was_started(): await self._protocol_engine.stop() else: @@ -266,7 +266,7 @@ async def stop(self) -> None: set_run_status=False, ) - async def run( + async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, ) -> RunnerRunResult: @@ -299,21 +299,21 @@ def __init__( # of runner interface self._hardware_api = hardware_api - def 
was_started(self) -> bool: + def was_started(self) -> bool: # noqa: D102 return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: + async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 raise NotImplementedError( "LiveRunner.load() not supported for setup commands. no protocol associated." ) - def play(self) -> None: + def play(self) -> None: # noqa: D102 self._protocol_engine.play() - def pause(self) -> None: + def pause(self) -> None: # noqa: D102 self._protocol_engine.pause() - async def stop(self) -> None: + async def stop(self) -> None: # noqa: D102 if self.was_started(): await self._protocol_engine.stop() else: @@ -322,7 +322,7 @@ async def stop(self) -> None: set_run_status=False, ) - async def run( + async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, ) -> RunnerRunResult: From 923e00cef68f1ddfa2a552cfa3c48dc90ef2a7bd Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Tue, 28 Mar 2023 16:10:34 -0400 Subject: [PATCH 26/34] added pe.play for run live commands --- api/src/opentrons/protocol_runner/protocol_runner.py | 1 + api/tests/opentrons/protocol_runner/test_protocol_runner.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 5bc24b8ffaf..71aecd58364 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -327,6 +327,7 @@ async def run( # noqa: D102 protocol_source: Optional[ProtocolSource] = None, ) -> RunnerRunResult: await self._hardware_api.home() + self.play() run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 0a0e931ee24..385043967c0 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -583,7 +583,7 @@ async def test_run_python_runner( ) -async def test_run_maintanence_runner( +async def test_run_live_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, @@ -600,10 +600,11 @@ async def test_run_maintanence_runner( decoy.verify( await hardware_api.home(), + protocol_engine.play(), ) -async def test_load_maintanence_runner_raises( +async def test_load_live_runner_raises( live_subject: LiveRunner, ) -> None: """Should raise a not supported error.""" From e1e9ddd2de573ec6fa50ba8f04daec1adeebf1bd Mon Sep 17 00:00:00 2001 From: tamarzanzouri Date: Wed, 29 Mar 2023 15:03:41 -0400 Subject: [PATCH 27/34] added tavern test for posting protocol commands --- .../protocol_runner/protocol_runner.py | 5 + .../protocol_runner/test_protocol_runner.py | 4 + ...t_run_queued_protocol_commands.tavern.yaml | 196 ++++++++++++++++++ 3 files changed, 205 insertions(+) create mode 100644 robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 71aecd58364..93cf187f533 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -292,12 +292,14 @@ def __init__( self, protocol_engine: ProtocolEngine, hardware_api: 
HardwareControlAPI, + task_queue: Optional[TaskQueue] = None, ) -> None: """Initialize the LiveRunner with its dependencies.""" self._protocol_engine = protocol_engine # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface self._hardware_api = hardware_api + self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) def was_started(self) -> bool: # noqa: D102 return self._protocol_engine.state_view.commands.has_been_played() @@ -328,6 +330,8 @@ async def run( # noqa: D102 ) -> RunnerRunResult: await self._hardware_api.home() self.play() + self._task_queue.start() + await self._task_queue.join() run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() @@ -371,6 +375,7 @@ def create_protocol_runner( return LiveRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, + task_queue=task_queue, ) diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 385043967c0..17a3fc99ec5 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -151,6 +151,7 @@ def live_subject( return LiveRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, + task_queue=task_queue, ) @@ -587,6 +588,7 @@ async def test_run_live_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, + task_queue: TaskQueue, live_subject: LiveRunner, ) -> None: """It should run a protocol to completion.""" @@ -601,6 +603,8 @@ async def test_run_live_runner( decoy.verify( await hardware_api.home(), protocol_engine.play(), + task_queue.start(), + await task_queue.join(), ) diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml new file mode 100644 index 00000000000..6cd8ce9df82 --- /dev/null +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -0,0 +1,196 @@ +test_name: Make sure empty run commands execute protocol commands queue + +marks: + - xfail + - usefixtures: + - run_server + +stages: + - name: Create Empty Run + request: + url: '{host:s}:{port:d}/runs' + json: + data: {} + method: POST + response: + strict: + - json:off + status_code: 201 + json: + data: + id: !anystr + status: idle + current: true + save: + json: + run_id: data.id + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 1' + response: + status_code: 201 + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 2' + response: + status_code: 201 + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 3' + response: + status_code: 201 + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 4' + response: + status_code: 201 + + - name: Get run + request: + url: 
'{host:s}:{port:d}/runs/{run_id}' + method: GET + response: + status_code: 200 + json: + # Check that `data` is identical to the original POSTed run. + data: + actions: [] + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + current: True + errors: [] + id: '{run_id}' + labware: + - id: 'fixedTrash' + loadName: 'opentrons_1_trash_1100ml_fixed' + definitionUri: 'opentrons/opentrons_1_trash_1100ml_fixed/1' + location: + slotName: '12' + labwareOffsets: [] + liquids: [] + modules: [] + pipettes: [] + status: 'idle' + + - name: Play the run + request: + url: '{host:s}:{port:d}/runs/{run_id}/actions' + method: POST + json: + data: + actionType: play + response: + status_code: 201 + json: + data: + id: !anystr + actionType: play + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + save: + json: + play_action_id: data.id + + # While the protocol is ongoing, several fields (like labware) + # are nondeterministic depending on request timing. + # Wait for the protocol to complete so things settle. + - name: Wait for the protocol to complete + max_retries: 10 + delay_after: 0.1 + request: + url: '{host:s}:{port:d}/runs/{run_id}' + method: GET + response: + status_code: 200 + strict: + - json:off + json: + data: + status: succeeded + + - name: Verify commands succeeded with the expected results + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: GET + response: + status_code: 200 + json: + links: + current: !anydict + meta: + cursor: 0 + totalLength: 4 + data: + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 1' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 2' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 3' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 4' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + save: + json: + load_labware_command_id: data[0].id From 88cae89e385c45a5cec0f52f4fc9960d075c928d Mon Sep 17 00:00:00 2001 From: Sanniti Date: Wed, 5 Apr 2023 16:30:13 -0400 Subject: [PATCH 28/34] addressed PR comments, fixed live runner bug, updated tests --- api/src/opentrons/protocol_runner/__init__.py | 6 +- .../protocol_runner/legacy_wrappers.py | 2 + 
.../protocol_runner/protocol_runner.py | 124 ++++++------------ .../smoke_tests/test_protocol_runner.py | 6 +- .../protocol_runner/test_protocol_runner.py | 13 +- .../robot_server/runs/engine_store.py | 21 ++- .../tests/protocols/test_protocol_analyzer.py | 2 +- robot-server/tests/runs/test_engine_store.py | 123 ++++++++++++++--- .../tests/runs/test_run_controller.py | 4 +- .../tests/runs/test_run_data_manager.py | 6 +- 10 files changed, 177 insertions(+), 130 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 9cb607ace1e..92aab4c567d 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -5,20 +5,22 @@ """ from .protocol_runner import ( AbstractRunner, - RunnerRunResult, + RunResult, create_protocol_runner, JsonRunner, PythonAndLegacyRunner, LiveRunner, + RunnerType, ) from .create_simulating_runner import create_simulating_runner __all__ = [ "AbstractRunner", - "RunnerRunResult", + "RunResult", "create_simulating_runner", "create_protocol_runner", "JsonRunner", "PythonAndLegacyRunner", "LiveRunner", + "RunnerType", ] diff --git a/api/src/opentrons/protocol_runner/legacy_wrappers.py b/api/src/opentrons/protocol_runner/legacy_wrappers.py index b65471a5b46..85df1380c1d 100644 --- a/api/src/opentrons/protocol_runner/legacy_wrappers.py +++ b/api/src/opentrons/protocol_runner/legacy_wrappers.py @@ -96,6 +96,8 @@ def read( ) +# TODO (spp, 2023-04-05): Remove 'legacy' wording since this is the context we are using +# for all python protocols. class LegacyContextCreator: """Interface to construct Protocol API v2 contexts.""" diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 93cf187f533..9e2acf8d4cf 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -31,7 +31,7 @@ ) -class RunnerRunResult(NamedTuple): +class RunResult(NamedTuple): """Result data from a run, pulled from the ProtocolEngine.""" commands: List[Command] @@ -41,48 +41,48 @@ class RunnerRunResult(NamedTuple): class AbstractRunner(ABC): """An interface to manage and control a protocol run. - The AbstractRunner is primarily responsible for feeding a ProtocolEngine + A Runner is primarily responsible for feeding a ProtocolEngine with commands and control signals. These commands and signals are generated by protocol files, hardware signals, or externally via the HTTP robot-server. - A AbstractRunner controls a single run. Once the run is finished, - you will need a new AbstractRunner to do another run. + A Runner controls a single run. Once the run is finished, + you will need a new Runner to do another run. """ - @abstractmethod - def was_started(self) -> bool: - """Whether the runner has been started. - - This value is latched; once it is True, it will never become False. - """ + def __init__(self, protocol_engine: ProtocolEngine): + self._protocol_engine = protocol_engine - # TODO (tz, 03-28-23): Change this to a @classmethod that does not accept protocol_source for live runs and returns the runner instance. - @abstractmethod - async def load(self, protocol_source: ProtocolSource) -> None: - """Load a ProtocolSource into managed ProtocolEngine. + def was_started(self) -> bool: # noqa: D102 + """Whether the run has been started. - Calling this method is only necessary if the runner will be used - to control the run of a file-based protocol. 
+ This value is latched; once it is True, it will never become False. """ + return self._protocol_engine.state_view.commands.has_been_played() - @abstractmethod def play(self) -> None: """Start or resume the run.""" + self._protocol_engine.play() - @abstractmethod def pause(self) -> None: """Pause the run.""" + self._protocol_engine.pause() - @abstractmethod async def stop(self) -> None: """Stop (cancel) the run.""" + if self.was_started(): + await self._protocol_engine.stop() + else: + await self._protocol_engine.finish( + drop_tips_and_home=False, + set_run_status=False, + ) @abstractmethod async def run( self, protocol_source: Optional[ProtocolSource] = None, - ) -> RunnerRunResult: + ) -> RunResult: """Run a given protocol to completion.""" @@ -99,7 +99,7 @@ def __init__( legacy_executor: Optional[LegacyExecutor] = None, ) -> None: """Initialize the PythonAndLegacyRunner with its dependencies.""" - self._protocol_engine = protocol_engine + super().__init__(protocol_engine) self._hardware_api = hardware_api self._legacy_file_reader = legacy_file_reader or LegacyFileReader() self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( @@ -111,10 +111,8 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: # noqa: D102 - return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 + """Load a Python or JSONv5(& older) ProtocolSource into managed ProtocolEngine.""" labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -149,25 +147,10 @@ async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 context=context, ) - def play(self) -> None: # noqa: D102 - self._protocol_engine.play() - - def pause(self) -> None: # noqa: D102 - self._protocol_engine.pause() - - async def stop(self) -> None: # noqa: D102 - if self.was_started(): - await self._protocol_engine.stop() - else: - await self._protocol_engine.finish( - drop_tips_and_home=False, - set_run_status=False, - ) - async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, - ) -> RunnerRunResult: + ) -> RunResult: # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: @@ -180,7 +163,7 @@ async def run( # noqa: D102 run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return RunnerRunResult(commands=commands, state_summary=run_data) + return RunResult(commands=commands, state_summary=run_data) class JsonRunner(AbstractRunner): @@ -195,6 +178,7 @@ def __init__( json_translator: Optional[JsonTranslator] = None, ) -> None: """Initialize the JsonRunner with its dependencies.""" + super().__init__(protocol_engine) self._protocol_engine = protocol_engine self._hardware_api = hardware_api self._json_file_reader = json_file_reader or JsonFileReader() @@ -203,10 +187,8 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: # noqa: D102 - return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 + """Load a JSONv6+ ProtocolSource into managed ProtocolEngine.""" labware_definitions = await 
protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -251,25 +233,10 @@ async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) - def play(self) -> None: # noqa: D102 - self._protocol_engine.play() - - def pause(self) -> None: # noqa: D102 - self._protocol_engine.pause() - - async def stop(self) -> None: # noqa: D102 - if self.was_started(): - await self._protocol_engine.stop() - else: - await self._protocol_engine.finish( - drop_tips_and_home=False, - set_run_status=False, - ) - async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, - ) -> RunnerRunResult: + ) -> RunResult: # TODO(mc, 2022-01-11): move load to runner creation, remove from `run` # currently `protocol_source` arg is only used by tests if protocol_source: @@ -282,7 +249,7 @@ async def run( # noqa: D102 run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return RunnerRunResult(commands=commands, state_summary=run_data) + return RunResult(commands=commands, state_summary=run_data) class LiveRunner(AbstractRunner): @@ -295,39 +262,21 @@ def __init__( task_queue: Optional[TaskQueue] = None, ) -> None: """Initialize the LiveRunner with its dependencies.""" + super().__init__(protocol_engine) self._protocol_engine = protocol_engine # TODO(mc, 2022-01-11): replace task queue with specific implementations # of runner interface self._hardware_api = hardware_api self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: # noqa: D102 - return self._protocol_engine.state_view.commands.has_been_played() - - async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 - raise NotImplementedError( - "LiveRunner.load() not supported for setup commands. no protocol associated." 
- ) - - def play(self) -> None: # noqa: D102 - self._protocol_engine.play() - - def pause(self) -> None: # noqa: D102 - self._protocol_engine.pause() - - async def stop(self) -> None: # noqa: D102 - if self.was_started(): - await self._protocol_engine.stop() - else: - await self._protocol_engine.finish( - drop_tips_and_home=False, - set_run_status=False, - ) + def set_task_queue_wait(self) -> None: + """Set the task queue to wait until all commands are executed.""" + self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, - ) -> RunnerRunResult: + ) -> RunResult: await self._hardware_api.home() self.play() self._task_queue.start() @@ -335,7 +284,10 @@ async def run( # noqa: D102 run_data = self._protocol_engine.state_view.get_summary() commands = self._protocol_engine.state_view.commands.get_all() - return RunnerRunResult(commands=commands, state_summary=run_data) + return RunResult(commands=commands, state_summary=run_data) + + +RunnerType = Union[PythonAndLegacyRunner, JsonRunner, LiveRunner] def create_protocol_runner( @@ -348,7 +300,7 @@ def create_protocol_runner( legacy_file_reader: Optional[LegacyFileReader] = None, legacy_context_creator: Optional[LegacyContextCreator] = None, legacy_executor: Optional[LegacyExecutor] = None, -) -> AbstractRunner: +) -> RunnerType: """Create a protocol runner.""" if protocol_config: if ( diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index 62ebf5d79c4..ceceb148348 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -112,7 +112,7 @@ async def test_runner_with_python( async def test_runner_with_json(json_protocol_file: Path) -> None: - """It should run a JSON protocol on the AbstractRunner.""" + """It should run a JSON protocol on the JsonRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[json_protocol_file], @@ -173,7 +173,7 @@ async def test_runner_with_json(json_protocol_file: Path) -> None: async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> None: - """It should run a Python protocol on the AbstractRunner.""" + """It should run a Python protocol on the PythonAndLegacyRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_python_protocol_file], @@ -233,7 +233,7 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: - """It should run a Python protocol on the AbstractRunner.""" + """It should run a Python protocol on the PythonAndLegacyRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_json_protocol_file], diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 17a3fc99ec5..14b5ce93f00 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -27,6 +27,7 @@ PythonAndLegacyRunner, LiveRunner, AbstractRunner, + RunnerType, ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ 
-175,7 +176,7 @@ async def test_create_protocol_runner( legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], - runner_type: Union[JsonRunner, PythonAndLegacyRunner, LiveRunner], + runner_type: RunnerType, ) -> None: """It should return protocol runner type depending on the config.""" assert isinstance( @@ -261,7 +262,7 @@ async def test_stop( (lazy_fixture("live_subject")), ], ) -async def test_stop_never_started_json_runner( +async def test_stop_when_run_never_started( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, @@ -606,11 +607,3 @@ async def test_run_live_runner( task_queue.start(), await task_queue.join(), ) - - -async def test_load_live_runner_raises( - live_subject: LiveRunner, -) -> None: - """Should raise a not supported error.""" - with pytest.raises(NotImplementedError): - await live_subject.load(None) # type: ignore[arg-type] diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index c7845214745..8391168f2de 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -7,7 +7,11 @@ from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_runner import ( AbstractRunner, - RunnerRunResult, + RunnerType, + JsonRunner, + PythonAndLegacyRunner, + LiveRunner, + RunResult, create_protocol_runner, ) from opentrons.protocol_engine import ( @@ -34,7 +38,7 @@ class RunnerEnginePair(NamedTuple): """A stored AbstractRunner/ProtocolEngine pair.""" run_id: str - runner: AbstractRunner + runner: RunnerType engine: ProtocolEngine @@ -65,7 +69,7 @@ def engine(self) -> ProtocolEngine: return self._runner_engine_pair.engine @property - def runner(self) -> AbstractRunner: + def runner(self) -> RunnerType: """Get the "current" persisted ProtocolRunner.""" assert self._runner_engine_pair is not None, "Runner not yet created." return self._runner_engine_pair.runner @@ -145,11 +149,16 @@ async def create( if self._runner_engine_pair is not None: raise EngineConflictError("Another run is currently active.") - if protocol is not None: + if isinstance(runner, (PythonAndLegacyRunner, JsonRunner)): # FIXME(mm, 2022-12-21): This `await` introduces a concurrency hazard. If # two requests simultaneously call this method, they will both "succeed" # (with undefined results) instead of one raising EngineConflictError. + assert ( + protocol is not None + ), "A Python or JSON protocol should have a protocol source file." await runner.load(protocol.source) + else: # Why does linter not like `else:`..? + runner.set_task_queue_wait() for offset in labware_offsets: engine.add_labware_offset(offset) @@ -162,7 +171,7 @@ async def create( return engine.state_view.get_summary() - async def clear(self) -> RunnerRunResult: + async def clear(self) -> RunResult: """Remove the persisted ProtocolEngine. 
Raises: @@ -181,4 +190,4 @@ async def clear(self) -> RunnerRunResult: commands = state_view.commands.get_all() self._runner_engine_pair = None - return RunnerRunResult(state_summary=run_data, commands=commands) + return RunResult(state_summary=run_data, commands=commands) diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 2b3144a3659..c655db805a5 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -107,7 +107,7 @@ async def test_analyze( ).then_return(json_runner) decoy.when(await json_runner.run(protocol_resource.source)).then_return( - protocol_runner.RunnerRunResult( + protocol_runner.RunResult( commands=[analysis_command], state_summary=StateSummary( status=EngineStatus.SUCCEEDED, diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index 7975e1a9373..d44592f9171 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -1,19 +1,25 @@ """Tests for the EngineStore interface.""" from datetime import datetime from pathlib import Path - +from typing import cast import pytest from decoy import Decoy, matchers +from opentrons.protocol_engine.state import StateStore from opentrons_shared_data import get_shared_data_root from opentrons_shared_data.robot.dev_types import RobotType from opentrons.types import DeckSlotName from opentrons.hardware_control import HardwareControlAPI +from opentrons import protocol_engine from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types +from opentrons.protocol_engine.state.config import Config from opentrons.protocol_runner import ( - RunnerRunResult, + RunResult, LiveRunner, + RunnerType, + protocol_runner, + JsonRunner, ) from opentrons.protocol_reader import ProtocolReader, ProtocolSource @@ -22,11 +28,24 @@ @pytest.fixture -def subject(decoy: Decoy) -> EngineStore: - """Get a EngineStore test subject.""" +def hardware_api( + decoy: Decoy, +) -> HardwareControlAPI: + """Return a mock in the shape of a HardwareControlAPI.""" # TODO(mc, 2021-06-11): to make these test more effective and valuable, we # should pass in some sort of actual, valid HardwareAPI instead of a mock - hardware_api = decoy.mock(cls=HardwareControlAPI) + return decoy.mock(cls=HardwareControlAPI) + + +@pytest.fixture +def state_store(decoy: Decoy) -> StateStore: + """Get a mock StateStore.""" + return decoy.mock(cls=StateStore) + + +@pytest.fixture +def subject(decoy: Decoy, hardware_api: HardwareControlAPI) -> EngineStore: + """Get a EngineStore test subject.""" return EngineStore( hardware_api=hardware_api, # Arbitrary choice of robot_type. 
Tests where robot_type matters should @@ -36,7 +55,7 @@ def subject(decoy: Decoy) -> EngineStore: @pytest.fixture -async def protocol_source(tmp_path: Path) -> ProtocolSource: +async def json_protocol_source(tmp_path: Path) -> ProtocolSource: """Get a protocol source fixture.""" simple_protocol = ( get_shared_data_root() / "protocol" / "fixtures" / "6" / "simpleV6.json" @@ -44,6 +63,22 @@ async def protocol_source(tmp_path: Path) -> ProtocolSource: return await ProtocolReader().read_saved(files=[simple_protocol], directory=None) +@pytest.fixture(autouse=True) +def mock_patch_create_protocol_runner( + decoy: Decoy, monkeypatch: pytest.MonkeyPatch +) -> None: + """Mock patch a runner creator.""" + mock_run_creator = decoy.mock(func=protocol_runner.create_protocol_runner) + monkeypatch.setattr(protocol_runner, "create_protocol_runner", mock_run_creator) + + +@pytest.fixture(autouse=True) +def mock_patch_create_engine(decoy: Decoy, monkeypatch: pytest.MonkeyPatch) -> None: + """Mock patch an engine creator.""" + mock_engine_creator = decoy.mock(func=protocol_engine.create_protocol_engine) + monkeypatch.setattr(protocol_engine, "create_protocol_engine", mock_engine_creator) + + async def test_create_engine(subject: EngineStore) -> None: """It should create an engine for a run.""" result = await subject.create(run_id="run-id", labware_offsets=[], protocol=None) @@ -94,27 +129,81 @@ async def test_create_engine_with_labware_offsets(subject: EngineStore) -> None: ] -@pytest.mark.xfail(strict=True, raises=NotImplementedError) -async def test_create_engine_with_protocol( +async def test_create_engine_with_protocol_loads_protocol_source( + decoy: Decoy, + hardware_api: HardwareControlAPI, + state_store: StateStore, subject: EngineStore, - protocol_source: ProtocolSource, + json_protocol_source: ProtocolSource, ) -> None: - """It should create an engine for a run with labware offsets.""" - # TODO(mc, 2022-05-18): https://github.com/Opentrons/opentrons/pull/10170 - raise NotImplementedError("Implement this test when JSONv6 runs are supported") - + """It should create an engine-runner pair for a run with JSONv6 protocol source.""" protocol = ProtocolResource( protocol_id="my cool protocol", protocol_key=None, created_at=datetime(year=2021, month=1, day=1), - source=protocol_source, + source=json_protocol_source, ) + mock_engine = decoy.mock(cls=ProtocolEngine) + mock_runner = decoy.mock(cls=JsonRunner) + assert isinstance(mock_runner, JsonRunner) + decoy.when( + await protocol_engine.create_protocol_engine( + hardware_api=hardware_api, + config=Config(robot_type="OT-2 Standard", block_on_door_open=False), + ) + ).then_return(mock_engine) + decoy.when( + protocol_runner.create_protocol_runner( + protocol_config=json_protocol_source.config, + protocol_engine=mock_engine, + hardware_api=hardware_api, + ) + ).then_return(mock_runner) await subject.create( run_id="run-id", labware_offsets=[], protocol=protocol, ) + decoy.verify( + await protocol_engine.create_protocol_engine( + hardware_api=hardware_api, + config=Config( + robot_type=cast(RobotType, "OT-2 Standard"), + block_on_door_open=False, + ), + ) + ) + decoy.verify(await mock_runner.load(json_protocol_source), times=1) + + +async def test_create_engine_for_live_protocol( + decoy: Decoy, + hardware_api: HardwareControlAPI, + state_store: StateStore, + subject: EngineStore, +) -> None: + """It should create an engine-runner for a run with no protocol.""" + mock_engine = decoy.mock(cls=ProtocolEngine) + mock_runner = decoy.mock(cls=LiveRunner) + 
decoy.when( + await protocol_engine.create_protocol_engine( + hardware_api=hardware_api, + config=Config(robot_type="OT-2 Standard", block_on_door_open=False), + ) + ).then_return(mock_engine) + decoy.when( + protocol_runner.create_protocol_runner( + protocol_config=None, protocol_engine=mock_engine, hardware_api=hardware_api + ) + ).then_return(mock_runner) + + await subject.create( + run_id="run-id", + labware_offsets=[], + protocol=None, + ) + decoy.verify(mock_runner.set_task_queue_wait(), times=1) async def test_archives_state_if_engine_already_exists(subject: EngineStore) -> None: @@ -134,7 +223,7 @@ async def test_clear_engine(subject: EngineStore) -> None: result = await subject.clear() assert subject.current_run_id is None - assert isinstance(result, RunnerRunResult) + assert isinstance(result, RunResult) with pytest.raises(AssertionError): subject.engine @@ -144,13 +233,13 @@ async def test_clear_engine(subject: EngineStore) -> None: async def test_clear_engine_not_stopped_or_idle( - subject: EngineStore, protocol_source: ProtocolSource + subject: EngineStore, json_protocol_source: ProtocolSource ) -> None: """It should raise a conflict if the engine is not stopped.""" protocol_resource = ProtocolResource( protocol_id="123", created_at=datetime.now(), - source=protocol_source, + source=json_protocol_source, protocol_key=None, ) await subject.create( diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index db94414bad6..5644daec438 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -11,7 +11,7 @@ commands as pe_commands, errors as pe_errors, ) -from opentrons.protocol_runner import RunnerRunResult +from opentrons.protocol_runner import RunResult from robot_server.service.task_runner import TaskRunner from robot_server.runs.action_models import RunAction, RunActionType @@ -144,7 +144,7 @@ async def test_create_play_action_to_start( decoy.verify(mock_task_runner.run(background_task_captor)) decoy.when(await mock_engine_store.runner.run()).then_return( - RunnerRunResult( + RunResult( commands=protocol_commands, state_summary=engine_state_summary, ) diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 59f2de963df..d03052ecc03 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -6,7 +6,7 @@ from decoy import Decoy, matchers from opentrons.types import DeckSlotName -from opentrons.protocol_runner import RunnerRunResult +from opentrons.protocol_runner import RunResult from opentrons.protocol_engine import ( EngineStatus, StateSummary, @@ -485,7 +485,7 @@ async def test_update_current( run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(await mock_engine_store.clear()).then_return( - RunnerRunResult(commands=[run_command], state_summary=engine_state_summary) + RunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( @@ -593,7 +593,7 @@ async def test_create_archives_existing( decoy.when(mock_engine_store.current_run_id).then_return(run_id_old) decoy.when(await mock_engine_store.clear()).then_return( - RunnerRunResult(commands=[run_command], state_summary=engine_state_summary) + RunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( From 742e29272e5d050c7b0969d9561bea19776dd303 Mon Sep 17 00:00:00 2001 From: Sanniti Date: Wed, 5 Apr 2023 
17:38:22 -0400 Subject: [PATCH 29/34] updated tests, fixed linter errors --- .../protocol_runner/protocol_runner.py | 8 +- .../robot_server/runs/engine_store.py | 2 - ...t_run_queued_protocol_commands.tavern.yaml | 1 - robot-server/tests/runs/test_engine_store.py | 106 ------------------ .../tests/runs/test_run_controller.py | 16 ++- 5 files changed, 14 insertions(+), 119 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 9e2acf8d4cf..8790ad03ad2 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -50,10 +50,10 @@ class AbstractRunner(ABC): you will need a new Runner to do another run. """ - def __init__(self, protocol_engine: ProtocolEngine): + def __init__(self, protocol_engine: ProtocolEngine) -> None: self._protocol_engine = protocol_engine - def was_started(self) -> bool: # noqa: D102 + def was_started(self) -> bool: """Whether the run has been started. This value is latched; once it is True, it will never become False. @@ -111,7 +111,7 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 + async def load(self, protocol_source: ProtocolSource) -> None: """Load a Python or JSONv5(& older) ProtocolSource into managed ProtocolEngine.""" labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source @@ -187,7 +187,7 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - async def load(self, protocol_source: ProtocolSource) -> None: # noqa: D102 + async def load(self, protocol_source: ProtocolSource) -> None: """Load a JSONv6+ ProtocolSource into managed ProtocolEngine.""" labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 8391168f2de..828d6eb3711 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -6,11 +6,9 @@ from opentrons.config import feature_flags from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_runner import ( - AbstractRunner, RunnerType, JsonRunner, PythonAndLegacyRunner, - LiveRunner, RunResult, create_protocol_runner, ) diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml index 6cd8ce9df82..d43acc831f1 100644 --- a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -1,7 +1,6 @@ test_name: Make sure empty run commands execute protocol commands queue marks: - - xfail - usefixtures: - run_server diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index d44592f9171..527ede8f370 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -1,25 +1,18 @@ """Tests for the EngineStore interface.""" from datetime import datetime from pathlib import Path -from typing import cast import pytest from decoy import Decoy, matchers -from 
opentrons.protocol_engine.state import StateStore from opentrons_shared_data import get_shared_data_root from opentrons_shared_data.robot.dev_types import RobotType from opentrons.types import DeckSlotName from opentrons.hardware_control import HardwareControlAPI -from opentrons import protocol_engine from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types -from opentrons.protocol_engine.state.config import Config from opentrons.protocol_runner import ( RunResult, LiveRunner, - RunnerType, - protocol_runner, - JsonRunner, ) from opentrons.protocol_reader import ProtocolReader, ProtocolSource @@ -37,12 +30,6 @@ def hardware_api( return decoy.mock(cls=HardwareControlAPI) -@pytest.fixture -def state_store(decoy: Decoy) -> StateStore: - """Get a mock StateStore.""" - return decoy.mock(cls=StateStore) - - @pytest.fixture def subject(decoy: Decoy, hardware_api: HardwareControlAPI) -> EngineStore: """Get a EngineStore test subject.""" @@ -63,22 +50,6 @@ async def json_protocol_source(tmp_path: Path) -> ProtocolSource: return await ProtocolReader().read_saved(files=[simple_protocol], directory=None) -@pytest.fixture(autouse=True) -def mock_patch_create_protocol_runner( - decoy: Decoy, monkeypatch: pytest.MonkeyPatch -) -> None: - """Mock patch a runner creator.""" - mock_run_creator = decoy.mock(func=protocol_runner.create_protocol_runner) - monkeypatch.setattr(protocol_runner, "create_protocol_runner", mock_run_creator) - - -@pytest.fixture(autouse=True) -def mock_patch_create_engine(decoy: Decoy, monkeypatch: pytest.MonkeyPatch) -> None: - """Mock patch an engine creator.""" - mock_engine_creator = decoy.mock(func=protocol_engine.create_protocol_engine) - monkeypatch.setattr(protocol_engine, "create_protocol_engine", mock_engine_creator) - - async def test_create_engine(subject: EngineStore) -> None: """It should create an engine for a run.""" result = await subject.create(run_id="run-id", labware_offsets=[], protocol=None) @@ -129,83 +100,6 @@ async def test_create_engine_with_labware_offsets(subject: EngineStore) -> None: ] -async def test_create_engine_with_protocol_loads_protocol_source( - decoy: Decoy, - hardware_api: HardwareControlAPI, - state_store: StateStore, - subject: EngineStore, - json_protocol_source: ProtocolSource, -) -> None: - """It should create an engine-runner pair for a run with JSONv6 protocol source.""" - protocol = ProtocolResource( - protocol_id="my cool protocol", - protocol_key=None, - created_at=datetime(year=2021, month=1, day=1), - source=json_protocol_source, - ) - mock_engine = decoy.mock(cls=ProtocolEngine) - mock_runner = decoy.mock(cls=JsonRunner) - assert isinstance(mock_runner, JsonRunner) - decoy.when( - await protocol_engine.create_protocol_engine( - hardware_api=hardware_api, - config=Config(robot_type="OT-2 Standard", block_on_door_open=False), - ) - ).then_return(mock_engine) - decoy.when( - protocol_runner.create_protocol_runner( - protocol_config=json_protocol_source.config, - protocol_engine=mock_engine, - hardware_api=hardware_api, - ) - ).then_return(mock_runner) - - await subject.create( - run_id="run-id", - labware_offsets=[], - protocol=protocol, - ) - decoy.verify( - await protocol_engine.create_protocol_engine( - hardware_api=hardware_api, - config=Config( - robot_type=cast(RobotType, "OT-2 Standard"), - block_on_door_open=False, - ), - ) - ) - decoy.verify(await mock_runner.load(json_protocol_source), times=1) - - -async def test_create_engine_for_live_protocol( - decoy: Decoy, - hardware_api: 
HardwareControlAPI, - state_store: StateStore, - subject: EngineStore, -) -> None: - """It should create an engine-runner for a run with no protocol.""" - mock_engine = decoy.mock(cls=ProtocolEngine) - mock_runner = decoy.mock(cls=LiveRunner) - decoy.when( - await protocol_engine.create_protocol_engine( - hardware_api=hardware_api, - config=Config(robot_type="OT-2 Standard", block_on_door_open=False), - ) - ).then_return(mock_engine) - decoy.when( - protocol_runner.create_protocol_runner( - protocol_config=None, protocol_engine=mock_engine, hardware_api=hardware_api - ) - ).then_return(mock_runner) - - await subject.create( - run_id="run-id", - labware_offsets=[], - protocol=None, - ) - decoy.verify(mock_runner.set_task_queue_wait(), times=1) - - async def test_archives_state_if_engine_already_exists(subject: EngineStore) -> None: """It should not create more than one engine / runner pair.""" await subject.create(run_id="run-id-1", labware_offsets=[], protocol=None) diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index 5644daec438..7387af2d912 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -11,7 +11,7 @@ commands as pe_commands, errors as pe_errors, ) -from opentrons.protocol_runner import RunResult +from opentrons.protocol_runner import RunResult, JsonRunner, PythonAndLegacyRunner from robot_server.service.task_runner import TaskRunner from robot_server.runs.action_models import RunAction, RunActionType @@ -94,7 +94,9 @@ async def test_create_play_action_to_resume( subject: RunController, ) -> None: """It should resume a run.""" - decoy.when(mock_engine_store.runner.was_started()).then_return(True) + mock_json_runner = decoy.mock(cls=JsonRunner) + decoy.when(mock_engine_store.runner).then_return(mock_json_runner) + decoy.when(mock_json_runner.was_started()).then_return(True) result = subject.create_action( action_id="some-action-id", @@ -109,8 +111,8 @@ async def test_create_play_action_to_resume( ) decoy.verify(mock_run_store.insert_action(run_id, result), times=1) - decoy.verify(mock_engine_store.runner.play(), times=1) - decoy.verify(await mock_engine_store.runner.run(), times=0) + decoy.verify(mock_json_runner.play(), times=1) + decoy.verify(await mock_json_runner.run(), times=0) async def test_create_play_action_to_start( @@ -124,7 +126,9 @@ async def test_create_play_action_to_start( subject: RunController, ) -> None: """It should start a run.""" - decoy.when(mock_engine_store.runner.was_started()).then_return(False) + mock_python_runner = decoy.mock(cls=PythonAndLegacyRunner) + decoy.when(mock_engine_store.runner).then_return(mock_python_runner) + decoy.when(mock_python_runner.was_started()).then_return(False) result = subject.create_action( action_id="some-action-id", @@ -143,7 +147,7 @@ async def test_create_play_action_to_start( background_task_captor = matchers.Captor() decoy.verify(mock_task_runner.run(background_task_captor)) - decoy.when(await mock_engine_store.runner.run()).then_return( + decoy.when(await mock_python_runner.run()).then_return( RunResult( commands=protocol_commands, state_summary=engine_state_summary, From 56a1b2f81d0086a578645c71bbf0a50eceb24100 Mon Sep 17 00:00:00 2001 From: Sanniti Pimpley Date: Fri, 7 Apr 2023 14:37:42 -0400 Subject: [PATCH 30/34] correct docstring --- api/src/opentrons/protocol_runner/protocol_runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py
index 8790ad03ad2..ec88f2cc6db 100644
--- a/api/src/opentrons/protocol_runner/protocol_runner.py
+++ b/api/src/opentrons/protocol_runner/protocol_runner.py
@@ -253,7 +253,7 @@ async def run( # noqa: D102
 class LiveRunner(AbstractRunner):
-    """Protocol runner implementation for setup commands."""
+    """Protocol runner implementation for live http protocols."""

     def __init__(
         self,

From b272e10431a1e107354d3f4794589b278185619c Mon Sep 17 00:00:00 2001
From: Sanniti Pimpley
Date: Thu, 13 Apr 2023 08:05:43 -0400
Subject: [PATCH 31/34] Apply suggestions from code review

- Use `protocol_source.config` where possible
- Some docstring corrections
- other small misc cleanup in tests

Co-authored-by: Max Marrone
---
 .../smoke_tests/test_legacy_command_mapper.py | 2 +-
 .../smoke_tests/test_legacy_custom_labware.py | 2 +-
 .../smoke_tests/test_legacy_module_commands.py | 2 +-
 .../smoke_tests/test_protocol_runner.py | 10 +++++-----
 .../protocol_runner/test_legacy_command_mapper.py | 2 +-
 .../protocol_runner/test_legacy_context_plugin.py | 2 +-
 .../opentrons/protocol_runner/test_protocol_runner.py | 4 ++--
 .../robot_server/protocols/protocol_analyzer.py | 2 +-
 robot-server/robot_server/runs/engine_store.py | 2 +-
 .../runs/test_run_queued_protocol_commands.tavern.yaml | 6 +++---
 10 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
index 51acb12a9e1..2bba8608b72 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
@@ -35,7 +35,7 @@ async def simulate_and_get_commands(protocol_file: Path) -> List[commands.Comman
     )
     subject = await create_simulating_runner(
         robot_type="OT-2 Standard",
-        protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)),
+        protocol_config=protocol_source.config,
     )
     result = await subject.run(protocol_source)
     assert result.state_summary.errors == []
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
index be75fac1b4a..5470bf4bbaa 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
@@ -55,7 +55,7 @@ async def test_legacy_custom_labware(custom_labware_protocol_files: List[Path])
     subject = await create_simulating_runner(
         robot_type="OT-2 Standard",
-        protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)),
+        protocol_config=protocol_source.config,
     )
     result = await subject.run(protocol_source)
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
index 905d6c064e2..df9bad9f3d7 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
@@ -66,7 +66,7 @@ async def test_runner_with_modules_in_legacy_python(
     subject = await create_simulating_runner(
         robot_type="OT-2 Standard",
-        protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)),
+        protocol_config=protocol_source.config,
     )
     result = await subject.run(protocol_source)
    commands_result = result.commands
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py
index f735c006965..02d531de33d 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py
@@ -43,7 +43,7 @@ async def test_runner_with_python(
     python_protocol_file: Path,
     tempdeck_v1_def: ModuleDefinition,
 ) -> None:
-    """It should run a Python protocol on the AbstractRunner."""
+    """It should run a Python protocol on the PythonAndLegacyRunner."""
     protocol_reader = ProtocolReader()
     protocol_source = await protocol_reader.read_saved(
         files=[python_protocol_file],
@@ -52,7 +52,7 @@ async def test_runner_with_python(
     subject = await create_simulating_runner(
         robot_type="OT-2 Standard",
-        protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 14)),
+        protocol_config=protocol_source.config,
     )
     result = await subject.run(protocol_source)
     commands_result = result.commands
@@ -120,7 +120,7 @@ async def test_runner_with_json(json_protocol_file: Path) -> None:
     )
     subject = await create_simulating_runner(
-        robot_type="OT-2 Standard", protocol_config=JsonProtocolConfig(schema_version=6)
+        robot_type="OT-2 Standard", protocol_config=protocol_source.config
     )
     result = await subject.run(protocol_source)
@@ -182,7 +182,7 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N
     subject = await create_simulating_runner(
         robot_type="OT-2 Standard",
-        protocol_config=PythonProtocolConfig(api_version=APIVersion(2, 13)),
+        protocol_config=protocol_source.config,
     )
     result = await subject.run(protocol_source)
@@ -241,7 +241,7 @@ async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None:
     )
     subject = await create_simulating_runner(
-        robot_type="OT-2 Standard", protocol_config=JsonProtocolConfig(schema_version=5)
+        robot_type="OT-2 Standard", protocol_config=protocol_source.config
     )
     result = await subject.run(protocol_source)
diff --git a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py
index 27f0ce212af..07bf2d0283b 100644
--- a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py
+++ b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py
@@ -1,4 +1,4 @@
-"""Tests for the AbstractRunner's LegacyContextPlugin."""
+"""Tests for the PythonAndLegacyRunner's LegacyCommandMapper."""
 import inspect
 from datetime import datetime
 from typing import cast
diff --git a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py
index 96003564813..d7bb3fc9902 100644
--- a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py
+++ b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py
@@ -1,4 +1,4 @@
-"""Tests for the AbstractRunner's LegacyContextPlugin."""
+"""Tests for the PythonAndLegacyRunner's LegacyContextPlugin."""
 import pytest
 from anyio import to_thread
 from decoy import Decoy, matchers
diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py
index 14b5ce93f00..6443c01c842 100644
--- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py
+++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py
@@ -1,4 +1,4 @@
-"""Tests for the AbstractRunner
class.""" +"""Tests for the PythonAndLegacyRunner, JsonRunner & LiveRunner classes.""" import pytest from pytest_lazyfixture import lazy_fixture # type: ignore[import] from decoy import Decoy, matchers @@ -131,7 +131,7 @@ def legacy_subject( legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, ) -> PythonAndLegacyRunner: - """Get a JsonRunner test subject with mocked dependencies.""" + """Get a PythonAndLegacyRunner test subject with mocked dependencies.""" return PythonAndLegacyRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, diff --git a/robot-server/robot_server/protocols/protocol_analyzer.py b/robot-server/robot_server/protocols/protocol_analyzer.py index 6659ffbb7dd..cd8c44cb539 100644 --- a/robot-server/robot_server/protocols/protocol_analyzer.py +++ b/robot-server/robot_server/protocols/protocol_analyzer.py @@ -1,7 +1,7 @@ """Protocol analysis module.""" import logging -import opentrons.protocol_runner as protocol_runner +from opentrons import protocol_runner from .protocol_store import ProtocolResource from .analysis_store import AnalysisStore diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 828d6eb3711..bed04624a0e 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -33,7 +33,7 @@ class EngineConflictError(RuntimeError): class RunnerEnginePair(NamedTuple): - """A stored AbstractRunner/ProtocolEngine pair.""" + """A stored Runner/ProtocolEngine pair.""" run_id: str runner: RunnerType diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml index d43acc831f1..8d35b1acb89 100644 --- a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -1,11 +1,12 @@ -test_name: Make sure empty run commands execute protocol commands queue +test_name: Make sure multiple commands can be queued and played on a basic run marks: - usefixtures: - run_server + - clean_server_state stages: - - name: Create Empty Run + - name: Create empty run request: url: '{host:s}:{port:d}/runs' json: @@ -83,7 +84,6 @@ stages: response: status_code: 200 json: - # Check that `data` is identical to the original POSTed run. 
data: actions: [] createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" From 433ca47253cab827478f1586ed224c9fb66a7fc0 Mon Sep 17 00:00:00 2001 From: Sanniti Date: Fri, 14 Apr 2023 07:19:36 -0400 Subject: [PATCH 32/34] fix runner smoke tests and linter errors, some renaming from PR review --- .../protocol_runner/smoke_tests/conftest.py | 6 +- .../smoke_tests/test_legacy_command_mapper.py | 3 +- .../smoke_tests/test_legacy_custom_labware.py | 3 +- .../test_legacy_module_commands.py | 6 +- .../smoke_tests/test_protocol_runner.py | 17 ++--- .../protocol_runner/test_protocol_runner.py | 70 +++++++++---------- 6 files changed, 46 insertions(+), 59 deletions(-) diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py b/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py index 2c8635f7eb4..a49625fc258 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py @@ -92,11 +92,11 @@ def python_protocol_file(tmp_path: Path) -> Path: """ # my protocol metadata = { - "apiLevel": "3.0", + "apiLevel": "2.14", } def run(ctx): - pipette = ctx.load_pipette( - pipette_name="p300_single", + pipette = ctx.load_instrument( + instrument_name="p300_single", mount="left", ) tip_rack = ctx.load_labware( diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py index 2bba8608b72..27f25092942 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py @@ -18,12 +18,11 @@ ModuleLocation, DeckPoint, ) -from opentrons.protocol_reader import ProtocolReader, PythonProtocolConfig +from opentrons.protocol_reader import ProtocolReader from opentrons.protocol_runner import create_simulating_runner from opentrons.protocol_runner.legacy_command_mapper import LegacyCommandParams from opentrons.types import MountType, DeckSlotName from opentrons_shared_data.pipette.dev_types import PipetteNameType -from opentrons.protocols.api_support.types import APIVersion async def simulate_and_get_commands(protocol_file: Path) -> List[commands.Command]: diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py index 5470bf4bbaa..32456b98af1 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py @@ -12,9 +12,8 @@ from opentrons_shared_data import load_shared_data from opentrons.types import DeckSlotName from opentrons.protocol_engine import DeckSlotLocation, LoadedLabware -from opentrons.protocol_reader import ProtocolReader, PythonProtocolConfig +from opentrons.protocol_reader import ProtocolReader from opentrons.protocol_runner import create_simulating_runner -from opentrons.protocols.api_support.types import APIVersion FIXTURE_LABWARE_DEF = load_shared_data("labware/fixtures/2/fixture_96_plate.json") diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py index df9bad9f3d7..3230cffa8aa 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py +++ 
b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py @@ -8,12 +8,8 @@ from opentrons.protocol_engine import ModuleModel, commands -from opentrons.protocol_reader import ( - ProtocolReader, - PythonProtocolConfig, -) +from opentrons.protocol_reader import ProtocolReader from opentrons.protocol_runner import create_simulating_runner -from opentrons.protocols.api_support.types import APIVersion @pytest.fixture() diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index 02d531de33d..6efdf5f5e6d 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -8,8 +8,6 @@ there, the ProtocolEngine state is inspected to check that everything was loaded and run as expected. """ -import pytest - from datetime import datetime from decoy import matchers from pathlib import Path @@ -27,18 +25,10 @@ commands, DeckPoint, ) -from opentrons.protocol_reader import ( - ProtocolReader, - JsonProtocolConfig, - PythonProtocolConfig, -) +from opentrons.protocol_reader import ProtocolReader from opentrons.protocol_runner import create_simulating_runner -from opentrons.protocols.api_support.types import APIVersion -# TODO (tz, 6-17-22): API version 3.x in-development. -# Currently parsing protocol versions less then MAX_SUPPORTED_VERSION -@pytest.mark.xfail(strict=True) async def test_runner_with_python( python_protocol_file: Path, tempdeck_v1_def: ModuleDefinition, @@ -104,7 +94,10 @@ async def test_runner_with_python( wellName="A1", ), result=commands.PickUpTipResult( - tipVolume=300.0, position=DeckPoint(x=0, y=0, z=0) + tipVolume=300.0, + tipLength=51.83, + tipDiameter=5.23, + position=DeckPoint(x=14.38, y=74.24, z=64.69), ), ) diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 6443c01c842..5d7f92e0a3d 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -105,7 +105,7 @@ def use_mock_extract_labware_definitions( @pytest.fixture -def json_subject( +def json_runner_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, @@ -123,7 +123,7 @@ def json_subject( @pytest.fixture -def legacy_subject( +def legacy_python_runner_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, @@ -143,7 +143,7 @@ def legacy_subject( @pytest.fixture -def live_subject( +def live_runner_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, @@ -195,9 +195,9 @@ async def test_create_protocol_runner( @pytest.mark.parametrize( "subject", [ - (lazy_fixture("json_subject")), - (lazy_fixture("legacy_subject")), - (lazy_fixture("live_subject")), + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), ], ) async def test_play_starts_run( @@ -215,9 +215,9 @@ async def test_play_starts_run( @pytest.mark.parametrize( "subject", [ - (lazy_fixture("json_subject")), - (lazy_fixture("legacy_subject")), - (lazy_fixture("live_subject")), + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), ], ) async def test_pause( @@ -234,9 +234,9 @@ async def test_pause( @pytest.mark.parametrize( 
"subject", [ - (lazy_fixture("json_subject")), - (lazy_fixture("legacy_subject")), - (lazy_fixture("live_subject")), + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), ], ) async def test_stop( @@ -257,9 +257,9 @@ async def test_stop( @pytest.mark.parametrize( "subject", [ - (lazy_fixture("json_subject")), - (lazy_fixture("legacy_subject")), - (lazy_fixture("live_subject")), + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), ], ) async def test_stop_when_run_never_started( @@ -284,16 +284,16 @@ async def test_run_json_runner( hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - json_subject: JsonRunner, + json_runner_subject: JsonRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert json_subject.was_started() is False - await json_subject.run() - assert json_subject.was_started() is True + assert json_runner_subject.was_started() is False + await json_runner_subject.run() + assert json_runner_subject.was_started() is True decoy.verify( await hardware_api.home(), @@ -309,7 +309,7 @@ async def test_load_json_runner( json_translator: JsonTranslator, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - json_subject: JsonRunner, + json_runner_subject: JsonRunner, ) -> None: """It should load a JSON protocol file.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -351,7 +351,7 @@ async def test_load_json_runner( decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) - await json_subject.load(json_protocol_source) + await json_runner_subject.load(json_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -386,7 +386,7 @@ async def test_load_legacy_python( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - legacy_subject: PythonAndLegacyRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -434,7 +434,7 @@ async def test_load_legacy_python( ) ).then_return(legacy_context) - await legacy_subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -452,7 +452,7 @@ async def test_load_python_with_pe_papi_core( legacy_file_reader: LegacyFileReader, legacy_context_creator: LegacyContextCreator, protocol_engine: ProtocolEngine, - legacy_subject: PythonAndLegacyRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" legacy_protocol_source = ProtocolSource( @@ -493,7 +493,7 @@ async def test_load_python_with_pe_papi_core( ) ).then_return(legacy_context) - await legacy_subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) @@ -505,7 +505,7 @@ async def test_load_legacy_json( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - legacy_subject: 
PythonAndLegacyRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based JSON protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -548,7 +548,7 @@ async def test_load_legacy_json( ) ).then_return(legacy_context) - await legacy_subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -566,16 +566,16 @@ async def test_run_python_runner( hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - legacy_subject: PythonAndLegacyRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert legacy_subject.was_started() is False - await legacy_subject.run() - assert legacy_subject.was_started() is True + assert legacy_python_runner_subject.was_started() is False + await legacy_python_runner_subject.run() + assert legacy_python_runner_subject.was_started() is True decoy.verify( await hardware_api.home(), @@ -590,16 +590,16 @@ async def test_run_live_runner( hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - live_subject: LiveRunner, + live_runner_subject: LiveRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert live_subject.was_started() is False - await live_subject.run() - assert live_subject.was_started() is True + assert live_runner_subject.was_started() is False + await live_runner_subject.run() + assert live_runner_subject.was_started() is True decoy.verify( await hardware_api.home(), From b6e7385288a163d0d6c178ad8871fa66561bd7d9 Mon Sep 17 00:00:00 2001 From: Sanniti Date: Fri, 14 Apr 2023 07:41:17 -0400 Subject: [PATCH 33/34] update runner typing & type naming, renamed set_task_queue_wait() to prepare() --- api/src/opentrons/protocol_runner/__init__.py | 4 ++-- .../protocol_runner/protocol_runner.py | 6 +++--- .../protocol_runner/test_protocol_runner.py | 17 ++++++++--------- robot-server/robot_server/protocols/router.py | 2 +- robot-server/robot_server/runs/engine_store.py | 10 +++++----- 5 files changed, 19 insertions(+), 20 deletions(-) diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py index 92aab4c567d..35360cc77d9 100644 --- a/api/src/opentrons/protocol_runner/__init__.py +++ b/api/src/opentrons/protocol_runner/__init__.py @@ -10,7 +10,7 @@ JsonRunner, PythonAndLegacyRunner, LiveRunner, - RunnerType, + AnyRunner, ) from .create_simulating_runner import create_simulating_runner @@ -22,5 +22,5 @@ "JsonRunner", "PythonAndLegacyRunner", "LiveRunner", - "RunnerType", + "AnyRunner", ] diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index ec88f2cc6db..59ed26a408e 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -269,7 +269,7 @@ def __init__( self._hardware_api = hardware_api self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def set_task_queue_wait(self) -> None: + def prepare(self) -> None: """Set the task queue to wait until all commands are executed.""" 
self._task_queue.set_run_func(func=self._protocol_engine.wait_until_complete) @@ -287,7 +287,7 @@ async def run( # noqa: D102 return RunResult(commands=commands, state_summary=run_data) -RunnerType = Union[PythonAndLegacyRunner, JsonRunner, LiveRunner] +AnyRunner = Union[PythonAndLegacyRunner, JsonRunner, LiveRunner] def create_protocol_runner( @@ -300,7 +300,7 @@ def create_protocol_runner( legacy_file_reader: Optional[LegacyFileReader] = None, legacy_context_creator: Optional[LegacyContextCreator] = None, legacy_executor: Optional[LegacyExecutor] = None, -) -> RunnerType: +) -> AnyRunner: """Create a protocol runner.""" if protocol_config: if ( diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 5d7f92e0a3d..f932b40f42c 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -3,7 +3,7 @@ from pytest_lazyfixture import lazy_fixture # type: ignore[import] from decoy import Decoy, matchers from pathlib import Path -from typing import List, cast, Optional, Union +from typing import List, cast, Optional, Union, Type from opentrons_shared_data.protocol.dev_types import ( JsonProtocol as LegacyJsonProtocolDict, @@ -26,8 +26,7 @@ JsonRunner, PythonAndLegacyRunner, LiveRunner, - AbstractRunner, - RunnerType, + AnyRunner, ) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader @@ -176,7 +175,7 @@ async def test_create_protocol_runner( legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], - runner_type: RunnerType, + runner_type: Type[AnyRunner], ) -> None: """It should return protocol runner type depending on the config.""" assert isinstance( @@ -188,7 +187,7 @@ async def test_create_protocol_runner( json_file_reader=json_file_reader, json_translator=json_translator, ), - runner_type, # type: ignore[arg-type] + runner_type, ) @@ -204,7 +203,7 @@ async def test_play_starts_run( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: AbstractRunner, + subject: AnyRunner, ) -> None: """It should start a protocol run with play.""" subject.play() @@ -223,7 +222,7 @@ async def test_play_starts_run( async def test_pause( decoy: Decoy, protocol_engine: ProtocolEngine, - subject: AbstractRunner, + subject: AnyRunner, ) -> None: """It should pause a protocol run with pause.""" subject.pause() @@ -243,7 +242,7 @@ async def test_stop( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: AbstractRunner, + subject: AnyRunner, ) -> None: """It should halt a protocol run with stop.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) @@ -266,7 +265,7 @@ async def test_stop_when_run_never_started( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: AbstractRunner, + subject: AnyRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index da172746218..6989c42619c 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -46,8 +46,8 @@ get_protocol_reader, get_protocol_store, 
get_analysis_store, - get_protocol_directory, get_protocol_analyzer, + get_protocol_directory, get_file_reader_writer, get_file_hasher, ) diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index bed04624a0e..0d397f46045 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -6,7 +6,7 @@ from opentrons.config import feature_flags from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_runner import ( - RunnerType, + AnyRunner, JsonRunner, PythonAndLegacyRunner, RunResult, @@ -36,7 +36,7 @@ class RunnerEnginePair(NamedTuple): """A stored Runner/ProtocolEngine pair.""" run_id: str - runner: RunnerType + runner: AnyRunner engine: ProtocolEngine @@ -67,7 +67,7 @@ def engine(self) -> ProtocolEngine: return self._runner_engine_pair.engine @property - def runner(self) -> RunnerType: + def runner(self) -> AnyRunner: """Get the "current" persisted ProtocolRunner.""" assert self._runner_engine_pair is not None, "Runner not yet created." return self._runner_engine_pair.runner @@ -155,8 +155,8 @@ async def create( protocol is not None ), "A Python or JSON protocol should have a protocol source file." await runner.load(protocol.source) - else: # Why does linter not like `else:`..? - runner.set_task_queue_wait() + else: + runner.prepare() for offset in labware_offsets: engine.add_labware_offset(offset) From 5789c41fc9c2f1d0e837497fc9f28d2926f29ac2 Mon Sep 17 00:00:00 2001 From: Sanniti Date: Fri, 14 Apr 2023 16:00:14 -0400 Subject: [PATCH 34/34] address all remaining review comments --- .../protocol_runner/protocol_runner.py | 1 + .../smoke_tests/test_protocol_runner.py | 1 - .../test_pause_run_not_started.tavern.yaml | 28 +------------ .../http_api/runs/test_persistence.py | 20 +++------- ...t_run_queued_protocol_commands.tavern.yaml | 3 -- robot-server/tests/runs/test_engine_store.py | 39 ++++++++++++++----- 6 files changed, 39 insertions(+), 53 deletions(-) diff --git a/api/src/opentrons/protocol_runner/protocol_runner.py b/api/src/opentrons/protocol_runner/protocol_runner.py index 59ed26a408e..25d184b68fa 100644 --- a/api/src/opentrons/protocol_runner/protocol_runner.py +++ b/api/src/opentrons/protocol_runner/protocol_runner.py @@ -277,6 +277,7 @@ async def run( # noqa: D102 self, protocol_source: Optional[ProtocolSource] = None, ) -> RunResult: + assert protocol_source is None await self._hardware_api.home() self.play() self._task_queue.start() diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index 6efdf5f5e6d..459361a5972 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -13,7 +13,6 @@ from pathlib import Path from opentrons_shared_data.pipette.dev_types import PipetteNameType - from opentrons.types import MountType, DeckSlotName from opentrons.protocol_engine import ( DeckSlotLocation, diff --git a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml index 9162ff6b212..ebd9301e8f5 100644 --- a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml @@ -5,24 +5,11 @@ marks: - run_server stages: - - name: 
Upload a protocol
-      request:
-        url: '{host:s}:{port:d}/protocols'
-        method: POST
-        files:
-          files: 'tests/integration/protocols/load_one_labware.py'
-      response:
-        status_code: 201
-        save:
-          json:
-            protocol_id: data.id
-
-    - name: Create a run
+    - name: Create empty run
       request:
         url: '{host:s}:{port:d}/runs'
         json:
-          data:
-            protocolId: '{protocol_id}'
+          data: {}
         method: POST
       response:
         strict:
@@ -36,7 +23,6 @@ stages:
         save:
           json:
             run_id: data.id
-
     - name: Issue pause to a non-active run
       request:
         url: '{host:s}:{port:d}/runs/{run_id}/actions'
@@ -51,13 +37,3 @@ stages:
          - id: 'RunActionNotAllowed'
            title: 'Run Action Not Allowed'
            detail: 'Cannot pause a run that is not running.'
-
-    - name: Stop the run
-      request:
-        url: '{host:s}:{port:d}/runs/{run_id}/actions'
-        method: POST
-        json:
-          data:
-            actionType: stop
-      response:
-        status_code: 201
diff --git a/robot-server/tests/integration/http_api/runs/test_persistence.py b/robot-server/tests/integration/http_api/runs/test_persistence.py
index 3f008f43674..1d0e6b71ecc 100644
--- a/robot-server/tests/integration/http_api/runs/test_persistence.py
+++ b/robot-server/tests/integration/http_api/runs/test_persistence.py
@@ -1,6 +1,5 @@
 from typing import Any, AsyncGenerator, Dict, NamedTuple, cast
 from datetime import datetime
-from pathlib import Path
 
 import anyio
 import pytest
@@ -102,14 +101,12 @@ async def test_runs_persist_via_actions_router(
     """Test that run commands and state are persisted when calling play action through dev server restart."""
     client, server = client_and_server
-    await client.post_protocol([Path("./tests/integration/protocols/simple.py")])
-
-    protocols = (await client.get_protocols()).json()["data"]
-    protocol_id = protocols[0]["id"]
+    # await client.post_protocol([Path("./tests/integration/protocols/simple.py")])
+    #
+    # protocols = (await client.get_protocols()).json()["data"]
+    # protocol_id = protocols[0]["id"]
 
     # create a run
-    create_run_response = await client.post_run(
-        req_body={"data": {"protocolId": protocol_id}}
-    )
+    create_run_response = await client.post_run(req_body={"data": {}})
     run_id = create_run_response.json()["data"]["id"]
 
     # persist the run by hitting the actions router
@@ -232,14 +229,9 @@ async def test_runs_completed_started_at_persist_via_actions_router(
 ) -> None:
     """Test that startedAt and completedAt are persisted via play action."""
     client, server = client_and_server
-    await client.post_protocol([Path("./tests/integration/protocols/simple.py")])
-
-    protocols = (await client.get_protocols()).json()["data"]
 
     # create a run
-    create_run_response = await client.post_run(
-        req_body={"data": {"protocolId": protocols[0]["id"]}}
-    )
+    create_run_response = await client.post_run(req_body={"data": {}})
     run_id = create_run_response.json()["data"]["id"]
 
     # persist the run by hitting the actions router
diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml
index 8d35b1acb89..c1295d2c66b 100644
--- a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml
+++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml
@@ -190,6 +190,3 @@ stages:
              createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$"
              startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$"
              completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$"
-      save:
-        json:
-          load_labware_command_id: data[0].id
diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py
index 527ede8f370..b1ac93a9085 100644
--- a/robot-server/tests/runs/test_engine_store.py
+++ b/robot-server/tests/runs/test_engine_store.py
@@ -13,6 +13,7 @@
 from opentrons.protocol_runner import (
     RunResult,
     LiveRunner,
+    JsonRunner,
 )
 from opentrons.protocol_reader import ProtocolReader, ProtocolSource
@@ -60,6 +61,34 @@ async def test_create_engine(subject: EngineStore) -> None:
     assert isinstance(subject.engine, ProtocolEngine)
 
 
+async def test_create_engine_with_protocol(
+    decoy: Decoy,
+    subject: EngineStore,
+    json_protocol_source: ProtocolSource,
+) -> None:
+    """It should create an engine for a run with a protocol.
+
+    Tests only basic engine and runner creation and checks the creation result.
+    Loading of protocols and live run commands is tested in integration tests.
+    """
+    protocol = ProtocolResource(
+        protocol_id="my cool protocol",
+        protocol_key=None,
+        created_at=datetime(year=2021, month=1, day=1),
+        source=json_protocol_source,
+    )
+
+    result = await subject.create(
+        run_id="run-id",
+        labware_offsets=[],
+        protocol=protocol,
+    )
+    assert subject.current_run_id == "run-id"
+    assert isinstance(result, StateSummary)
+    assert isinstance(subject.runner, JsonRunner)
+    assert isinstance(subject.engine, ProtocolEngine)
+
+
 @pytest.mark.parametrize("robot_type", ["OT-2 Standard", "OT-3 Standard"])
 async def test_create_engine_uses_robot_type(
     decoy: Decoy, robot_type: RobotType
@@ -130,15 +159,7 @@ async def test_clear_engine_not_stopped_or_idle(
     subject: EngineStore, json_protocol_source: ProtocolSource
 ) -> None:
     """It should raise a conflict if the engine is not stopped."""
-    protocol_resource = ProtocolResource(
-        protocol_id="123",
-        created_at=datetime.now(),
-        source=json_protocol_source,
-        protocol_key=None,
-    )
-    await subject.create(
-        run_id="run-id", labware_offsets=[], protocol=protocol_resource
-    )
+    await subject.create(run_id="run-id", labware_offsets=[], protocol=None)
     subject.runner.play()
 
     with pytest.raises(EngineConflictError):