diff --git a/api/src/opentrons/cli/analyze.py b/api/src/opentrons/cli/analyze.py
index 0b7e6776764..931025e15d7 100644
--- a/api/src/opentrons/cli/analyze.py
+++ b/api/src/opentrons/cli/analyze.py
@@ -76,7 +76,9 @@ async def _analyze(
     except ProtocolFilesInvalidError as error:
         raise click.ClickException(str(error))
 
-    runner = await create_simulating_runner(robot_type=protocol_source.robot_type)
+    runner = await create_simulating_runner(
+        robot_type=protocol_source.robot_type, protocol_config=protocol_source.config
+    )
     analysis = await runner.run(protocol_source)
 
     if json_output:
diff --git a/api/src/opentrons/protocol_runner/__init__.py b/api/src/opentrons/protocol_runner/__init__.py
index a86c10b84ba..35360cc77d9 100644
--- a/api/src/opentrons/protocol_runner/__init__.py
+++ b/api/src/opentrons/protocol_runner/__init__.py
@@ -1,13 +1,26 @@
 """Protocol run control and management.
 
-The main export of this module is the ProtocolRunner class. See
+The main export of this module is the AbstractRunner class. See
 protocol_runner.py for more details.
 """
-from .protocol_runner import ProtocolRunner, ProtocolRunResult
+from .protocol_runner import (
+    AbstractRunner,
+    RunResult,
+    create_protocol_runner,
+    JsonRunner,
+    PythonAndLegacyRunner,
+    LiveRunner,
+    AnyRunner,
+)
 from .create_simulating_runner import create_simulating_runner
 
 __all__ = [
-    "ProtocolRunner",
-    "ProtocolRunResult",
+    "AbstractRunner",
+    "RunResult",
     "create_simulating_runner",
+    "create_protocol_runner",
+    "JsonRunner",
+    "PythonAndLegacyRunner",
+    "LiveRunner",
+    "AnyRunner",
 ]
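The re-exported surface above is what callers are expected to use to pick and drive a runner. A rough sketch of the intended wiring, not part of this change: `engine`, `hardware_api`, and `protocol_source` are assumed to already exist (a `ProtocolEngine`, a `HardwareControlAPI`, and a `ProtocolSource`, built the way `EngineStore.create()` and the tests later in this diff build them), and the `await` calls assume an async context.

```python
from opentrons.protocol_runner import AnyRunner, create_protocol_runner

# `engine` and `hardware_api` are assumed to be created elsewhere; pass
# protocol_config=None when there is no protocol (a "live" HTTP-driven run).
runner: AnyRunner = create_protocol_runner(
    protocol_config=protocol_source.config,
    protocol_engine=engine,
    hardware_api=hardware_api,
)

assert not runner.was_started()
result = await runner.run(protocol_source)  # home, play, then wait for completion
print(result.state_summary.status, len(result.commands))
```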
diff --git a/api/src/opentrons/protocol_runner/create_simulating_runner.py b/api/src/opentrons/protocol_runner/create_simulating_runner.py
index 9e9061c73fb..0760845933a 100644
--- a/api/src/opentrons/protocol_runner/create_simulating_runner.py
+++ b/api/src/opentrons/protocol_runner/create_simulating_runner.py
@@ -1,4 +1,4 @@
-"""Simulating ProtocolRunner factory."""
+"""Simulating AbstractRunner factory."""
 from opentrons.config import feature_flags
 from opentrons.hardware_control import API as OT2API, HardwareControlAPI
 
@@ -6,15 +6,18 @@
     Config as ProtocolEngineConfig,
     create_protocol_engine,
 )
+from opentrons.protocol_reader.protocol_source import ProtocolConfig
 from opentrons_shared_data.robot.dev_types import RobotType
 
 from .legacy_wrappers import LegacySimulatingContextCreator
-from .protocol_runner import ProtocolRunner
+from .protocol_runner import create_protocol_runner, AbstractRunner
 
 
-async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner:
-    """Create a ProtocolRunner wired to a simulating HardwareControlAPI.
+async def create_simulating_runner(
+    robot_type: RobotType, protocol_config: ProtocolConfig
+) -> AbstractRunner:
+    """Create an AbstractRunner wired to a simulating HardwareControlAPI.
 
     Example:
         ```python
@@ -24,7 +27,7 @@ async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner:
         from opentrons.protocol_runner import (
             ProtocolType,
             ProtocolFile,
-            ProtocolRunner,
+            AbstractRunner,
             create_simulating_runner,
         )
 
@@ -32,7 +35,7 @@ async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner:
             protocol_type=ProtocolType.PYTHON,
             files=[Path("/path/to/protocol.py")],
         )
-        runner: ProtocolRunner = await create_simulating_runner()
+        runner: AbstractRunner = await create_simulating_runner()
         commands: List[Command] = await runner.run(protocol)
         ```
     """
@@ -62,7 +65,8 @@ async def create_simulating_runner(robot_type: RobotType) -> ProtocolRunner:
         protocol_engine=protocol_engine,
     )
 
-    return ProtocolRunner(
+    return create_protocol_runner(
+        protocol_config=protocol_config,
         protocol_engine=protocol_engine,
         hardware_api=simulating_hardware_api,
         legacy_context_creator=simulating_legacy_context_creator,
diff --git a/api/src/opentrons/protocol_runner/legacy_wrappers.py b/api/src/opentrons/protocol_runner/legacy_wrappers.py
index b65471a5b46..85df1380c1d 100644
--- a/api/src/opentrons/protocol_runner/legacy_wrappers.py
+++ b/api/src/opentrons/protocol_runner/legacy_wrappers.py
@@ -96,6 +96,8 @@ def read(
         )
 
 
+# TODO (spp, 2023-04-05): Remove 'legacy' wording since this is the context we are using
+# for all python protocols.
 class LegacyContextCreator:
     """Interface to construct Protocol API v2 contexts."""
 
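The docstring example above still calls `create_simulating_runner()` with no arguments, which predates the new signature. A sketch of a call that matches the new signature, modeled on `analyze.py` and the smoke tests in this diff — the `ProtocolReader` usage and the path are illustrative, and the `await` calls assume an async context:

```python
from pathlib import Path

from opentrons.protocol_reader import ProtocolReader
from opentrons.protocol_runner import create_simulating_runner

# Read a saved protocol; its source carries both robot_type and config.
protocol_source = await ProtocolReader().read_saved(
    files=[Path("/path/to/protocol.py")],
    directory=None,
)
runner = await create_simulating_runner(
    robot_type=protocol_source.robot_type,
    protocol_config=protocol_source.config,
)
result = await runner.run(protocol_source)
print(result.state_summary.status)
```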
""" + def __init__(self, protocol_engine: ProtocolEngine) -> None: + self._protocol_engine = protocol_engine + + def was_started(self) -> bool: + """Whether the run has been started. + + This value is latched; once it is True, it will never become False. + """ + return self._protocol_engine.state_view.commands.has_been_played() + + def play(self) -> None: + """Start or resume the run.""" + self._protocol_engine.play() + + def pause(self) -> None: + """Pause the run.""" + self._protocol_engine.pause() + + async def stop(self) -> None: + """Stop (cancel) the run.""" + if self.was_started(): + await self._protocol_engine.stop() + else: + await self._protocol_engine.finish( + drop_tips_and_home=False, + set_run_status=False, + ) + + @abstractmethod + async def run( + self, + protocol_source: Optional[ProtocolSource] = None, + ) -> RunResult: + """Run a given protocol to completion.""" + + +class PythonAndLegacyRunner(AbstractRunner): + """Protocol runner implementation for Python protocols, and JSON protocols ≤v5.""" + def __init__( self, protocol_engine: ProtocolEngine, hardware_api: HardwareControlAPI, task_queue: Optional[TaskQueue] = None, - json_file_reader: Optional[JsonFileReader] = None, - json_translator: Optional[JsonTranslator] = None, legacy_file_reader: Optional[LegacyFileReader] = None, legacy_context_creator: Optional[LegacyContextCreator] = None, legacy_executor: Optional[LegacyExecutor] = None, ) -> None: - """Initialize the ProtocolRunner with its dependencies.""" - self._protocol_engine = protocol_engine + """Initialize the PythonAndLegacyRunner with its dependencies.""" + super().__init__(protocol_engine) self._hardware_api = hardware_api - self._json_file_reader = json_file_reader or JsonFileReader() - self._json_translator = json_translator or JsonTranslator() self._legacy_file_reader = legacy_file_reader or LegacyFileReader() self._legacy_context_creator = legacy_context_creator or LegacyContextCreator( hardware_api=hardware_api, @@ -74,21 +111,8 @@ def __init__( # of runner interface self._task_queue = task_queue or TaskQueue(cleanup_func=protocol_engine.finish) - def was_started(self) -> bool: - """Whether the runner has been started. - - This value is latched; once it is True, it will never become False. - """ - return self._protocol_engine.state_view.commands.has_been_played() - async def load(self, protocol_source: ProtocolSource) -> None: - """Load a ProtocolSource into managed ProtocolEngine. - - Calling this method is only necessary if the runner will be used - to control the run of a file-based protocol. - """ - config = protocol_source.config - + """Load a Python or JSONv5(& older) ProtocolSource into managed ProtocolEngine.""" labware_definitions = await protocol_reader.extract_labware_definitions( protocol_source=protocol_source ) @@ -97,41 +121,40 @@ async def load(self, protocol_source: ProtocolSource) -> None: # definitions, so we don't need to yield here. self._protocol_engine.add_labware_definition(definition) - if ( - isinstance(config, JsonProtocolConfig) - and config.schema_version >= LEGACY_JSON_SCHEMA_VERSION_CUTOFF - ): - await self._load_json(protocol_source) - else: - self._load_python_or_legacy_json(protocol_source, labware_definitions) - - def play(self) -> None: - """Start or resume the run.""" - self._protocol_engine.play() + # fixme(mm, 2022-12-23): This does I/O and compute-bound parsing that will block + # the event loop. Jira RSS-165. 
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py b/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py
index 2c8635f7eb4..a49625fc258 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/conftest.py
@@ -92,11 +92,11 @@ def python_protocol_file(tmp_path: Path) -> Path:
     """
     # my protocol
     metadata = {
-        "apiLevel": "3.0",
+        "apiLevel": "2.14",
     }
 
     def run(ctx):
-        pipette = ctx.load_pipette(
-            pipette_name="p300_single",
+        pipette = ctx.load_instrument(
+            instrument_name="p300_single",
             mount="left",
         )
         tip_rack = ctx.load_labware(
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
index 56298e2fe3c..27f25092942 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py
@@ -32,7 +32,10 @@ async def simulate_and_get_commands(protocol_file: Path) -> List[commands.Command]:
         files=[protocol_file],
         directory=None,
     )
-    subject = await create_simulating_runner(robot_type="OT-2 Standard")
+    subject = await create_simulating_runner(
+        robot_type="OT-2 Standard",
+        protocol_config=protocol_source.config,
+    )
     result = await subject.run(protocol_source)
     assert result.state_summary.errors == []
     assert result.state_summary.status == EngineStatus.SUCCEEDED
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
index 613c336c047..32456b98af1 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py
@@ -52,7 +52,10 @@ async def test_legacy_custom_labware(custom_labware_protocol_files: List[Path]) -> None:
         directory=None,
     )
 
-    subject = await create_simulating_runner(robot_type="OT-2 Standard")
+    subject = await create_simulating_runner(
+        robot_type="OT-2 Standard",
+        protocol_config=protocol_source.config,
+    )
     result = await subject.run(protocol_source)
 
     expected_labware = LoadedLabware.construct(
diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
index e2b51095e7c..3230cffa8aa 100644
--- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
+++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py
@@ -60,7 +60,10 @@
async def test_runner_with_modules_in_legacy_python( directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=protocol_source.config, + ) result = await subject.run(protocol_source) commands_result = result.commands diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index ca5528c3fec..459361a5972 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -1,6 +1,6 @@ -"""Smoke tests for the ProtocolRunner and ProtocolEngine classes. +"""Smoke tests for the AbstractRunner and ProtocolEngine classes. -These tests construct a ProtocolRunner with a real ProtocolEngine +These tests construct a AbstractRunner with a real ProtocolEngine hooked to a simulating HardwareAPI. Minimal, but valid and complete, protocol files are then loaded from @@ -8,14 +8,11 @@ there, the ProtocolEngine state is inspected to check that everything was loaded and run as expected. """ -import pytest - from datetime import datetime from decoy import matchers from pathlib import Path from opentrons_shared_data.pipette.dev_types import PipetteNameType - from opentrons.types import MountType, DeckSlotName from opentrons.protocol_engine import ( DeckSlotLocation, @@ -31,21 +28,21 @@ from opentrons.protocol_runner import create_simulating_runner -# TODO (tz, 6-17-22): API version 3.x in-development. -# Currently parsing protocol versions less then MAX_SUPPORTED_VERSION -@pytest.mark.xfail(strict=True) async def test_runner_with_python( python_protocol_file: Path, tempdeck_v1_def: ModuleDefinition, ) -> None: - """It should run a Python protocol on the ProtocolRunner.""" + """It should run a Python protocol on the PythonAndLegacyRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[python_protocol_file], directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=protocol_source.config, + ) result = await subject.run(protocol_source) commands_result = result.commands pipettes_result = result.state_summary.pipettes @@ -96,7 +93,10 @@ async def test_runner_with_python( wellName="A1", ), result=commands.PickUpTipResult( - tipVolume=300.0, position=DeckPoint(x=0, y=0, z=0) + tipVolume=300.0, + tipLength=51.83, + tipDiameter=5.23, + position=DeckPoint(x=14.38, y=74.24, z=64.69), ), ) @@ -104,14 +104,16 @@ async def test_runner_with_python( async def test_runner_with_json(json_protocol_file: Path) -> None: - """It should run a JSON protocol on the ProtocolRunner.""" + """It should run a JSON protocol on the JsonRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[json_protocol_file], directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", protocol_config=protocol_source.config + ) result = await subject.run(protocol_source) commands_result = result.commands @@ -163,14 +165,17 @@ async def test_runner_with_json(json_protocol_file: Path) -> None: async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> None: - """It should run a Python protocol on the 
ProtocolRunner.""" + """It should run a Python protocol on the PythonAndLegacyRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_python_protocol_file], directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", + protocol_config=protocol_source.config, + ) result = await subject.run(protocol_source) commands_result = result.commands @@ -220,14 +225,16 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: - """It should run a Python protocol on the ProtocolRunner.""" + """It should run a Python protocol on the PythonAndLegacyRunner.""" protocol_reader = ProtocolReader() protocol_source = await protocol_reader.read_saved( files=[legacy_json_protocol_file], directory=None, ) - subject = await create_simulating_runner(robot_type="OT-2 Standard") + subject = await create_simulating_runner( + robot_type="OT-2 Standard", protocol_config=protocol_source.config + ) result = await subject.run(protocol_source) commands_result = result.commands diff --git a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py index 531ff4a8401..07bf2d0283b 100644 --- a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py @@ -1,4 +1,4 @@ -"""Tests for the ProtocolRunner's LegacyContextPlugin.""" +"""Tests for the PythonAndLegacyRunner's LegacyCommandMapper.""" import inspect from datetime import datetime from typing import cast diff --git a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py index fb2c475fb93..d7bb3fc9902 100644 --- a/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py +++ b/api/tests/opentrons/protocol_runner/test_legacy_context_plugin.py @@ -1,4 +1,4 @@ -"""Tests for the ProtocolRunner's LegacyContextPlugin.""" +"""Tests for the PythonAndLegacyRunner's LegacyContextPlugin.""" import pytest from anyio import to_thread from decoy import Decoy, matchers diff --git a/api/tests/opentrons/protocol_runner/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/test_protocol_runner.py index 43e1bbbc14e..f932b40f42c 100644 --- a/api/tests/opentrons/protocol_runner/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/test_protocol_runner.py @@ -1,8 +1,9 @@ -"""Tests for the ProtocolRunner class.""" +"""Tests for the PythonAndLegacyRunner, JsonRunner & LiveRunner classes.""" import pytest +from pytest_lazyfixture import lazy_fixture # type: ignore[import] from decoy import Decoy, matchers from pathlib import Path -from typing import List, cast +from typing import List, cast, Optional, Union, Type from opentrons_shared_data.protocol.dev_types import ( JsonProtocol as LegacyJsonProtocolDict, @@ -20,7 +21,13 @@ JsonProtocolConfig, PythonProtocolConfig, ) -from opentrons.protocol_runner import ProtocolRunner +from opentrons.protocol_runner import ( + create_protocol_runner, + JsonRunner, + PythonAndLegacyRunner, + LiveRunner, + AnyRunner, +) from opentrons.protocol_runner.task_queue import TaskQueue from opentrons.protocol_runner.json_file_reader import JsonFileReader from opentrons.protocol_runner.json_translator import JsonTranslator @@ -97,34 +104,106 @@ def 
use_mock_extract_labware_definitions( @pytest.fixture -def subject( +def json_runner_subject( protocol_engine: ProtocolEngine, hardware_api: HardwareAPI, task_queue: TaskQueue, json_file_reader: JsonFileReader, json_translator: JsonTranslator, +) -> JsonRunner: + """Get a JsonRunner test subject with mocked dependencies.""" + return JsonRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + ) + + +@pytest.fixture +def legacy_python_runner_subject( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, legacy_file_reader: LegacyFileReader, legacy_context_creator: LegacyContextCreator, legacy_executor: LegacyExecutor, -) -> ProtocolRunner: - """Get a ProtocolRunner test subject with mocked dependencies.""" - return ProtocolRunner( +) -> PythonAndLegacyRunner: + """Get a PythonAndLegacyRunner test subject with mocked dependencies.""" + return PythonAndLegacyRunner( protocol_engine=protocol_engine, hardware_api=hardware_api, task_queue=task_queue, - json_file_reader=json_file_reader, - json_translator=json_translator, legacy_file_reader=legacy_file_reader, legacy_context_creator=legacy_context_creator, legacy_executor=legacy_executor, ) +@pytest.fixture +def live_runner_subject( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, +) -> LiveRunner: + """Get a LiveRunner test subject with mocked dependencies.""" + return LiveRunner( + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + ) + + +@pytest.mark.parametrize( + "config, runner_type", + [ + (JsonProtocolConfig(schema_version=6), JsonRunner), + (PythonProtocolConfig(api_version=APIVersion(2, 14)), PythonAndLegacyRunner), + (JsonProtocolConfig(schema_version=5), PythonAndLegacyRunner), + (PythonProtocolConfig(api_version=APIVersion(2, 13)), PythonAndLegacyRunner), + (None, LiveRunner), + ], +) +async def test_create_protocol_runner( + protocol_engine: ProtocolEngine, + hardware_api: HardwareAPI, + task_queue: TaskQueue, + json_file_reader: JsonFileReader, + json_translator: JsonTranslator, + legacy_file_reader: LegacyFileReader, + legacy_context_creator: LegacyContextCreator, + legacy_executor: LegacyExecutor, + config: Optional[Union[JsonProtocolConfig, PythonProtocolConfig]], + runner_type: Type[AnyRunner], +) -> None: + """It should return protocol runner type depending on the config.""" + assert isinstance( + create_protocol_runner( + protocol_config=config, + protocol_engine=protocol_engine, + hardware_api=hardware_api, + task_queue=task_queue, + json_file_reader=json_file_reader, + json_translator=json_translator, + ), + runner_type, + ) + + +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), + ], +) async def test_play_starts_run( decoy: Decoy, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + subject: AnyRunner, ) -> None: """It should start a protocol run with play.""" subject.play() @@ -132,10 +211,18 @@ async def test_play_starts_run( decoy.verify(protocol_engine.play(), times=1) +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), + ], +) async def test_pause( decoy: Decoy, protocol_engine: ProtocolEngine, - subject: 
ProtocolRunner, + subject: AnyRunner, ) -> None: """It should pause a protocol run with pause.""" subject.pause() @@ -143,11 +230,19 @@ async def test_pause( decoy.verify(protocol_engine.pause(), times=1) +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), + ], +) async def test_stop( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + subject: AnyRunner, ) -> None: """It should halt a protocol run with stop.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(True) @@ -158,11 +253,19 @@ async def test_stop( decoy.verify(await protocol_engine.stop(), times=1) -async def test_stop_never_started( +@pytest.mark.parametrize( + "subject", + [ + (lazy_fixture("json_runner_subject")), + (lazy_fixture("legacy_python_runner_subject")), + (lazy_fixture("live_runner_subject")), + ], +) +async def test_stop_when_run_never_started( decoy: Decoy, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + subject: AnyRunner, ) -> None: """It should clean up rather than halt if the runner was never started.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return(False) @@ -175,21 +278,21 @@ async def test_stop_never_started( ) -async def test_run( +async def test_run_json_runner( decoy: Decoy, hardware_api: HardwareAPI, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_runner_subject: JsonRunner, ) -> None: """It should run a protocol to completion.""" decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( False, True ) - assert subject.was_started() is False - await subject.run() - assert subject.was_started() is True + assert json_runner_subject.was_started() is False + await json_runner_subject.run() + assert json_runner_subject.was_started() is True decoy.verify( await hardware_api.home(), @@ -199,13 +302,13 @@ async def test_run( ) -async def test_load_json( +async def test_load_json_runner( decoy: Decoy, json_file_reader: JsonFileReader, json_translator: JsonTranslator, protocol_engine: ProtocolEngine, task_queue: TaskQueue, - subject: ProtocolRunner, + json_runner_subject: JsonRunner, ) -> None: """It should load a JSON protocol file.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -237,7 +340,7 @@ async def test_load_json( ] liquids: List[Liquid] = [ - Liquid(id="water-id", displayName="water", description=" water desc") + Liquid(id="water-id", displayName="water", description="water desc") ] decoy.when( @@ -247,10 +350,13 @@ async def test_load_json( decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) - await subject.load(json_protocol_source) + await json_runner_subject.load(json_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), + protocol_engine.add_liquid( + id="water-id", name="water", description="water desc", color=None + ), protocol_engine.add_command( request=pe_commands.WaitForResumeCreate( params=pe_commands.WaitForResumeParams(message="hello") @@ -272,66 +378,6 @@ async def test_load_json( ) -async def test_load_json_liquids_ff_on( - decoy: Decoy, - json_file_reader: JsonFileReader, - json_translator: JsonTranslator, - protocol_engine: ProtocolEngine, - task_queue: TaskQueue, - subject: 
ProtocolRunner, -) -> None: - """It should load a JSON protocol file.""" - labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] - - json_protocol_source = ProtocolSource( - directory=Path("/dev/null"), - main_file=Path("/dev/null/abc.json"), - files=[], - metadata={}, - robot_type="OT-2 Standard", - config=JsonProtocolConfig(schema_version=6), - content_hash="abc123", - ) - - json_protocol = ProtocolSchemaV6.construct() # type: ignore[call-arg] - - commands: List[pe_commands.CommandCreate] = [ - pe_commands.LoadLiquidCreate( - params=pe_commands.LoadLiquidParams( - liquidId="water-id", labwareId="labware-id", volumeByWell={"A1": 30} - ) - ), - ] - - liquids: List[Liquid] = [ - Liquid(id="water-id", displayName="water", description="water desc") - ] - - decoy.when( - await protocol_reader.extract_labware_definitions(json_protocol_source) - ).then_return([labware_definition]) - decoy.when(json_file_reader.read(json_protocol_source)).then_return(json_protocol) - decoy.when(json_translator.translate_commands(json_protocol)).then_return(commands) - decoy.when(json_translator.translate_liquids(json_protocol)).then_return(liquids) - - await subject.load(json_protocol_source) - - decoy.verify( - protocol_engine.add_labware_definition(labware_definition), - protocol_engine.add_liquid( - id="water-id", name="water", description="water desc", color=None - ), - protocol_engine.add_command( - request=pe_commands.LoadLiquidCreate( - params=pe_commands.LoadLiquidParams( - liquidId="water-id", labwareId="labware-id", volumeByWell={"A1": 30} - ) - ), - ), - task_queue.set_run_func(func=protocol_engine.wait_until_complete), - ) - - async def test_load_legacy_python( decoy: Decoy, legacy_file_reader: LegacyFileReader, @@ -339,7 +385,7 @@ async def test_load_legacy_python( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -387,7 +433,7 @@ async def test_load_legacy_python( ) ).then_return(legacy_context) - await subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -405,7 +451,7 @@ async def test_load_python_with_pe_papi_core( legacy_file_reader: LegacyFileReader, legacy_context_creator: LegacyContextCreator, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based Python protocol.""" legacy_protocol_source = ProtocolSource( @@ -446,7 +492,7 @@ async def test_load_python_with_pe_papi_core( ) ).then_return(legacy_context) - await subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0) @@ -458,7 +504,7 @@ async def test_load_legacy_json( legacy_executor: LegacyExecutor, task_queue: TaskQueue, protocol_engine: ProtocolEngine, - subject: ProtocolRunner, + legacy_python_runner_subject: PythonAndLegacyRunner, ) -> None: """It should load a legacy context-based JSON protocol.""" labware_definition = LabwareDefinition.construct() # type: ignore[call-arg] @@ -501,7 +547,7 @@ async def test_load_legacy_json( ) ).then_return(legacy_context) - await 
subject.load(legacy_protocol_source) + await legacy_python_runner_subject.load(legacy_protocol_source) decoy.verify( protocol_engine.add_labware_definition(labware_definition), @@ -512,3 +558,51 @@ async def test_load_legacy_json( context=legacy_context, ), ) + + +async def test_run_python_runner( + decoy: Decoy, + hardware_api: HardwareAPI, + protocol_engine: ProtocolEngine, + task_queue: TaskQueue, + legacy_python_runner_subject: PythonAndLegacyRunner, +) -> None: + """It should run a protocol to completion.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( + False, True + ) + + assert legacy_python_runner_subject.was_started() is False + await legacy_python_runner_subject.run() + assert legacy_python_runner_subject.was_started() is True + + decoy.verify( + await hardware_api.home(), + protocol_engine.play(), + task_queue.start(), + await task_queue.join(), + ) + + +async def test_run_live_runner( + decoy: Decoy, + hardware_api: HardwareAPI, + protocol_engine: ProtocolEngine, + task_queue: TaskQueue, + live_runner_subject: LiveRunner, +) -> None: + """It should run a protocol to completion.""" + decoy.when(protocol_engine.state_view.commands.has_been_played()).then_return( + False, True + ) + + assert live_runner_subject.was_started() is False + await live_runner_subject.run() + assert live_runner_subject.was_started() is True + + decoy.verify( + await hardware_api.home(), + protocol_engine.play(), + task_queue.start(), + await task_queue.join(), + ) diff --git a/g-code-testing/g_code_parsing/g_code_engine.py b/g-code-testing/g_code_parsing/g_code_engine.py index 2416a370550..fcc56371caa 100644 --- a/g-code-testing/g_code_parsing/g_code_engine.py +++ b/g-code-testing/g_code_parsing/g_code_engine.py @@ -15,7 +15,7 @@ ProtocolSource, PythonProtocolConfig, ) -from opentrons.protocol_runner.protocol_runner import ProtocolRunner +from opentrons.protocol_runner.protocol_runner import create_protocol_runner from opentrons.protocols.parse import parse from opentrons.protocols.execution import execute from contextlib import asynccontextmanager, contextmanager @@ -156,7 +156,8 @@ async def run_protocol( content_hash=path, ) - protocol_runner: ProtocolRunner = ProtocolRunner( + protocol_runner = create_protocol_runner( + protocol_config=config, protocol_engine=await create_protocol_engine( hardware_api=hardware, # type: ignore config=Config(robot_type=robot_type), diff --git a/robot-server/robot_server/protocols/dependencies.py b/robot-server/robot_server/protocols/dependencies.py index f7742bde1df..f2d3245ddce 100644 --- a/robot-server/robot_server/protocols/dependencies.py +++ b/robot-server/robot_server/protocols/dependencies.py @@ -11,9 +11,6 @@ from sqlalchemy.engine import Engine as SQLEngine from opentrons.protocol_reader import ProtocolReader, FileReaderWriter, FileHasher -from opentrons.protocol_runner import create_simulating_runner - -from opentrons_shared_data.robot.dev_types import RobotType from server_utils.fastapi_utils.app_state import ( AppState, @@ -21,7 +18,6 @@ get_app_state, ) from robot_server.deletion_planner import ProtocolDeletionPlanner -from robot_server.hardware import get_robot_type from robot_server.persistence import get_sql_engine, get_persistence_directory from robot_server.settings import get_settings @@ -112,13 +108,9 @@ async def get_analysis_store( async def get_protocol_analyzer( analysis_store: AnalysisStore = Depends(get_analysis_store), - analysis_robot_type: RobotType = Depends(get_robot_type), ) -> ProtocolAnalyzer: 
"""Construct a ProtocolAnalyzer for a single request.""" - protocol_runner = await create_simulating_runner(robot_type=analysis_robot_type) - return ProtocolAnalyzer( - protocol_runner=protocol_runner, analysis_store=analysis_store, ) diff --git a/robot-server/robot_server/protocols/protocol_analyzer.py b/robot-server/robot_server/protocols/protocol_analyzer.py index 6af3069f8bd..cd8c44cb539 100644 --- a/robot-server/robot_server/protocols/protocol_analyzer.py +++ b/robot-server/robot_server/protocols/protocol_analyzer.py @@ -1,7 +1,7 @@ """Protocol analysis module.""" import logging -from opentrons.protocol_runner import ProtocolRunner +from opentrons import protocol_runner from .protocol_store import ProtocolResource from .analysis_store import AnalysisStore @@ -15,11 +15,9 @@ class ProtocolAnalyzer: def __init__( self, - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, ) -> None: """Initialize the analyzer and its dependencies.""" - self._protocol_runner = protocol_runner self._analysis_store = analysis_store async def analyze( @@ -28,7 +26,11 @@ async def analyze( analysis_id: str, ) -> None: """Analyze a given protocol, storing the analysis when complete.""" - result = await self._protocol_runner.run(protocol_resource.source) + runner = await protocol_runner.create_simulating_runner( + robot_type=protocol_resource.source.robot_type, + protocol_config=protocol_resource.source.config, + ) + result = await runner.run(protocol_resource.source) log.info(f'Completed analysis "{analysis_id}".') diff --git a/robot-server/robot_server/runs/engine_store.py b/robot-server/robot_server/runs/engine_store.py index 04425a73775..0d397f46045 100644 --- a/robot-server/robot_server/runs/engine_store.py +++ b/robot-server/robot_server/runs/engine_store.py @@ -5,7 +5,13 @@ from opentrons.config import feature_flags from opentrons.hardware_control import HardwareControlAPI -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +from opentrons.protocol_runner import ( + AnyRunner, + JsonRunner, + PythonAndLegacyRunner, + RunResult, + create_protocol_runner, +) from opentrons.protocol_engine import ( ProtocolEngine, Config as ProtocolEngineConfig, @@ -27,10 +33,10 @@ class EngineConflictError(RuntimeError): class RunnerEnginePair(NamedTuple): - """A stored ProtocolRunner/ProtocolEngine pair.""" + """A stored Runner/ProtocolEngine pair.""" run_id: str - runner: ProtocolRunner + runner: AnyRunner engine: ProtocolEngine @@ -61,7 +67,7 @@ def engine(self) -> ProtocolEngine: return self._runner_engine_pair.engine @property - def runner(self) -> ProtocolRunner: + def runner(self) -> AnyRunner: """Get the "current" persisted ProtocolRunner.""" assert self._runner_engine_pair is not None, "Runner not yet created." return self._runner_engine_pair.runner @@ -132,16 +138,25 @@ async def create( block_on_door_open=feature_flags.enable_door_safety_switch(), ), ) - runner = ProtocolRunner(protocol_engine=engine, hardware_api=self._hardware_api) + runner = create_protocol_runner( + protocol_engine=engine, + hardware_api=self._hardware_api, + protocol_config=protocol.source.config if protocol else None, + ) if self._runner_engine_pair is not None: raise EngineConflictError("Another run is currently active.") - if protocol is not None: + if isinstance(runner, (PythonAndLegacyRunner, JsonRunner)): # FIXME(mm, 2022-12-21): This `await` introduces a concurrency hazard. 
If # two requests simultaneously call this method, they will both "succeed" # (with undefined results) instead of one raising EngineConflictError. + assert ( + protocol is not None + ), "A Python or JSON protocol should have a protocol source file." await runner.load(protocol.source) + else: + runner.prepare() for offset in labware_offsets: engine.add_labware_offset(offset) @@ -154,7 +169,7 @@ async def create( return engine.state_view.get_summary() - async def clear(self) -> ProtocolRunResult: + async def clear(self) -> RunResult: """Remove the persisted ProtocolEngine. Raises: @@ -173,4 +188,4 @@ async def clear(self) -> ProtocolRunResult: commands = state_view.commands.get_all() self._runner_engine_pair = None - return ProtocolRunResult(state_summary=run_data, commands=commands) + return RunResult(state_summary=run_data, commands=commands) diff --git a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml index 0ff14461e6e..ebd9301e8f5 100644 --- a/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml +++ b/robot-server/tests/integration/http_api/runs/test_pause_run_not_started.tavern.yaml @@ -5,7 +5,7 @@ marks: - run_server stages: - - name: Create Empty Run + - name: Create empty run request: url: '{host:s}:{port:d}/runs' json: diff --git a/robot-server/tests/integration/http_api/runs/test_persistence.py b/robot-server/tests/integration/http_api/runs/test_persistence.py index 99f30896357..1d0e6b71ecc 100644 --- a/robot-server/tests/integration/http_api/runs/test_persistence.py +++ b/robot-server/tests/integration/http_api/runs/test_persistence.py @@ -101,7 +101,10 @@ async def test_runs_persist_via_actions_router( """Test that runs commands and state are persisted when calling play action through dev server restart.""" client, server = client_and_server - + # await client.post_protocol([Path("./tests/integration/protocols/simple.py")]) + # + # protocols = (await client.get_protocols()).json()["data"] + # protocol_id = protocols[0]["id"] # create a run create_run_response = await client.post_run(req_body={"data": {}}) run_id = create_run_response.json()["data"]["id"] diff --git a/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml new file mode 100644 index 00000000000..c1295d2c66b --- /dev/null +++ b/robot-server/tests/integration/http_api/runs/test_run_queued_protocol_commands.tavern.yaml @@ -0,0 +1,192 @@ +test_name: Make sure multiple commands can be queued and played on a basic run + +marks: + - usefixtures: + - run_server + - clean_server_state + +stages: + - name: Create empty run + request: + url: '{host:s}:{port:d}/runs' + json: + data: {} + method: POST + response: + strict: + - json:off + status_code: 201 + json: + data: + id: !anystr + status: idle + current: true + save: + json: + run_id: data.id + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 1' + response: + status_code: 201 + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 2' + response: + status_code: 201 + + - name: Create protocol commands + 
request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 3' + response: + status_code: 201 + + - name: Create protocol commands + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: POST + json: + data: + commandType: 'comment' + intent: 'protocol' + params: + message: 'test 4' + response: + status_code: 201 + + - name: Get run + request: + url: '{host:s}:{port:d}/runs/{run_id}' + method: GET + response: + status_code: 200 + json: + data: + actions: [] + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + current: True + errors: [] + id: '{run_id}' + labware: + - id: 'fixedTrash' + loadName: 'opentrons_1_trash_1100ml_fixed' + definitionUri: 'opentrons/opentrons_1_trash_1100ml_fixed/1' + location: + slotName: '12' + labwareOffsets: [] + liquids: [] + modules: [] + pipettes: [] + status: 'idle' + + - name: Play the run + request: + url: '{host:s}:{port:d}/runs/{run_id}/actions' + method: POST + json: + data: + actionType: play + response: + status_code: 201 + json: + data: + id: !anystr + actionType: play + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + save: + json: + play_action_id: data.id + + # While the protocol is ongoing, several fields (like labware) + # are nondeterministic depending on request timing. + # Wait for the protocol to complete so things settle. + - name: Wait for the protocol to complete + max_retries: 10 + delay_after: 0.1 + request: + url: '{host:s}:{port:d}/runs/{run_id}' + method: GET + response: + status_code: 200 + strict: + - json:off + json: + data: + status: succeeded + + - name: Verify commands succeeded with the expected results + request: + url: '{host:s}:{port:d}/runs/{run_id}/commands' + method: GET + response: + status_code: 200 + json: + links: + current: !anydict + meta: + cursor: 0 + totalLength: 4 + data: + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 1' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 2' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 3' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + - id: !anystr + key: !anystr + commandType: comment + status: succeeded + intent: protocol + params: + message: 'test 4' + createdAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + startedAt: !re_search "^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" + completedAt: !re_search 
"^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}:\\d{2}$" diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 96191e1217e..c655db805a5 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -14,7 +14,7 @@ errors as pe_errors, types as pe_types, ) -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +import opentrons.protocol_runner as protocol_runner from opentrons.protocol_reader import ProtocolSource, JsonProtocolConfig from robot_server.protocols.analysis_store import AnalysisStore @@ -22,10 +22,13 @@ from robot_server.protocols.protocol_analyzer import ProtocolAnalyzer -@pytest.fixture -def protocol_runner(decoy: Decoy) -> ProtocolRunner: - """Get a mocked out ProtocolRunner.""" - return decoy.mock(cls=ProtocolRunner) +@pytest.fixture(autouse=True) +def patch_mock_create_simulating_runner( + decoy: Decoy, monkeypatch: pytest.MonkeyPatch +) -> None: + """Replace protocol_runner.check() with a mock.""" + mock = decoy.mock(func=protocol_runner.create_simulating_runner) + monkeypatch.setattr(protocol_runner, "create_simulating_runner", mock) @pytest.fixture @@ -36,19 +39,16 @@ def analysis_store(decoy: Decoy) -> AnalysisStore: @pytest.fixture def subject( - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, ) -> ProtocolAnalyzer: """Get a ProtocolAnalyzer test subject.""" return ProtocolAnalyzer( - protocol_runner=protocol_runner, analysis_store=analysis_store, ) async def test_analyze( decoy: Decoy, - protocol_runner: ProtocolRunner, analysis_store: AnalysisStore, subject: ProtocolAnalyzer, ) -> None: @@ -97,8 +97,17 @@ async def test_analyze( mount=MountType.LEFT, ) - decoy.when(await protocol_runner.run(protocol_resource.source)).then_return( - ProtocolRunResult( + json_runner = decoy.mock(cls=protocol_runner.JsonRunner) + + decoy.when( + await protocol_runner.create_simulating_runner( + robot_type="OT-3 Standard", + protocol_config=JsonProtocolConfig(schema_version=123), + ) + ).then_return(json_runner) + + decoy.when(await json_runner.run(protocol_resource.source)).then_return( + protocol_runner.RunResult( commands=[analysis_command], state_summary=StateSummary( status=EngineStatus.SUCCEEDED, diff --git a/robot-server/tests/runs/test_engine_store.py b/robot-server/tests/runs/test_engine_store.py index e494810abd0..b1ac93a9085 100644 --- a/robot-server/tests/runs/test_engine_store.py +++ b/robot-server/tests/runs/test_engine_store.py @@ -1,7 +1,6 @@ """Tests for the EngineStore interface.""" from datetime import datetime from pathlib import Path - import pytest from decoy import Decoy, matchers @@ -11,7 +10,11 @@ from opentrons.types import DeckSlotName from opentrons.hardware_control import HardwareControlAPI from opentrons.protocol_engine import ProtocolEngine, StateSummary, types as pe_types -from opentrons.protocol_runner import ProtocolRunner, ProtocolRunResult +from opentrons.protocol_runner import ( + RunResult, + LiveRunner, + JsonRunner, +) from opentrons.protocol_reader import ProtocolReader, ProtocolSource from robot_server.protocols import ProtocolResource @@ -19,11 +22,18 @@ @pytest.fixture -def subject(decoy: Decoy) -> EngineStore: - """Get a EngineStore test subject.""" +def hardware_api( + decoy: Decoy, +) -> HardwareControlAPI: + """Return a mock in the shape of a HardwareControlAPI.""" # TODO(mc, 2021-06-11): to make these test more effective and valuable, we # should 
pass in some sort of actual, valid HardwareAPI instead of a mock - hardware_api = decoy.mock(cls=HardwareControlAPI) + return decoy.mock(cls=HardwareControlAPI) + + +@pytest.fixture +def subject(decoy: Decoy, hardware_api: HardwareControlAPI) -> EngineStore: + """Get a EngineStore test subject.""" return EngineStore( hardware_api=hardware_api, # Arbitrary choice of robot_type. Tests where robot_type matters should @@ -33,7 +43,7 @@ def subject(decoy: Decoy) -> EngineStore: @pytest.fixture -async def protocol_source(tmp_path: Path) -> ProtocolSource: +async def json_protocol_source(tmp_path: Path) -> ProtocolSource: """Get a protocol source fixture.""" simple_protocol = ( get_shared_data_root() / "protocol" / "fixtures" / "6" / "simpleV6.json" @@ -47,7 +57,35 @@ async def test_create_engine(subject: EngineStore) -> None: assert subject.current_run_id == "run-id" assert isinstance(result, StateSummary) - assert isinstance(subject.runner, ProtocolRunner) + assert isinstance(subject.runner, LiveRunner) + assert isinstance(subject.engine, ProtocolEngine) + + +async def test_create_engine_with_protocol( + decoy: Decoy, + subject: EngineStore, + json_protocol_source: ProtocolSource, +) -> None: + """It should create an engine for a run with protocol. + + Tests only basic engine & runner creation with creation result. + Loading of protocols/ live run commands is tested in integration test. + """ + protocol = ProtocolResource( + protocol_id="my cool protocol", + protocol_key=None, + created_at=datetime(year=2021, month=1, day=1), + source=json_protocol_source, + ) + + result = await subject.create( + run_id="run-id", + labware_offsets=[], + protocol=protocol, + ) + assert subject.current_run_id == "run-id" + assert isinstance(result, StateSummary) + assert isinstance(subject.runner, JsonRunner) assert isinstance(subject.engine, ProtocolEngine) @@ -91,29 +129,6 @@ async def test_create_engine_with_labware_offsets(subject: EngineStore) -> None: ] -@pytest.mark.xfail(strict=True, raises=NotImplementedError) -async def test_create_engine_with_protocol( - subject: EngineStore, - protocol_source: ProtocolSource, -) -> None: - """It should create an engine for a run with labware offsets.""" - # TODO(mc, 2022-05-18): https://github.com/Opentrons/opentrons/pull/10170 - raise NotImplementedError("Implement this test when JSONv6 runs are supported") - - protocol = ProtocolResource( - protocol_id="my cool protocol", - protocol_key=None, - created_at=datetime(year=2021, month=1, day=1), - source=protocol_source, - ) - - await subject.create( - run_id="run-id", - labware_offsets=[], - protocol=protocol, - ) - - async def test_archives_state_if_engine_already_exists(subject: EngineStore) -> None: """It should not create more than one engine / runner pair.""" await subject.create(run_id="run-id-1", labware_offsets=[], protocol=None) @@ -131,7 +146,7 @@ async def test_clear_engine(subject: EngineStore) -> None: result = await subject.clear() assert subject.current_run_id is None - assert isinstance(result, ProtocolRunResult) + assert isinstance(result, RunResult) with pytest.raises(AssertionError): subject.engine @@ -140,7 +155,9 @@ async def test_clear_engine(subject: EngineStore) -> None: subject.runner -async def test_clear_engine_not_stopped_or_idle(subject: EngineStore) -> None: +async def test_clear_engine_not_stopped_or_idle( + subject: EngineStore, json_protocol_source: ProtocolSource +) -> None: """It should raise a conflict if the engine is not stopped.""" await subject.create(run_id="run-id", 
labware_offsets=[], protocol=None) subject.runner.play() diff --git a/robot-server/tests/runs/test_run_controller.py b/robot-server/tests/runs/test_run_controller.py index b31b2b1553e..7387af2d912 100644 --- a/robot-server/tests/runs/test_run_controller.py +++ b/robot-server/tests/runs/test_run_controller.py @@ -11,7 +11,7 @@ commands as pe_commands, errors as pe_errors, ) -from opentrons.protocol_runner import ProtocolRunResult +from opentrons.protocol_runner import RunResult, JsonRunner, PythonAndLegacyRunner from robot_server.service.task_runner import TaskRunner from robot_server.runs.action_models import RunAction, RunActionType @@ -94,7 +94,9 @@ async def test_create_play_action_to_resume( subject: RunController, ) -> None: """It should resume a run.""" - decoy.when(mock_engine_store.runner.was_started()).then_return(True) + mock_json_runner = decoy.mock(cls=JsonRunner) + decoy.when(mock_engine_store.runner).then_return(mock_json_runner) + decoy.when(mock_json_runner.was_started()).then_return(True) result = subject.create_action( action_id="some-action-id", @@ -109,8 +111,8 @@ async def test_create_play_action_to_resume( ) decoy.verify(mock_run_store.insert_action(run_id, result), times=1) - decoy.verify(mock_engine_store.runner.play(), times=1) - decoy.verify(await mock_engine_store.runner.run(), times=0) + decoy.verify(mock_json_runner.play(), times=1) + decoy.verify(await mock_json_runner.run(), times=0) async def test_create_play_action_to_start( @@ -124,7 +126,9 @@ async def test_create_play_action_to_start( subject: RunController, ) -> None: """It should start a run.""" - decoy.when(mock_engine_store.runner.was_started()).then_return(False) + mock_python_runner = decoy.mock(cls=PythonAndLegacyRunner) + decoy.when(mock_engine_store.runner).then_return(mock_python_runner) + decoy.when(mock_python_runner.was_started()).then_return(False) result = subject.create_action( action_id="some-action-id", @@ -143,8 +147,8 @@ async def test_create_play_action_to_start( background_task_captor = matchers.Captor() decoy.verify(mock_task_runner.run(background_task_captor)) - decoy.when(await mock_engine_store.runner.run()).then_return( - ProtocolRunResult( + decoy.when(await mock_python_runner.run()).then_return( + RunResult( commands=protocol_commands, state_summary=engine_state_summary, ) diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 1ecdf9e5896..d03052ecc03 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -6,7 +6,7 @@ from decoy import Decoy, matchers from opentrons.types import DeckSlotName -from opentrons.protocol_runner import ProtocolRunResult +from opentrons.protocol_runner import RunResult from opentrons.protocol_engine import ( EngineStatus, StateSummary, @@ -485,7 +485,7 @@ async def test_update_current( run_id = "hello world" decoy.when(mock_engine_store.current_run_id).then_return(run_id) decoy.when(await mock_engine_store.clear()).then_return( - ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) + RunResult(commands=[run_command], state_summary=engine_state_summary) ) decoy.when( @@ -593,7 +593,7 @@ async def test_create_archives_existing( decoy.when(mock_engine_store.current_run_id).then_return(run_id_old) decoy.when(await mock_engine_store.clear()).then_return( - ProtocolRunResult(commands=[run_command], state_summary=engine_state_summary) + RunResult(commands=[run_command], state_summary=engine_state_summary) 
) decoy.when(