diff --git a/performance-metrics/Makefile b/performance-metrics/Makefile index 993fdc37196..087429bf139 100644 --- a/performance-metrics/Makefile +++ b/performance-metrics/Makefile @@ -37,8 +37,8 @@ clean_all_cmd = $(clean_cmd) dist .PHONY: lint lint: $(python) -m black --check . - $(python) -m flake8 . $(python) -m mypy . + $(python) -m flake8 . .PHONY: format format: diff --git a/performance-metrics/Pipfile b/performance-metrics/Pipfile index a71db703e33..84401f865ef 100644 --- a/performance-metrics/Pipfile +++ b/performance-metrics/Pipfile @@ -6,6 +6,8 @@ name = "pypi" [packages] opentrons-shared-data = {file = "../shared-data/python", editable = true} performance-metrics = {file = ".", editable = true} +psutil = "6.0.0" +systemd-python = "*" [dev-packages] pytest = "==7.4.4" @@ -16,6 +18,7 @@ flake8-docstrings = "~=1.7.0" flake8-noqa = "~=1.4.0" black = "==22.3.0" pytest-asyncio = "~=0.23.0" +types-psutil = "*" [requires] python_version = "3.10" diff --git a/performance-metrics/Pipfile.lock b/performance-metrics/Pipfile.lock index 5c836231b7e..4f2484b9a16 100644 --- a/performance-metrics/Pipfile.lock +++ b/performance-metrics/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d811fa2b7dca8a5be8b2dba79ab7200243b2e10fb65f9ee221623f2710b24372" + "sha256": "6caf5cef09378a9d599cab204d2e7908844301e0a5053aa062fd6e959e3749cb" }, "pipfile-spec": 6, "requires": { @@ -35,53 +35,84 @@ "opentrons-shared-data": { "editable": true, "file": "../shared-data/python", - "markers": "python_version >= '3.8'" + "markers": "python_version >= '3.10'" }, "performance-metrics": { "editable": true, "file": "." }, + "psutil": { + "hashes": [ + "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", + "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0", + "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c", + "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", + "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3", + "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c", + "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", + "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3", + "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", + "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", + "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6", + "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d", + "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c", + "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", + "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132", + "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14", + "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0" + ], + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==6.0.0" + }, "pydantic": { "hashes": [ - "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de", - "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986", - "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55", - "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4", - "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58", - 
"sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3", - "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12", - "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d", - "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7", - "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53", - "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb", - "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51", - "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948", - "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022", - "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed", - "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383", - "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4", - "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b", - "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2", - "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528", - "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf", - "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8", - "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc", - "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f", - "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0", - "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7", - "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c", - "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44", - "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654", - "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0", - "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb", - "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00", - "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1", - "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c", - "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22", - "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0" + "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f", + "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc", + "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b", + "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b", + "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b", + "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e", + "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3", + "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7", + "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227", + "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f", + "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6", + "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab", + "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad", + "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076", + "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681", + 
"sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54", + "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb", + "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7", + "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe", + "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b", + "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab", + "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d", + "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0", + "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75", + "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741", + "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63", + "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd", + "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33", + "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815", + "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7", + "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a", + "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655", + "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773", + "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c", + "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7", + "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3", + "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768", + "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d", + "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688", + "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f", + "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e", + "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991", + "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a" ], "markers": "python_version >= '3.7'", - "version": "==1.10.15" + "version": "==1.10.17" }, "pyrsistent": { "hashes": [ @@ -121,13 +152,20 @@ "markers": "python_version >= '3.8'", "version": "==0.20.0" }, + "systemd-python": { + "hashes": [ + "sha256:4e57f39797fd5d9e2d22b8806a252d7c0106c936039d1e71c8c6b8008e695c0a" + ], + "index": "pypi", + "version": "==235" + }, "typing-extensions": { "hashes": [ - "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", - "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a" + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], "markers": "python_version >= '3.8'", - "version": "==4.11.0" + "version": "==4.12.2" } }, "develop": { @@ -179,11 +217,11 @@ }, "exceptiongroup": { "hashes": [ - "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14", - "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68" + "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad", + "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16" ], "markers": "python_version < '3.11'", - "version": "==1.2.0" + "version": "==1.2.1" }, "flake8": { "hashes": [ @@ -281,11 +319,11 @@ }, "packaging": { "hashes": [ - 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", - "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" ], - "markers": "python_version >= '3.7'", - "version": "==24.0" + "markers": "python_version >= '3.8'", + "version": "==24.1" }, "pathspec": { "hashes": [ @@ -297,19 +335,19 @@ }, "platformdirs": { "hashes": [ - "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068", - "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" + "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee", + "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3" ], "markers": "python_version >= '3.8'", - "version": "==4.2.0" + "version": "==4.2.2" }, "pluggy": { "hashes": [ - "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981", - "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be" + "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" ], "markers": "python_version >= '3.8'", - "version": "==1.4.0" + "version": "==1.5.0" }, "pycodestyle": { "hashes": [ @@ -346,12 +384,12 @@ }, "pytest-asyncio": { "hashes": [ - "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a", - "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f" + "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b", + "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==0.23.6" + "version": "==0.23.7" }, "snowballstemmer": { "hashes": [ @@ -368,13 +406,22 @@ "markers": "python_version < '3.11'", "version": "==2.0.1" }, + "types-psutil": { + "hashes": [ + "sha256:1be027326c42ff51ebd65255a5146f9dc57e5cf8c4f9519a88b3f3f6a7fcd00e", + "sha256:b02f05d2c4141cd5926d82d8b56e4292a4d8f483d8a3400b73edf153834a3c64" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==6.0.0.20240621" + }, "typing-extensions": { "hashes": [ - "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", - "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a" + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], "markers": "python_version >= '3.8'", - "version": "==4.11.0" + "version": "==4.12.2" } } } diff --git a/performance-metrics/src/performance_metrics/__init__.py b/performance-metrics/src/performance_metrics/__init__.py index 8c3b293f814..c9c6b3e6796 100644 --- a/performance-metrics/src/performance_metrics/__init__.py +++ b/performance-metrics/src/performance_metrics/__init__.py @@ -1,6 +1,13 @@ """Opentrons performance metrics library.""" -from .robot_context_tracker import RobotContextTracker -from .types import RobotContextState, SupportsTracking +from ._robot_context_tracker import RobotContextTracker +from ._types import RobotContextState, SupportsTracking +from .system_resource_tracker._config import SystemResourceTrackerConfiguration -__all__ = ["RobotContextTracker", "RobotContextState", "SupportsTracking"] + +__all__ = [ + "RobotContextTracker", + "RobotContextState", + "SupportsTracking", + "SystemResourceTrackerConfiguration", +] diff --git 
a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/_robot_context_tracker.py similarity index 86% rename from performance-metrics/src/performance_metrics/robot_context_tracker.py rename to performance-metrics/src/performance_metrics/_robot_context_tracker.py index 1b32e071827..46236964b36 100644 --- a/performance-metrics/src/performance_metrics/robot_context_tracker.py +++ b/performance-metrics/src/performance_metrics/_robot_context_tracker.py @@ -2,15 +2,15 @@ import inspect from pathlib import Path -import platform -from functools import partial, wraps +from functools import wraps from time import perf_counter_ns import typing from .metrics_store import MetricsStore from .data_shapes import RawContextData, MetricsMetadata -from .types import SupportsTracking, RobotContextState +from ._types import SupportsTracking, RobotContextState +from .util import get_timing_function _UnderlyingFunctionParameters = typing.ParamSpec("_UnderlyingFunctionParameters") _UnderlyingFunctionReturn = typing.TypeVar("_UnderlyingFunctionReturn") @@ -19,24 +19,7 @@ ] -def _get_timing_function() -> typing.Callable[[], int]: - """Returns a timing function for the current platform.""" - time_function: typing.Callable[[], int] - if platform.system() == "Linux": - from time import clock_gettime_ns, CLOCK_REALTIME - - time_function = typing.cast( - typing.Callable[[], int], partial(clock_gettime_ns, CLOCK_REALTIME) - ) - else: - from time import time_ns - - time_function = time_ns - - return time_function - - -timing_function = _get_timing_function() +_timing_function = get_timing_function() class RobotContextTracker(SupportsTracking): @@ -99,7 +82,7 @@ async def async_wrapper( *args: _UnderlyingFunctionParameters.args, **kwargs: _UnderlyingFunctionParameters.kwargs ) -> _UnderlyingFunctionReturn: - function_start_time = timing_function() + function_start_time = _timing_function() duration_start_time = perf_counter_ns() try: result = await func_to_track(*args, **kwargs) @@ -124,7 +107,7 @@ def wrapper( *args: _UnderlyingFunctionParameters.args, **kwargs: _UnderlyingFunctionParameters.kwargs ) -> _UnderlyingFunctionReturn: - function_start_time = timing_function() + function_start_time = _timing_function() duration_start_time = perf_counter_ns() try: diff --git a/performance-metrics/src/performance_metrics/types.py b/performance-metrics/src/performance_metrics/_types.py similarity index 100% rename from performance-metrics/src/performance_metrics/types.py rename to performance-metrics/src/performance_metrics/_types.py diff --git a/performance-metrics/src/performance_metrics/data_shapes.py b/performance-metrics/src/performance_metrics/data_shapes.py index b9a39a2fcea..42f8f8ea804 100644 --- a/performance-metrics/src/performance_metrics/data_shapes.py +++ b/performance-metrics/src/performance_metrics/data_shapes.py @@ -4,7 +4,10 @@ import typing from pathlib import Path -from .types import SupportsCSVStorage, StorableData, RobotContextState +from ._types import SupportsCSVStorage, StorableData, RobotContextState +from .util import get_timing_function + +_timing_function = get_timing_function() @dataclasses.dataclass(frozen=True) @@ -45,6 +48,62 @@ def from_csv_row(cls, row: typing.Sequence[StorableData]) -> SupportsCSVStorage: ) +@dataclasses.dataclass(frozen=True) +class ProcessResourceUsageSnapshot(SupportsCSVStorage): + """Represents process resource usage data. + + Attributes: + - query_time (int): The time in nanoseconds since the process started. 
+    - command (str): The command line of the process.
+    - running_since (float): The process creation time, in seconds since the epoch.
+    - user_cpu_time (float): The user CPU time in seconds.
+    - system_cpu_time (float): The system CPU time in seconds.
+    - memory_percent (float): The memory usage percentage.
+    """
+
+    query_time: int  # nanoseconds
+    command: str
+    running_since: float  # seconds
+    user_cpu_time: float  # seconds
+    system_cpu_time: float  # seconds
+    memory_percent: float
+
+    @classmethod
+    def headers(cls) -> typing.Tuple[str, str, str, str, str, str]:
+        """Returns the headers for the process resource usage data."""
+        return (
+            "query_time",
+            "command",
+            "running_since",
+            "user_cpu_time",
+            "system_cpu_time",
+            "memory_percent",
+        )
+
+    def csv_row(self) -> typing.Tuple[int, str, float, float, float, float]:
+        """Returns the process resource usage data as a CSV row."""
+        return (
+            self.query_time,
+            self.command,
+            self.running_since,
+            self.user_cpu_time,
+            self.system_cpu_time,
+            self.memory_percent,
+        )
+
+    @classmethod
+    def from_csv_row(cls, row: typing.Sequence[StorableData]) -> SupportsCSVStorage:
+        """Returns a ProcessResourceUsageSnapshot object from a CSV row."""
+        return cls(
+            query_time=int(row[0]),
+            command=str(row[1]),
+            running_since=float(row[2]),
+            user_cpu_time=float(row[3]),
+            system_cpu_time=float(row[4]),
+            memory_percent=float(row[5]),
+        )
+
+
 @dataclasses.dataclass(frozen=True)
 class MetricsMetadata:
     """Dataclass to store metadata about the metrics."""
diff --git a/performance-metrics/src/performance_metrics/metrics_store.py b/performance-metrics/src/performance_metrics/metrics_store.py
index 5e12ffeed33..cd2cec74c97 100644
--- a/performance-metrics/src/performance_metrics/metrics_store.py
+++ b/performance-metrics/src/performance_metrics/metrics_store.py
@@ -3,7 +3,7 @@
 import csv
 import typing
 from .data_shapes import MetricsMetadata
-from .types import SupportsCSVStorage
+from ._types import SupportsCSVStorage
 
 T = typing.TypeVar("T", bound=SupportsCSVStorage)
 
@@ -20,6 +20,10 @@ def add(self, context_data: T) -> None:
         """Add data to the store."""
         self._data.append(context_data)
 
+    def add_all(self, context_data: typing.Iterable[T]) -> None:
+        """Add multiple data entries to the store."""
+        self._data.extend(context_data)
+
     def setup(self) -> None:
         """Set up the data store."""
         self.metadata.storage_dir.mkdir(parents=True, exist_ok=True)
@@ -33,5 +37,5 @@ def store(self) -> None:
         self._data.clear()
         rows_to_write = [context_data.csv_row() for context_data in stored_data]
         with open(self.metadata.data_file_location, "a") as storage_file:
-            writer = csv.writer(storage_file)
+            writer = csv.writer(storage_file, quoting=csv.QUOTE_ALL)
             writer.writerows(rows_to_write)
diff --git a/performance-metrics/src/performance_metrics/system_resource_tracker/__init__.py b/performance-metrics/src/performance_metrics/system_resource_tracker/__init__.py
new file mode 100644
index 00000000000..5baed3a1683
--- /dev/null
+++ b/performance-metrics/src/performance_metrics/system_resource_tracker/__init__.py
@@ -0,0 +1 @@
+"""System Resource Tracker package."""
diff --git a/performance-metrics/src/performance_metrics/system_resource_tracker/__main__.py b/performance-metrics/src/performance_metrics/system_resource_tracker/__main__.py
new file mode 100644
index 00000000000..ecfcbf51840
--- /dev/null
+++ b/performance-metrics/src/performance_metrics/system_resource_tracker/__main__.py
@@ -0,0 +1,37 @@
+"""System resource tracker."""
+
+import logging
+import time
+from ._logging_config import log_init
+from ._config import SystemResourceTrackerConfiguration +from ._system_resource_tracker import _SystemResourceTracker + +log_init() +logger = logging.getLogger(__name__) + +log_level = SystemResourceTrackerConfiguration.parse_logging_level() + +if log_level is not None: + logger.setLevel(log_level) + + +if __name__ == "__main__": + logger.info("Starting system resource tracker...") + config = SystemResourceTrackerConfiguration() + tracker = _SystemResourceTracker(config) + + try: + while True: + refreshed_config = SystemResourceTrackerConfiguration.from_env() + + if tracker.config.logging_level_update_needed(refreshed_config): + logger.setLevel(refreshed_config.logging_level) + + tracker.update_changes_to_config(refreshed_config) + tracker.get_and_store_system_data_snapshots() + + time.sleep(tracker.config.refresh_interval) + except Exception as e: + logger.error("Exception occurred: %s", str(e)) + finally: + logger.info("System resource tracker is stopping.") diff --git a/performance-metrics/src/performance_metrics/system_resource_tracker/_config.py b/performance-metrics/src/performance_metrics/system_resource_tracker/_config.py new file mode 100644 index 00000000000..5133244c87e --- /dev/null +++ b/performance-metrics/src/performance_metrics/system_resource_tracker/_config.py @@ -0,0 +1,232 @@ +import os +import typing +import dataclasses +from pathlib import Path, PurePosixPath +import logging + + +logger = logging.getLogger(__name__) + + +def default_filters() -> typing.Tuple[str, str]: + """Get default filters.""" + return ("/opt/opentrons*", "python3*") + + +@dataclasses.dataclass(frozen=True) +class SystemResourceTrackerConfiguration: + """Environment variables for the system resource tracker.""" + + SHOULD_TRACK_ENV_VAR_NAME: typing.Final[str] = "OT_SYSTEM_RESOURCE_TRACKER_ENABLED" + PROCESS_FILTERS_ENV_VAR_NAME: typing.Final[ + str + ] = "OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS" + REFRESH_INTERVAL_ENV_VAR_NAME: typing.Final[ + str + ] = "OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL" + STORAGE_DIR_ENV_VAR_NAME: typing.Final[ + str + ] = "OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR" + LOGGING_LEVEL_ENV_VAR_NAME: typing.Final[ + str + ] = "OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL" + + should_track: bool = False + process_filters: typing.Tuple[str, ...] = dataclasses.field( + default_factory=default_filters + ) + refresh_interval: float = 10.0 + storage_dir: Path = Path("/data/performance_metrics_data/") + logging_level: str = "INFO" + + @staticmethod + def log_missing_env_var(env_var: str) -> None: + """Log a warning if an environment variable is missing. + + Args: + env_var (str): The name of the missing environment variable. + """ + logger.warning( + f"Environment variable {env_var} is not set. Using default value." + ) + + @classmethod + def parse_should_track(cls) -> bool | None: + """Parse the should track environment variable. + + Returns: + bool | None: The parsed value or None if the environment variable is not set. + """ + should_track_env_var = os.environ.get(cls.SHOULD_TRACK_ENV_VAR_NAME) + + if should_track_env_var is None: + cls.log_missing_env_var(cls.SHOULD_TRACK_ENV_VAR_NAME) + return None + + coerced_should_track = should_track_env_var.lower() == "true" + logger.info(f"Should track: {coerced_should_track}") + return coerced_should_track + + @classmethod + def parse_process_filters(cls) -> typing.Tuple[str, ...] | None: + """Parse the process filters environment variable. + + Returns: + typing.Tuple[str, ...] 
| None: The parsed value or None if the environment variable is not set. + """ + process_filters_env_var = os.environ.get(cls.PROCESS_FILTERS_ENV_VAR_NAME) + + if process_filters_env_var is None: + cls.log_missing_env_var(cls.PROCESS_FILTERS_ENV_VAR_NAME) + return None + + coerced_process_filters = tuple( + [ + filter.strip() + for filter in process_filters_env_var.split(",") + if filter.strip() != "" + ] + ) + + if len(coerced_process_filters) == 0: + logger.warning( + f"{cls.PROCESS_FILTERS_ENV_VAR_NAME} environment variable must be a comma-separated list of process names to monitor. Globbing is supported. Using default value." + ) + return None + + logger.debug(f"Process filters: {coerced_process_filters}") + return coerced_process_filters + + @classmethod + def parse_refresh_interval(cls) -> float | None: + """Parse the refresh interval environment variable. + + Returns: + float | None: The parsed value or None if the environment variable is not set. + """ + refresh_interval = os.environ.get(cls.REFRESH_INTERVAL_ENV_VAR_NAME) + + if refresh_interval is None: + cls.log_missing_env_var(cls.REFRESH_INTERVAL_ENV_VAR_NAME) + return None + + try: + coerced_refresh_interval = float(refresh_interval) + except ValueError: + logger.warning( + f"{cls.REFRESH_INTERVAL_ENV_VAR_NAME} environment variable must be a number. Using default value." + ) + return None + + coerced_refresh_interval = float(refresh_interval) + + if coerced_refresh_interval < 1.0: + logger.warning( + f"{cls.REFRESH_INTERVAL_ENV_VAR_NAME} environment variable must be greater than or equal to 1.0. Using default value." + ) + return None + + logger.debug(f"Refresh interval: {coerced_refresh_interval}") + return coerced_refresh_interval + + @classmethod + def parse_storage_dir(cls) -> PurePosixPath | None: + """Parse the storage directory environment variable. + + Returns: + PurePosixPath | None: The parsed value or None if the environment variable is not set. + """ + storage_dir = os.environ.get(cls.STORAGE_DIR_ENV_VAR_NAME) + + if storage_dir is None: + cls.log_missing_env_var(cls.STORAGE_DIR_ENV_VAR_NAME) + return None + + coerced_storage_dir = PurePosixPath(storage_dir) + + if not coerced_storage_dir.is_absolute(): + logger.warning( + f"{cls.STORAGE_DIR_ENV_VAR_NAME} environment variable must be an absolute path to a directory.\n" + f"You specified: {coerced_storage_dir}.\n" + f"Is absolute: {coerced_storage_dir.is_absolute()}.\n" + "Using default value." + ) + return None + + logger.debug(f"Storage dir: {coerced_storage_dir}") + return coerced_storage_dir + + @classmethod + def parse_logging_level(cls) -> str | None: + """Parse the logging level environment variable. + + Returns: + str | None: The parsed value or None if the environment variable is not set. + """ + logging_level = os.environ.get(cls.LOGGING_LEVEL_ENV_VAR_NAME) + + if logging_level is None: + cls.log_missing_env_var(cls.LOGGING_LEVEL_ENV_VAR_NAME) + return None + + if logging_level not in logging._nameToLevel: + logger.warning( + f"{cls.LOGGING_LEVEL_ENV_VAR_NAME} environment variable must be one of {list(logging._nameToLevel.keys())}. Using default value." + ) + return None + + logger.setLevel(logging._nameToLevel[logging_level]) + + logger.debug(f"Logging level: {logging_level}") + return logging_level + + @classmethod + def from_env(cls) -> "SystemResourceTrackerConfiguration": + """Create a SystemResourceTrackerConfiguration instance from environment variables. 
+ + Returns: + SystemResourceTrackerConfiguration: An instance of SystemResourceTrackerConfiguration. + """ + kwargs: typing.Dict[str, typing.Any] = {} + + # If using default do not add to kwargs + + should_track = cls.parse_should_track() + process_filters = cls.parse_process_filters() + refresh_interval = cls.parse_refresh_interval() + storage_dir = cls.parse_storage_dir() + logging_level = cls.parse_logging_level() + + if should_track is not None: + kwargs["should_track"] = should_track + if process_filters is not None: + kwargs["process_filters"] = process_filters + if refresh_interval is not None: + kwargs["refresh_interval"] = refresh_interval + if storage_dir is not None: + kwargs["storage_dir"] = storage_dir + if logging_level is not None: + kwargs["logging_level"] = logging_level + + return cls(**kwargs) + + def to_env(self) -> None: + """Set the environment variables for the system resource tracker.""" + os.environ[self.SHOULD_TRACK_ENV_VAR_NAME] = str(self.should_track).lower() + os.environ[self.PROCESS_FILTERS_ENV_VAR_NAME] = ",".join(self.process_filters) + os.environ[self.REFRESH_INTERVAL_ENV_VAR_NAME] = str(self.refresh_interval) + os.environ[self.STORAGE_DIR_ENV_VAR_NAME] = str(self.storage_dir) + os.environ[self.LOGGING_LEVEL_ENV_VAR_NAME] = self.logging_level + + def logging_level_update_needed( + self, new_config: "SystemResourceTrackerConfiguration" + ) -> bool: + """Check if the logging level needs to be updated. + + Args: + new_config (SystemResourceTrackerConfiguration): The new configuration to compare with. + + Returns: + bool: True if the logging level needs to be updated, False otherwise. + """ + return self.logging_level != new_config.logging_level diff --git a/performance-metrics/src/performance_metrics/system_resource_tracker/_logging_config.py b/performance-metrics/src/performance_metrics/system_resource_tracker/_logging_config.py new file mode 100644 index 00000000000..5798150e59f --- /dev/null +++ b/performance-metrics/src/performance_metrics/system_resource_tracker/_logging_config.py @@ -0,0 +1,35 @@ +"""System resource tracker logging config.""" + +import logging +import logging.config + + +def log_init(level_value: int = logging.INFO) -> None: + """Initialize logging for the system resource tracker.""" + logging_config = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + }, + }, + "handlers": { + "console": { + "level": level_value, + "class": "logging.StreamHandler", + "formatter": "standard", + }, + "journal": { + "level": level_value, + "class": "systemd.journal.JournalHandler", + "formatter": "standard", + }, + }, + "root": { + "handlers": ["console", "journal"], + "level": level_value, + }, + } + + logging.config.dictConfig(logging_config) diff --git a/performance-metrics/src/performance_metrics/system_resource_tracker/_system_resource_tracker.py b/performance-metrics/src/performance_metrics/system_resource_tracker/_system_resource_tracker.py new file mode 100644 index 00000000000..fd14d4a0a8b --- /dev/null +++ b/performance-metrics/src/performance_metrics/system_resource_tracker/_system_resource_tracker.py @@ -0,0 +1,109 @@ +"""System resource tracker.""" + +import logging +import typing +import psutil +import fnmatch + +from ..util import format_command, get_timing_function +from ..data_shapes import ProcessResourceUsageSnapshot, MetricsMetadata +from ..metrics_store import MetricsStore +from ._config import SystemResourceTrackerConfiguration 
+
+_timing_function = get_timing_function()
+
+logger = logging.getLogger(__name__)
+
+
+class _SystemResourceTracker:
+    """Tracks system resource usage."""
+
+    def __init__(self, config: SystemResourceTrackerConfiguration) -> None:
+        """Initialize the tracker."""
+        self.config = config
+        self._processes: typing.List[
+            psutil.Process
+        ]  # intentionally not public as process.kill can be called
+        self._store = MetricsStore[ProcessResourceUsageSnapshot](
+            MetricsMetadata(
+                name="system_resource_data",
+                storage_dir=self.config.storage_dir,
+                headers=ProcessResourceUsageSnapshot.headers(),
+            )
+        )
+        self._store.setup()
+        self.refresh_processes()
+
+    def refresh_processes(self) -> None:
+        """Filter processes by their command line path with globbing support."""
+        # Note that psutil.process_iter caches the list of processes.
+        # As long as a process is alive, it will be cached and reused on the next call to process_iter.
+
+        # Make sure that when calling process_iter you specify at least one attribute in the attrs list.
+        # Otherwise, info for all processes will be retrieved, which is slow.
+        # Ideally, specify only the attributes you want to filter on.
+
+        # See https://psutil.readthedocs.io/en/latest/#psutil.process_iter
+
+        processes = []
+        for process in psutil.process_iter(attrs=["cmdline"]):
+            try:
+                process_cmdline: typing.List[str] | None = process.info.get("cmdline")
+            except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
+                continue
+
+            if not process_cmdline:
+                continue
+
+            formatted_cmdline: str = format_command(process_cmdline)
+
+            if not formatted_cmdline:
+                continue
+
+            if any(
+                fnmatch.fnmatch(formatted_cmdline, pattern)
+                for pattern in self.config.process_filters
+            ):
+                processes.append(process)
+
+        self._processes = processes
+
+    @property
+    def snapshots(self) -> typing.List[ProcessResourceUsageSnapshot]:
+        """Get snapshots."""
+        snapshots: typing.List[ProcessResourceUsageSnapshot] = []
+        for process in self._processes:
+            # It is very important to use the oneshot context manager when querying for
+            # process resource usage. Doing this ensures that all the process data is
+            # only queried once per process instead of once per metric captured.
+ + # https://psutil.readthedocs.io/en/latest/#psutil.Process.oneshot + with process.oneshot(): + cpu_time = process.cpu_times() + snapshots.append( + ProcessResourceUsageSnapshot( + query_time=_timing_function(), + command=format_command(process.cmdline()), + running_since=process.create_time(), + system_cpu_time=cpu_time.system, + user_cpu_time=cpu_time.user, + memory_percent=process.memory_percent(), + ) + ) + + return snapshots + + def get_and_store_system_data_snapshots(self) -> None: + """Get and store system data snapshots.""" + if self.config.should_track: + self.refresh_processes() + self._store.add_all(self.snapshots) + self._store.store() + + def update_changes_to_config( + self, new_config: SystemResourceTrackerConfiguration + ) -> None: + """Update config.""" + if new_config != self.config: + self.config = new_config + logger.info("Config updated: %s", new_config) diff --git a/performance-metrics/src/performance_metrics/util.py b/performance-metrics/src/performance_metrics/util.py new file mode 100644 index 00000000000..c39a0c82c35 --- /dev/null +++ b/performance-metrics/src/performance_metrics/util.py @@ -0,0 +1,27 @@ +"""Utility functions.""" + +import typing +import platform +from functools import partial + + +def format_command(cmd_list: typing.List[str]) -> str: + """Format the command line for the given process.""" + return " ".join([cmd.strip() for cmd in cmd_list]).strip() + + +def get_timing_function() -> typing.Callable[[], int]: + """Returns a timing function for the current platform.""" + time_function: typing.Callable[[], int] + if platform.system() == "Linux": + from time import clock_gettime_ns, CLOCK_REALTIME + + time_function = typing.cast( + typing.Callable[[], int], partial(clock_gettime_ns, CLOCK_REALTIME) + ) + else: + from time import time_ns + + time_function = time_ns + + return time_function diff --git a/performance-metrics/tests/performance_metrics/system_resource_tracker/test_config.py b/performance-metrics/tests/performance_metrics/system_resource_tracker/test_config.py new file mode 100644 index 00000000000..453f0c63694 --- /dev/null +++ b/performance-metrics/tests/performance_metrics/system_resource_tracker/test_config.py @@ -0,0 +1,300 @@ +"""Test system resource tracker configuration.""" + +import os +import pytest +from pathlib import Path, PurePosixPath +import logging +from performance_metrics import SystemResourceTrackerConfiguration + + +@pytest.fixture +def mock_env_vars(monkeypatch: pytest.MonkeyPatch) -> None: + """Fixture to set mock environment variables.""" + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_ENABLED", "true") + monkeypatch.setenv( + "OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS", "/opt/opentrons*,python3*" + ) + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL", "5.0") + monkeypatch.setenv( + "OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR", "/data/performance_metrics_data" + ) + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL", "DEBUG") + + +@pytest.fixture +def clear_env_vars(monkeypatch: pytest.MonkeyPatch) -> None: + """Fixture to clear environment variables.""" + monkeypatch.delenv("OT_SYSTEM_RESOURCE_TRACKER_ENABLED", raising=False) + monkeypatch.delenv("OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS", raising=False) + monkeypatch.delenv("OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL", raising=False) + monkeypatch.delenv("OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR", raising=False) + monkeypatch.delenv("OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL", raising=False) + + +@pytest.mark.usefixtures("mock_env_vars") 
+def test_parse_should_track() -> None: + """Test parsing of the should track environment variable.""" + assert SystemResourceTrackerConfiguration.parse_should_track() is True + + +@pytest.mark.usefixtures("clear_env_vars") +def test_parse_should_track_missing(caplog: pytest.LogCaptureFixture) -> None: + """Test parsing of the should track environment variable when it is missing. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_should_track() is None + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_ENABLED is not set. Using default value." + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_env_vars") +def test_parse_process_filters() -> None: + """Test parsing of the process filters environment variable.""" + expected_filters = ("/opt/opentrons*", "python3*") + assert ( + SystemResourceTrackerConfiguration.parse_process_filters() == expected_filters + ) + + +@pytest.mark.usefixtures("clear_env_vars") +def test_parse_process_filters_missing(caplog: pytest.LogCaptureFixture) -> None: + """Test parsing of the process filters environment variable when it is missing. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_process_filters() is None + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS is not set. Using default value." + in caplog.text + ) + + +def test_parse_process_filters_empty( + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + """Test parsing of the process filters environment variable when it is empty. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to modify environment variables. + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS", "") + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_process_filters() is None + assert ( + "OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS environment variable must be a comma-separated list of process names to monitor. Globbing is supported. Using default value." + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_env_vars") +def test_parse_refresh_interval() -> None: + """Test parsing of the refresh interval environment variable.""" + assert SystemResourceTrackerConfiguration.parse_refresh_interval() == 5.0 + + +@pytest.mark.usefixtures("clear_env_vars") +def test_parse_refresh_interval_missing(caplog: pytest.LogCaptureFixture) -> None: + """Test parsing of the refresh interval environment variable when it is missing. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_refresh_interval() is None + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL is not set. Using default value." + in caplog.text + ) + + +def test_parse_refresh_interval_too_small( + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + """Test parsing of the refresh interval environment variable when it is too small. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to modify environment variables. + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. 
+ """ + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL", "0.5") + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_refresh_interval() is None + assert ( + "OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL environment variable must be greater than or equal to 1.0. Using default value." + in caplog.text + ) + + +def test_parse_refresh_interval_non_numeric( + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + """Test parsing of the refresh interval environment variable when it is not numeric. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to modify environment variables. + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL", "not_a_number") + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_refresh_interval() is None + assert ( + "OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL environment variable must be a number. Using default value." + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_env_vars") +def test_parse_storage_dir() -> None: + """Test parsing of the storage directory environment variable.""" + expected_path = PurePosixPath("/data/performance_metrics_data") + assert SystemResourceTrackerConfiguration.parse_storage_dir() == expected_path + + +@pytest.mark.usefixtures("clear_env_vars") +def test_parse_storage_dir_missing(caplog: pytest.LogCaptureFixture) -> None: + """Test parsing of the storage directory environment variable when it is missing. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_storage_dir() is None + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR is not set. Using default value." + in caplog.text + ) + + +def test_parse_storage_dir_invalid( + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + """Test parsing of the storage directory environment variable when it is invalid. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to modify environment variables. + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR", "relative/path") + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_storage_dir() is None + assert ( + "OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR environment variable must be an absolute path to a directory.\nYou specified: relative/path.\nIs absolute: False.\nUsing default value." + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_env_vars") +def test_parse_logging_level() -> None: + """Test parsing of the logging level environment variable.""" + assert SystemResourceTrackerConfiguration.parse_logging_level() == "DEBUG" + + +@pytest.mark.usefixtures("clear_env_vars") +def test_parse_logging_level_missing(caplog: pytest.LogCaptureFixture) -> None: + """Test parsing of the logging level environment variable when it is missing. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_logging_level() is None + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL is not set. Using default value." 
+ in caplog.text + ) + + +def test_parse_logging_level_invalid( + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + """Test parsing of the logging level environment variable when it is invalid. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to modify environment variables. + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + monkeypatch.setenv("OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL", "INVALID") + with caplog.at_level(logging.WARNING): + assert SystemResourceTrackerConfiguration.parse_logging_level() is None + assert ( + "OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL environment variable must be one of ['CRITICAL', 'FATAL', 'ERROR', 'WARN', 'WARNING', 'INFO', 'DEBUG', 'NOTSET']. Using default value." + in caplog.text + ) + + +@pytest.mark.usefixtures("mock_env_vars") +def test_from_env() -> None: + """Test the creation of a SystemResourceTrackerConfiguration object from environment variables.""" + config = SystemResourceTrackerConfiguration.from_env() + assert config.should_track is True + assert config.process_filters == ("/opt/opentrons*", "python3*") + assert config.refresh_interval == 5.0 + assert config.storage_dir == Path("/data/performance_metrics_data") + assert config.logging_level == "DEBUG" + + +@pytest.mark.usefixtures("clear_env_vars") +def test_from_env_with_defaults(caplog: pytest.LogCaptureFixture) -> None: + """Test the creation of a SystemResourceTrackerConfiguration object from environment variables with default values. + + Args: + caplog (pytest.LogCaptureFixture): Fixture to capture log messages. + """ + with caplog.at_level(logging.WARNING): + config = SystemResourceTrackerConfiguration.from_env() + assert config.should_track is False + assert config.process_filters == ("/opt/opentrons*", "python3*") + assert config.refresh_interval == 10.0 + assert config.storage_dir == Path("/data/performance_metrics_data") + assert config.logging_level == "INFO" + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_ENABLED is not set. Using default value." + in caplog.text + ) + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS is not set. Using default value." + in caplog.text + ) + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL is not set. Using default value." + in caplog.text + ) + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR is not set. Using default value." + in caplog.text + ) + assert ( + "Environment variable OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL is not set. Using default value." 
+ in caplog.text + ) + + +def test_to_env() -> None: + """Test the conversion of a SystemResourceTrackerConfiguration object to environment variables.""" + config = SystemResourceTrackerConfiguration( + should_track=True, + process_filters=("/opt/opentrons*", "python3*"), + refresh_interval=5.0, + storage_dir=Path("/data/performance_metrics_data"), + logging_level="DEBUG", + ) + config.to_env() + assert os.environ["OT_SYSTEM_RESOURCE_TRACKER_ENABLED"] == "true" + assert ( + os.environ["OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS"] + == "/opt/opentrons*,python3*" + ) + assert os.environ["OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL"] == "5.0" + assert ( + os.environ["OT_SYSTEM_RESOURCE_TRACKER_STORAGE_DIR"] + == "/data/performance_metrics_data" + ) + assert os.environ["OT_SYSTEM_RESOURCE_TRACKER_LOGGING_LEVEL"] == "DEBUG" diff --git a/performance-metrics/tests/performance_metrics/system_resource_tracker/test_system_resource_tracker.py b/performance-metrics/tests/performance_metrics/system_resource_tracker/test_system_resource_tracker.py new file mode 100644 index 00000000000..170677b7bf2 --- /dev/null +++ b/performance-metrics/tests/performance_metrics/system_resource_tracker/test_system_resource_tracker.py @@ -0,0 +1,50 @@ +"""Test process resource tracker.""" + +import typing +import psutil +from pathlib import Path +from unittest.mock import patch, MagicMock +from performance_metrics.system_resource_tracker._system_resource_tracker import ( + _SystemResourceTracker, +) +from performance_metrics import ( + SystemResourceTrackerConfiguration, +) + + +def mock_process_iter( + attrs: typing.Tuple[str, ...] +) -> typing.Tuple[psutil.Process, ...]: + """Mock psutil.process_iter to return a list of mocked processes.""" + + def create_mock_process( + pid: int, cmdline: typing.Tuple[str, ...] 
+ ) -> psutil.Process: + mock_proc = MagicMock(spec=psutil.Process) + mock_proc.pid = pid + mock_proc.info = {"cmdline": cmdline} + mock_proc.cmdline.return_value = cmdline + return mock_proc + + return ( + create_mock_process(1, ("python", "my_script.py")), + create_mock_process(2, ("bash", "another_script.sh")), + create_mock_process(3, ("python", "yet_another_script.py")), + create_mock_process(4, ("java", "my_java_app.jar")), + ) + + +@patch("psutil.process_iter", mock_process_iter) +def test_process_filtering(tmp_path: Path) -> None: + """Test process filtering.""" + config = SystemResourceTrackerConfiguration( + process_filters=("*my_script.py", "*another_script*"), storage_dir=tmp_path + ) + tracker = _SystemResourceTracker(config) + + tracker.refresh_processes() + snapshots = tracker.snapshots + assert len(snapshots) == 3 + assert snapshots[0].command == "python my_script.py" + assert snapshots[1].command == "bash another_script.sh" + assert snapshots[2].command == "python yet_another_script.py" diff --git a/performance-metrics/tests/performance_metrics/test_metrics_store.py b/performance-metrics/tests/performance_metrics/test_metrics_store.py index 6d3afe19176..9a47c8ffbc8 100644 --- a/performance-metrics/tests/performance_metrics/test_metrics_store.py +++ b/performance-metrics/tests/performance_metrics/test_metrics_store.py @@ -3,7 +3,7 @@ from pathlib import Path from time import sleep -from performance_metrics.robot_context_tracker import RobotContextTracker +from performance_metrics._robot_context_tracker import RobotContextTracker from performance_metrics.data_shapes import RawContextData # Corrected times in seconds @@ -40,7 +40,9 @@ def analyzing_protocol() -> None: lines = file.readlines() assert len(lines) == 3, "All stored data should be written to the file." - split_lines: list[list[str]] = [line.strip().split(",") for line in lines] + split_lines: list[list[str]] = [ + line.replace('"', "").strip().split(",") for line in lines + ] assert all( RawContextData.from_csv_row(line) for line in split_lines ), "All lines should be valid RawContextData instances." 
diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py index 447e750b19d..365df4586e6 100644 --- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py +++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -1,9 +1,9 @@ -"""Tests for the RobotContextTracker class in performance_metrics.robot_context_tracker.""" +"""Tests for the RobotContextTracker class in performance_metrics._robot_context_tracker.""" import asyncio from pathlib import Path import pytest -from performance_metrics.robot_context_tracker import RobotContextTracker +from performance_metrics._robot_context_tracker import RobotContextTracker from time import sleep, time_ns from unittest.mock import patch @@ -273,7 +273,7 @@ def error_prone_operation() -> None: @patch( - "performance_metrics.robot_context_tracker._get_timing_function", + "performance_metrics.util.get_timing_function", return_value=time_ns, ) def test_using_non_linux_time_functions(tmp_path: Path) -> None: diff --git a/performance-metrics/tests/performance_metrics/test_util.py b/performance-metrics/tests/performance_metrics/test_util.py new file mode 100644 index 00000000000..34630c2889f --- /dev/null +++ b/performance-metrics/tests/performance_metrics/test_util.py @@ -0,0 +1,22 @@ +"""Test utility functions.""" + +import typing +import pytest +from performance_metrics.util import format_command + + +@pytest.mark.parametrize( + "cmd_list, expected", + [ + (["python", "my_script.py"], "python my_script.py"), + (["bash", "another_script.sh"], "bash another_script.sh"), + (["python", "yet_another_script.py"], "python yet_another_script.py"), + (["java", "my_java_app.jar"], "java my_java_app.jar"), + (["some-command", '"foo bar"', "baz"], 'some-command "foo bar" baz'), + (["some-command", '"foo', ' bar"', "baz"], 'some-command "foo bar" baz'), + (["some-command", '"foo', 'bar"', "baz"], 'some-command "foo bar" baz'), + ], +) +def test_format_command(cmd_list: typing.List[str], expected: str) -> None: + """Test format_command.""" + assert format_command(cmd_list) == expected
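
To show how the configuration surface added in this changeset fits together, here is a minimal usage sketch. It is illustrative only and not part of the diff above; it assumes the performance-metrics package from this branch is installed, and it exercises only names introduced in the changeset (SystemResourceTrackerConfiguration, its OT_SYSTEM_RESOURCE_TRACKER_* environment variables, and from_env).

# Illustrative sketch -- not part of the changeset above.
import os

from performance_metrics import SystemResourceTrackerConfiguration

# Enable tracking and override a couple of defaults via the environment
# variables parsed by SystemResourceTrackerConfiguration.from_env().
os.environ["OT_SYSTEM_RESOURCE_TRACKER_ENABLED"] = "true"
os.environ["OT_SYSTEM_RESOURCE_TRACKER_PROCESS_FILTERS"] = "/opt/opentrons*,python3*"
os.environ["OT_SYSTEM_RESOURCE_TRACKER_REFRESH_INTERVAL"] = "5.0"

config = SystemResourceTrackerConfiguration.from_env()
assert config.should_track is True
assert config.refresh_interval == 5.0

# The long-running tracker itself is started via the module entry point
# added in __main__.py:
#   python3 -m performance_metrics.system_resource_tracker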