From afa9c72ce96a84b9b7fb92279cc56a3d62b8eca6 Mon Sep 17 00:00:00 2001
From: Erki
Date: Wed, 1 May 2024 13:25:07 +0300
Subject: [PATCH] Initial commit

---
 .gitignore                         | 160 +++++++++++++++++++++++++++++
 LICENSE                            |  21 ++++
 agent.py                           |  66 ++++++++++++
 api/__init__.py                    |   4 +
 api/protos/sys-mon-agent-api.proto |  48 +++++++++
 api/sys_mon_agent_api_pb2.py       |  40 ++++++++
 api/sys_mon_agent_api_pb2.pyi      |  70 +++++++++++++
 api/sys_mon_agent_api_pb2_grpc.py  | 100 ++++++++++++++++++
 storage-tests.py                   |  19 ++++
 storage/__init__.py                |   1 +
 storage/monitor.py                 |  31 ++++++
 sys-mon-agent.py                   |  63 ++++++++++++
 sys-mon-client-tests.py            |  31 ++++++
 systemd-tests.py                   |  20 ++++
 systemd/__init__.py                |   1 +
 systemd/monitor.py                 |  86 ++++++++++++++
 16 files changed, 761 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 LICENSE
 create mode 100644 agent.py
 create mode 100644 api/__init__.py
 create mode 100644 api/protos/sys-mon-agent-api.proto
 create mode 100644 api/sys_mon_agent_api_pb2.py
 create mode 100644 api/sys_mon_agent_api_pb2.pyi
 create mode 100644 api/sys_mon_agent_api_pb2_grpc.py
 create mode 100644 storage-tests.py
 create mode 100644 storage/__init__.py
 create mode 100644 storage/monitor.py
 create mode 100644 sys-mon-agent.py
 create mode 100644 sys-mon-client-tests.py
 create mode 100644 systemd-tests.py
 create mode 100644 systemd/__init__.py
 create mode 100644 systemd/monitor.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..68bc17f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,160 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..44400cf
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Rusted Skull
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/agent.py b/agent.py
new file mode 100644
index 0000000..f2b339a
--- /dev/null
+++ b/agent.py
@@ -0,0 +1,66 @@
+import asyncio
+import logging
+import platform
+
+from typing import Optional
+
+import api
+import grpc
+import systemd
+import storage
+
+from google.protobuf.empty_pb2 import Empty
+
+class AgentApi(api.AgentServicer):
+    def __init__(self) -> None:
+        super().__init__()
+
+        self._systemd_monitor: Optional[systemd.Monitor] = None
+        self._storage_monitor: Optional[storage.Monitor] = None
+
+    async def Configure(self,
+                        request: api.AgentConfiguration,
+                        context: grpc.aio.ServicerContext) -> api.AgentConfigurationResponse:
+
+        if self._systemd_monitor:
+            del self._systemd_monitor
+            self._systemd_monitor = None
+        if self._storage_monitor:
+            del self._storage_monitor
+            self._storage_monitor = None
+
+        try:
+            if len(request.services_to_monitor) > 0:
+                self._systemd_monitor = systemd.Monitor(request.services_to_monitor)
+                await self._systemd_monitor.initialize()
+
+            if len(request.storage_to_monitor) > 0:
+                self._storage_monitor = storage.Monitor(request.storage_to_monitor)
+                await self._storage_monitor.initialize()
+        except Exception as e:
+            context.set_code(grpc.StatusCode.INTERNAL)
+            context.set_details(f"{e}")
+            raise RuntimeError("Error encountered while servicing API.")
+
+        return api.AgentConfigurationResponse(hostname=platform.node())
+
+    async def Poll(self,
+                   request: Empty,
+                   context: grpc.aio.ServicerContext) -> api.MonitoringStats:
+
+        try:
+            if self._systemd_monitor:
+                services = await self._systemd_monitor.poll()
+            else:
+                services = None
+
+            if self._storage_monitor:
+                storage_stats = await self._storage_monitor.poll()
+            else:
+                storage_stats = None
+
+            return api.MonitoringStats(services=services, storage=storage_stats)
+        except Exception as e:
+            context.set_code(grpc.StatusCode.INTERNAL)
+            context.set_details(f"{e}")
+            raise RuntimeError("Error encountered while servicing API.")
diff --git a/api/__init__.py b/api/__init__.py
new file mode 100644
index 0000000..64e386d
--- /dev/null
+++ b/api/__init__.py
@@ -0,0 +1,4 @@
+import grpc
+
+from .sys_mon_agent_api_pb2 import AgentConfiguration, AgentConfigurationResponse, MonitoringStats, MonitoredServiceState, MonitoredService, MonitoredStorage
+from .sys_mon_agent_api_pb2_grpc import AgentServicer, AgentStub, add_AgentServicer_to_server
diff --git a/api/protos/sys-mon-agent-api.proto b/api/protos/sys-mon-agent-api.proto
new file mode 100644
index 0000000..5243c46
--- /dev/null
+++ b/api/protos/sys-mon-agent-api.proto
@@ -0,0 +1,48 @@
+syntax = "proto3";
+
+package sys_mon_agent_api;
+
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/empty.proto";
+
+service Agent {
+    rpc Configure(AgentConfiguration) returns (AgentConfigurationResponse);
+    rpc Poll(google.protobuf.Empty) returns (MonitoringStats);
+}
+
+message AgentConfiguration {
+    repeated string services_to_monitor = 2;
+    repeated string storage_to_monitor = 3;
+}
+
+message AgentConfigurationResponse {
+    string hostname = 1;
+}
+
+message MonitoringStats {
+    repeated MonitoredService services = 1;
+    repeated MonitoredStorage storage = 2;
+}
+
+enum MonitoredServiceState {
+    ACTIVE = 0;
+    RELOADING = 1;
+    INACTIVE = 2;
+    FAILED = 3;
+    ACTIVATING = 4;
+    DEACTIVATING = 5;
+}
+
+message MonitoredService {
+    string name = 1;
+    MonitoredServiceState state = 2;
+    bool result_success = 3;
+    optional google.protobuf.Timestamp main_started = 4;
+    optional google.protobuf.Timestamp main_exited = 5;
+}
+
+message MonitoredStorage {
+    string name = 1;
+    bool smart_pass = 2;
+    bool present = 3;
+}
diff --git a/api/sys_mon_agent_api_pb2.py b/api/sys_mon_agent_api_pb2.py
new file mode 100644
index 0000000..79718a2
--- /dev/null
+++ b/api/sys_mon_agent_api_pb2.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: sys-mon-agent-api.proto
+# Protobuf Python Version: 4.25.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17sys-mon-agent-api.proto\x12\x11sys_mon_agent_api\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\"M\n\x12\x41gentConfiguration\x12\x1b\n\x13services_to_monitor\x18\x02 \x03(\t\x12\x1a\n\x12storage_to_monitor\x18\x03 \x03(\t\".\n\x1a\x41gentConfigurationResponse\x12\x10\n\x08hostname\x18\x01 \x01(\t\"~\n\x0fMonitoringStats\x12\x35\n\x08services\x18\x01 \x03(\x0b\x32#.sys_mon_agent_api.MonitoredService\x12\x34\n\x07storage\x18\x02 \x03(\x0b\x32#.sys_mon_agent_api.MonitoredStorage\"\xff\x01\n\x10MonitoredService\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x05state\x18\x02 \x01(\x0e\x32(.sys_mon_agent_api.MonitoredServiceState\x12\x16\n\x0eresult_success\x18\x03 \x01(\x08\x12\x35\n\x0cmain_started\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x88\x01\x01\x12\x34\n\x0bmain_exited\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x88\x01\x01\x42\x0f\n\r_main_startedB\x0e\n\x0c_main_exited\"E\n\x10MonitoredStorage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nsmart_pass\x18\x02 \x01(\x08\x12\x0f\n\x07present\x18\x03 \x01(\x08*n\n\x15MonitoredServiceState\x12\n\n\x06\x41\x43TIVE\x10\x00\x12\r\n\tRELOADING\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nACTIVATING\x10\x04\x12\x10\n\x0c\x44\x45\x41\x43TIVATING\x10\x05\x32\xae\x01\n\x05\x41gent\x12\x61\n\tConfigure\x12%.sys_mon_agent_api.AgentConfiguration\x1a-.sys_mon_agent_api.AgentConfigurationResponse\x12\x42\n\x04Poll\x12\x16.google.protobuf.Empty\x1a\".sys_mon_agent_api.MonitoringStatsb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'sys_mon_agent_api_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+  DESCRIPTOR._options = None
+  _globals['_MONITOREDSERVICESTATE']._serialized_start=692
+  _globals['_MONITOREDSERVICESTATE']._serialized_end=802
+  _globals['_AGENTCONFIGURATION']._serialized_start=108
+  _globals['_AGENTCONFIGURATION']._serialized_end=185
+  _globals['_AGENTCONFIGURATIONRESPONSE']._serialized_start=187
+  _globals['_AGENTCONFIGURATIONRESPONSE']._serialized_end=233
+  _globals['_MONITORINGSTATS']._serialized_start=235
+  _globals['_MONITORINGSTATS']._serialized_end=361
+  _globals['_MONITOREDSERVICE']._serialized_start=364
+  _globals['_MONITOREDSERVICE']._serialized_end=619
+  _globals['_MONITOREDSTORAGE']._serialized_start=621
+  _globals['_MONITOREDSTORAGE']._serialized_end=690
+  _globals['_AGENT']._serialized_start=805
+  _globals['_AGENT']._serialized_end=979
+# @@protoc_insertion_point(module_scope)
diff --git a/api/sys_mon_agent_api_pb2.pyi b/api/sys_mon_agent_api_pb2.pyi
new file mode 100644
index 0000000..6e8c85c
--- /dev/null
+++ b/api/sys_mon_agent_api_pb2.pyi
@@ -0,0 +1,70 @@
+from google.protobuf import timestamp_pb2 as _timestamp_pb2
+from google.protobuf import empty_pb2 as _empty_pb2
+from google.protobuf.internal import containers as _containers
+from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class MonitoredServiceState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+    __slots__ = ()
+    ACTIVE: _ClassVar[MonitoredServiceState]
+    RELOADING: _ClassVar[MonitoredServiceState]
+    INACTIVE: _ClassVar[MonitoredServiceState]
+    FAILED: _ClassVar[MonitoredServiceState]
+    ACTIVATING: _ClassVar[MonitoredServiceState]
+    DEACTIVATING: _ClassVar[MonitoredServiceState]
+ACTIVE: MonitoredServiceState
+RELOADING: MonitoredServiceState
+INACTIVE: MonitoredServiceState
+FAILED: MonitoredServiceState
+ACTIVATING: MonitoredServiceState
+DEACTIVATING: MonitoredServiceState
+
+class AgentConfiguration(_message.Message):
+    __slots__ = ("services_to_monitor", "storage_to_monitor")
+    SERVICES_TO_MONITOR_FIELD_NUMBER: _ClassVar[int]
+    STORAGE_TO_MONITOR_FIELD_NUMBER: _ClassVar[int]
+    services_to_monitor: _containers.RepeatedScalarFieldContainer[str]
+    storage_to_monitor: _containers.RepeatedScalarFieldContainer[str]
+    def __init__(self, services_to_monitor: _Optional[_Iterable[str]] = ..., storage_to_monitor: _Optional[_Iterable[str]] = ...) -> None: ...
+
+class AgentConfigurationResponse(_message.Message):
+    __slots__ = ("hostname",)
+    HOSTNAME_FIELD_NUMBER: _ClassVar[int]
+    hostname: str
+    def __init__(self, hostname: _Optional[str] = ...) -> None: ...
+
+class MonitoringStats(_message.Message):
+    __slots__ = ("services", "storage")
+    SERVICES_FIELD_NUMBER: _ClassVar[int]
+    STORAGE_FIELD_NUMBER: _ClassVar[int]
+    services: _containers.RepeatedCompositeFieldContainer[MonitoredService]
+    storage: _containers.RepeatedCompositeFieldContainer[MonitoredStorage]
+    def __init__(self, services: _Optional[_Iterable[_Union[MonitoredService, _Mapping]]] = ..., storage: _Optional[_Iterable[_Union[MonitoredStorage, _Mapping]]] = ...) -> None: ...
+
+class MonitoredService(_message.Message):
+    __slots__ = ("name", "state", "result_success", "main_started", "main_exited")
+    NAME_FIELD_NUMBER: _ClassVar[int]
+    STATE_FIELD_NUMBER: _ClassVar[int]
+    RESULT_SUCCESS_FIELD_NUMBER: _ClassVar[int]
+    MAIN_STARTED_FIELD_NUMBER: _ClassVar[int]
+    MAIN_EXITED_FIELD_NUMBER: _ClassVar[int]
+    name: str
+    state: MonitoredServiceState
+    result_success: bool
+    main_started: _timestamp_pb2.Timestamp
+    main_exited: _timestamp_pb2.Timestamp
+    def __init__(self, name: _Optional[str] = ..., state: _Optional[_Union[MonitoredServiceState, str]] = ..., result_success: bool = ..., main_started: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., main_exited: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...
+
+class MonitoredStorage(_message.Message):
+    __slots__ = ("name", "smart_pass", "present")
+    NAME_FIELD_NUMBER: _ClassVar[int]
+    SMART_PASS_FIELD_NUMBER: _ClassVar[int]
+    PRESENT_FIELD_NUMBER: _ClassVar[int]
+    name: str
+    smart_pass: bool
+    present: bool
+    def __init__(self, name: _Optional[str] = ..., smart_pass: bool = ..., present: bool = ...) -> None: ...
diff --git a/api/sys_mon_agent_api_pb2_grpc.py b/api/sys_mon_agent_api_pb2_grpc.py
new file mode 100644
index 0000000..c3e78e5
--- /dev/null
+++ b/api/sys_mon_agent_api_pb2_grpc.py
@@ -0,0 +1,100 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from . import sys_mon_agent_api_pb2 as sys__mon__agent__api__pb2
+
+
+class AgentStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.Configure = channel.unary_unary(
+                '/sys_mon_agent_api.Agent/Configure',
+                request_serializer=sys__mon__agent__api__pb2.AgentConfiguration.SerializeToString,
+                response_deserializer=sys__mon__agent__api__pb2.AgentConfigurationResponse.FromString,
+                )
+        self.Poll = channel.unary_unary(
+                '/sys_mon_agent_api.Agent/Poll',
+                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+                response_deserializer=sys__mon__agent__api__pb2.MonitoringStats.FromString,
+                )
+
+
+class AgentServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def Configure(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def Poll(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_AgentServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'Configure': grpc.unary_unary_rpc_method_handler(
+                    servicer.Configure,
+                    request_deserializer=sys__mon__agent__api__pb2.AgentConfiguration.FromString,
+                    response_serializer=sys__mon__agent__api__pb2.AgentConfigurationResponse.SerializeToString,
+            ),
+            'Poll': grpc.unary_unary_rpc_method_handler(
+                    servicer.Poll,
+                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+                    response_serializer=sys__mon__agent__api__pb2.MonitoringStats.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'sys_mon_agent_api.Agent', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Agent(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def Configure(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/sys_mon_agent_api.Agent/Configure',
+            sys__mon__agent__api__pb2.AgentConfiguration.SerializeToString,
+            sys__mon__agent__api__pb2.AgentConfigurationResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def Poll(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/sys_mon_agent_api.Agent/Poll',
+            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+            sys__mon__agent__api__pb2.MonitoringStats.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/storage-tests.py b/storage-tests.py
new file mode 100644
index 0000000..c560117
--- /dev/null
+++ b/storage-tests.py
@@ -0,0 +1,19 @@
+import asyncio
+
+from google.protobuf.json_format import MessageToJson
+import storage
+
+async def main() -> None:
+    devices = ["/dev/nvme0", "/dev/sda"]
+    monitor = storage.Monitor(devices)
+    await monitor.initialize()
+
+    resp = await monitor.poll()
+
+    for device in resp:
+        json = MessageToJson(device, including_default_value_fields=True)
+        print(json)
+
+if __name__ == "__main__":
+    loop = asyncio.new_event_loop()
+    loop.run_until_complete(main())
diff --git a/storage/__init__.py b/storage/__init__.py
new file mode 100644
index 0000000..7e1a6ee
--- /dev/null
+++ b/storage/__init__.py
@@ -0,0 +1 @@
+from .monitor import Monitor
diff --git a/storage/monitor.py b/storage/monitor.py
new file mode 100644
index 0000000..8d78380
--- /dev/null
+++ b/storage/monitor.py
@@ -0,0 +1,31 @@
+from typing import Iterable
+
+import pySMART
+import api
+
+class Monitor:
+    def __init__(self, device_names: Iterable[str]):
+        self._monitored_devices: list[str] = []
+
+        for name in device_names:
+            self._monitored_devices.append(name)
+
+    async def initialize(self) -> None:
+        for name in self._monitored_devices:
+            device = pySMART.Device(name)
+            if device.interface is None:
+                raise RuntimeError(f"Storage device {name} not found.")
+
+    async def poll(self) -> list[api.MonitoredStorage]:
+        devices = []
+        for name in self._monitored_devices:
+            device = pySMART.Device(name)
+
+            if device.interface is None:
+                devices.append(api.MonitoredStorage(name=name, smart_pass=False, present=False))
+                continue
+
+            smart_pass = True if device.assessment == "PASS" else False
+            devices.append(api.MonitoredStorage(name=name, smart_pass=smart_pass, present=True))
+
+        return devices
diff --git a/sys-mon-agent.py b/sys-mon-agent.py
new file mode 100644
index 0000000..a18941b
--- /dev/null
+++ b/sys-mon-agent.py
@@ -0,0 +1,63 @@
+import asyncio
+import argparse
+import logging
+
+import grpc
+import api
+
+from agent import AgentApi
+
+def get_arg_parser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(
+        prog="sys-mon-agent",
+        description="Launch the Home Assistant integrated system monitoring agent.")
+
+    parser.add_argument("-p", "--port", type=int, default=8202)
+    parser.add_argument("-a", "--address", type=str, default="[::]")
default="[::]") + parser.add_argument("--ssl-cert", type=argparse.FileType("rb"), dest="cert") + parser.add_argument("--ssl-cert-key", type=argparse.FileType("rb"), dest="cert_key") + + return parser + +def configure_server_port(server: grpc.Server, arguments) -> grpc.Server: + address_str = f"{arguments.address}:{arguments.port}" + + if arguments.cert and arguments.cert_key: + cert_data = arguments.cert.read() + cert_key_data = arguments.cert_key.read() + + credentials = grpc.ssl_server_credentials(((cert_data, cert_key_data))) + + logging.info("Listening to secure port on %s.", address_str) + server.add_secure_port(address_str, credentials) + else: + logging.info("Listening to insecure port on %s.", address_str) + server.add_insecure_port(address_str) + + return server + + +async def main() -> None: + args = get_arg_parser().parse_args() + server = grpc.aio.server() + + api.add_AgentServicer_to_server(AgentApi(), server) + + server = configure_server_port(server, args) + + logging.info("Starting server...") + + await server.start() + + await server.wait_for_termination() + + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + loop = asyncio.new_event_loop() + + try: + loop.run_until_complete(main()) + finally: + loop.close() diff --git a/sys-mon-client-tests.py b/sys-mon-client-tests.py new file mode 100644 index 0000000..5ad8a89 --- /dev/null +++ b/sys-mon-client-tests.py @@ -0,0 +1,31 @@ +import asyncio +import logging + +from typing import Optional + +import api +import grpc + +from google.protobuf.json_format import MessageToJson +from google.protobuf.empty_pb2 import Empty + +async def main() -> None: + async with grpc.aio.insecure_channel("localhost:8202") as channel: + stub = api.AgentStub(channel) + conf = api.AgentConfiguration(name="some_agent", + services_to_monitor=["docker.service", "sshd.service"], + storage_to_monitor=["/dev/sda", "/dev/nvme0"]) + + await stub.RequestConfiguration(conf) + + stats = await stub.Poll(Empty()) + json = MessageToJson(stats, including_default_value_fields=True) + print(json) + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + loop = asyncio.new_event_loop() + try: + loop.run_until_complete(main()) + finally: + loop.close() diff --git a/systemd-tests.py b/systemd-tests.py new file mode 100644 index 0000000..6a83d01 --- /dev/null +++ b/systemd-tests.py @@ -0,0 +1,20 @@ +import asyncio + +from google.protobuf.json_format import MessageToJson +import systemd + +async def main() -> None: + services = ["docker.service", "sshd.service"] + monitor = systemd.Monitor(services) + await monitor.initialize() + + resp = await monitor.poll() + + for unit in resp: + json = MessageToJson(unit, including_default_value_fields=True) + print(json) + + +if __name__ == "__main__": + loop = asyncio.new_event_loop() + loop.run_until_complete(main()) diff --git a/systemd/__init__.py b/systemd/__init__.py new file mode 100644 index 0000000..7e1a6ee --- /dev/null +++ b/systemd/__init__.py @@ -0,0 +1 @@ +from .monitor import Monitor diff --git a/systemd/monitor.py b/systemd/monitor.py new file mode 100644 index 0000000..0a711a4 --- /dev/null +++ b/systemd/monitor.py @@ -0,0 +1,86 @@ +from typing import Any, Optional, Iterable +from google.protobuf.timestamp_pb2 import Timestamp + +from dbus_next.aio import MessageBus +from dbus_next.constants import BusType +import api + +class Monitor: + def __init__(self, unit_names: Iterable[str]): + self._monitored_units: dict[str, Optional[str]] = {} + + for name in unit_names: + 
+
+    async def initialize(self) -> None:
+        self._dbus = await self._try_construct_bus()
+        self._manager = await self._try_construct_systemd_manager()
+
+        await self._validate_existing_units()
+
+    async def poll(self) -> list[api.MonitoredService]:
+        response_list = []
+        for name, path in self._monitored_units.items():
+            unit = await self._get_unit(path)
+            monitored_service = await self._unit_into_monitored_service(name, unit)
+            response_list.append(monitored_service)
+
+        return response_list
+
+    async def _try_construct_bus(self) -> MessageBus:
+        return await MessageBus(bus_type=BusType.SYSTEM).connect()
+
+    async def _try_construct_systemd_manager(self) -> "ProxyInterface":
+        introspect = await self._dbus.introspect("org.freedesktop.systemd1", "/org/freedesktop/systemd1")
+        obj = self._dbus.get_proxy_object("org.freedesktop.systemd1", "/org/freedesktop/systemd1", introspect)
+
+        manager = obj.get_interface("org.freedesktop.systemd1.Manager")
+
+        return manager
+
+    async def _validate_existing_units(self) -> None:
+        all_units = await self._manager.call_list_units()
+        looking_for = [key for key, _ in self._monitored_units.items()]
+
+        for unit in all_units:
+            (name, _, _, _, _, _, path, _, _, _) = unit
+            if name in looking_for:
+                looking_for.remove(name)
+                self._monitored_units[name] = path
+
+        if len(looking_for) > 0:
+            raise RuntimeError(f"{len(looking_for)} units unaccounted for by systemd monitor.")
+
+    async def _get_unit(self, path: str) -> "ProxyInterface":
+        interface = await self._dbus.introspect("org.freedesktop.systemd1", path)
+        return self._dbus.get_proxy_object("org.freedesktop.systemd1", path, interface)
+
+    async def _unit_into_monitored_service(self, name: str, unit: "ProxyInterface") -> api.MonitoredService:
+        unit_object = unit.get_interface("org.freedesktop.systemd1.Unit")
+        service_object = unit.get_interface("org.freedesktop.systemd1.Service")
+
+        state: str = await unit_object.get_active_state()
+
+        result = await service_object.get_result()
+        result = True if result == "success" else False
+
+        started_usec: int = await service_object.get_exec_main_start_timestamp()
+        exited_usec: int = await service_object.get_exec_main_exit_timestamp()
+
+        if started_usec > 0:
+            started = Timestamp()
+            started.FromMicroseconds(started_usec)
+        else:
+            started = None
+
+        if exited_usec > 0:
+            exited = Timestamp()
+            exited.FromMicroseconds(exited_usec)
+        else:
+            exited = None
+
+        return api.MonitoredService(name=name,
+                                    state=state.upper(),
+                                    result_success=result,
+                                    main_started=started,
+                                    main_exited=exited)
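-- 
Note outside the diff: the api/sys_mon_agent_api_pb2*.py files above are protoc output for
api/protos/sys-mon-agent-api.proto. Below is a minimal regeneration sketch, assuming grpcio-tools
is installed and its bundled protoc is new enough to accept --pyi_out; the relative import in
sys_mon_agent_api_pb2_grpc.py ("from . import sys_mon_agent_api_pb2") looks hand-adjusted, so it
may need to be re-applied after regenerating:

    # regen_protos.py (hypothetical helper, not part of this patch)
    from grpc_tools import protoc

    protoc.main([
        "protoc",
        "-Iapi/protos",
        "--python_out=api",
        "--pyi_out=api",
        "--grpc_python_out=api",
        "api/protos/sys-mon-agent-api.proto",
    ])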