diff --git a/.github/workflows/docs_pages.yaml b/.github/workflows/docs_pages.yaml
index 888ef29..ae8dc2a 100644
--- a/.github/workflows/docs_pages.yaml
+++ b/.github/workflows/docs_pages.yaml
@@ -3,6 +3,10 @@ on: [ push, pull_request, workflow_dispatch ]
permissions:
contents: write
+concurrency:
+ group: docs-${{ github.ref }}
+ cancel-in-progress: true
+
jobs:
build-docs:
runs-on: ubuntu-latest
@@ -19,9 +23,9 @@ jobs:
- name: Install dependencies
run: uv sync --all-extras
- - name: Sphinx build
+ - name: Build MkDocs site
run: |
- uv run sphinx-build -b html docs/source docs/build/html
+ uv run mkdocs build --strict
- name: Deploy documentation
uses: peaceiris/actions-gh-pages@v4
diff --git a/README.md b/README.md
index 9d4112e..0a24c55 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,54 @@ recommendations for data, retrieving data in real time, archival streams, and ba
API Documentation available [here](https://botts-innovative-research.github.io/OSHConnect-Python/)
-Links:
+Links:
* [Architecture Doc](https://docs.google.com/document/d/1pIaeQw0ocU6ApNgqTVRZuSwjJAbhCcmweMq6RiVYEic/edit?usp=sharing)
- * [UML Diagram](https://drive.google.com/file/d/1FVrnYiuAR8ykqfOUa1NuoMyZ1abXzMPw/view?usp=drive_link)
\ No newline at end of file
+ * [UML Diagram](https://drive.google.com/file/d/1FVrnYiuAR8ykqfOUa1NuoMyZ1abXzMPw/view?usp=drive_link)
+
+## Generating the Docs
+
+The documentation is built with [MkDocs](https://www.mkdocs.org/) using the
+Material theme, [mkdocstrings](https://mkdocstrings.github.io/) for
+auto-generated API reference from the source, and
+[mermaid](https://mermaid.js.org/) for architecture diagrams. Markdown sources
+live under `docs/markdown/`.
+
+Install dev dependencies (including MkDocs and plugins):
+
+```bash
+uv sync
+```
+
+Build the HTML docs:
+
+```bash
+uv run mkdocs build
+```
+
+The output will be in `docs/build/html/`. Open `docs/build/html/index.html` in
+a browser to view locally.
+
+For a live-reloading preview while editing:
+
+```bash
+uv run mkdocs serve
+```
+
+Then visit http://127.0.0.1:8000.
+
+To match what CI publishes (warnings become errors — useful when you've
+touched docstrings):
+
+```bash
+uv run mkdocs build --strict
+```
+
+CI builds the site on every push and deploys `main` to GitHub Pages via
+`.github/workflows/docs_pages.yaml`.
+
+The legacy Sphinx setup under `docs/source/` is kept temporarily for
+reference and builds to a separate output directory:
+
+```bash
+uv run sphinx-build -b html docs/source docs/build/sphinx
+```
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..d9b75ed
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,35 @@
+# =============================================================================
+# OSHConnect-Python — Local PyPI Server
+#
+# A lightweight pypiserver for publishing dev builds of oshconnect so that
+# downstream projects (OCSASim, etc.) can `pip install` normally instead of
+# pointing at raw wheel paths.
+#
+# Usage:
+# docker compose up -d # start the local PyPI
+# ./scripts/publish-local.sh # build wheel + upload to local PyPI
+# docker compose down -v # tear down + remove packages volume
+#
+# Consumer side (e.g. OCSASim):
+# pip install --index-url http://localhost:8090/simple/ oshconnect
+# uv pip install --index-url http://localhost:8090/simple/ oshconnect
+# =============================================================================
+
+services:
+ pypi:
+ image: pypiserver/pypiserver:latest
+ container_name: local-pypi
+ command: run -a . -P . -o
+ ports:
+ - "8090:8080"
+ volumes:
+ - pypi-packages:/data/packages
+ healthcheck:
+ test: ["CMD", "wget", "-q", "-O", "/dev/null", "http://localhost:8080/"]
+ interval: 5s
+ timeout: 3s
+ start_period: 5s
+ retries: 3
+
+volumes:
+ pypi-packages:
diff --git a/docs/markdown/api.md b/docs/markdown/api.md
new file mode 100644
index 0000000..8a0f205
--- /dev/null
+++ b/docs/markdown/api.md
@@ -0,0 +1,90 @@
+# API Reference
+
+All public symbols are re-exported from the top-level package and can be
+imported directly:
+
+```python
+from oshconnect import OSHConnect, Node, Datastream, TimePeriod, ObservationFormat
+```
+
+Lower-level CS API utilities are available from the `oshconnect.csapi4py`
+sub-package:
+
+```python
+from oshconnect.csapi4py import APIResourceTypes, MQTTCommClient, ConnectedSystemsRequestBuilder
+```
+
+---
+
+## Core Application
+
+::: oshconnect.oshconnectapi
+
+---
+
+## Streamable Resources
+
+The primary objects for interacting with systems, datastreams, and control
+streams on an OSH node. Includes `Node`, `System`, `Datastream`,
+`ControlStream`, and supporting enums.
+
+::: oshconnect.streamableresource
+
+---
+
+## Resource Data Models
+
+Pydantic models that represent CS API resources returned from or sent to an
+OSH server.
+
+::: oshconnect.resource_datamodels
+
+---
+
+## SWE Schema Components
+
+Builder classes for constructing datastream and command schemas using SWE
+Common data types.
+
+::: oshconnect.swe_components
+
+::: oshconnect.schema_datamodels
+
+---
+
+## Event System
+
+Pub/sub event bus for in-process notifications. Implement `IEventListener`
+to receive events.
+
+::: oshconnect.eventbus
+
+::: oshconnect.events.core
+
+::: oshconnect.events.builder
+
+---
+
+## Time Management
+
+::: oshconnect.timemanagement
+
+---
+
+## CS API Integration (`csapi4py`)
+
+### Constants and Enums
+
+::: oshconnect.csapi4py.constants
+
+### Request Builder
+
+::: oshconnect.csapi4py.con_sys_api
+
+### API Helper
+
+::: oshconnect.csapi4py.default_api_helpers
+
+### MQTT Client
+
+::: oshconnect.csapi4py.mqtt
\ No newline at end of file
diff --git a/docs/markdown/architecture.md b/docs/markdown/architecture.md
new file mode 100644
index 0000000..98549f5
--- /dev/null
+++ b/docs/markdown/architecture.md
@@ -0,0 +1,93 @@
+# Architecture
+
+OSHConnect is structured around a small number of long-lived objects that mirror
+the resource hierarchy of the OGC API – Connected Systems specification.
+
+## Object hierarchy
+
+```mermaid
+graph TD
+    OSHConnect["OSHConnect<br/>application entry point"]
+    Node["Node<br/>connection to one OSH server"]
+    APIHelper["APIHelper<br/>CS API HTTP requests"]
+    Session["SessionManager<br/>OSHClientSession instances"]
+    MQTT["MQTTCommClient<br/>paho-mqtt wrapper"]
+    System["System<br/>sensor system"]
+    Datastream["Datastream<br/>output channel — observations"]
+    ControlStream["ControlStream<br/>input channel — commands & status"]
+
+ OSHConnect --> Node
+ Node --> APIHelper
+ Node --> Session
+ Node --> MQTT
+ Node --> System
+ System --> Datastream
+ System --> ControlStream
+```
+
+## Key abstractions
+
+- **`OSHConnect`** (`oshconnectapi.py`) — top-level class. Owns nodes and
+ provides `discover_systems()`, `discover_datastreams()`,
+ `save_config()` / `load_config()`, and `create_and_insert_system()`.
+- **`Node`** (`streamableresource.py`) — wraps a server connection. Drives
+ discovery via `APIHelper` and owns the `MQTTCommClient`. All HTTP resource
+ creation goes through here.
+- **`StreamableResource`** (`streamableresource.py`) — abstract base for
+ `System`, `Datastream`, and `ControlStream`. Manages MQTT
+ subscriptions/publications, WebSocket connections, and the inbound /
+ outbound message deques. Connection modes: `PUSH`, `PULL`, `BIDIRECTIONAL`.
+- **`Datastream` / `ControlStream`** (`streamableresource.py`) — concrete
+ streamable resources. Datastreams publish observations; ControlStreams
+ publish commands and receive status updates. Both follow CS API Part 3
+ topic conventions (`:data`, `:status`, `:commands`).
+- **`resource_datamodels.py`** — Pydantic models for the CS API resource types
+ (`SystemResource`, `DatastreamResource`, `ControlStreamResource`,
+ `ObservationResource`). These map directly to API request and response
+ bodies.
+- **`swe_components.py`** — Pydantic models for SWE Common schema components
+ (`DataRecordSchema`, `QuantitySchema`, `VectorSchema`, etc.). Used to define
+ observation and command schemas when creating new datastreams.
+- **`csapi4py/`** — sub-package that handles the CS API specifics: URL
+ construction (`endpoints.py`), request building (`con_sys_api.py`), enums
+ (`constants.py`), and MQTT topic conventions (`mqtt.py`).
+- **`EventHandler`** (`eventbus.py`) — singleton pub/sub bus. Listeners
+ subscribe to event types (e.g. `NEW_OBSERVATION`) and topic strings; events
+ are dispatched asynchronously through an internal queue.
+- **`timemanagement.py`** — `TimeInstant` (epoch / ISO-8601), `TimePeriod`,
+ `TemporalModes` (`REAL_TIME`, `ARCHIVE`, `BATCH`), and `TimeUtils`
+ conversions.
+
+## Typical data flow
+
+```mermaid
+sequenceDiagram
+ autonumber
+ participant App as OSHConnect
+ participant N as Node
+ participant H as APIHelper
+ participant S as Server
+ participant DS as Datastream
+
+ App->>N: add_node()
+ App->>N: discover_systems()
+ N->>H: retrieve_resource(SYSTEM)
+ H->>S: HTTP GET /systems
+ S-->>H: JSON
+ H-->>N: System objects
+ App->>DS: discover_datastreams()
+ DS->>DS: initialize() — open MQTT/WebSocket
+ DS->>DS: start() — begin streaming
+ S-->>DS: observations → _inbound_deque
+    Note over App,DS: To insert: resource.insert_self() →<br/>APIHelper.create_resource() → POST →<br/>server returns Location header with new ID
+```
+
+## Dependencies
+
+- **pydantic** — all resource and schema models. Bumping the minimum requires
+ confirming pre-built wheels exist for all supported Python versions
+ (3.12 – 3.14).
+- **shapely** — geometry handling for spatial resources.
+- **paho-mqtt** — MQTT streaming for CS API Part 3.
+- **websockets** / **aiohttp** — WebSocket and async HTTP streaming.
+- **requests** — synchronous HTTP for discovery and resource creation.
\ No newline at end of file
diff --git a/docs/markdown/index.md b/docs/markdown/index.md
new file mode 100644
index 0000000..09bbf67
--- /dev/null
+++ b/docs/markdown/index.md
@@ -0,0 +1,24 @@
+# OSHConnect-Python
+
+OSHConnect-Python is the Python member of the OSHConnect family of application
+libraries. It provides a simple, straightforward way to interact with
+OpenSensorHub (or any other OGC API – Connected Systems server).
+
+It supports Parts 1, 2, and 3 (Pub/Sub) of the OGC Connected Systems API,
+including:
+
+- System, Datastream, and ControlStream discovery and management
+- Real-time MQTT streaming using CS API Part 3 `:data` topic conventions
+- Resource event topic subscriptions (CloudEvents lifecycle notifications)
+- Batch retrieval and archival stream playback
+- Configuration persistence (JSON save/load)
+- SWE Common schema builders for defining datastream and command schemas
+
+All major classes and utilities are importable directly from `oshconnect`.
+Lower-level CS API utilities are available from `oshconnect.csapi4py`.
+
+## Where to next
+
+- [Architecture](architecture.md) — object hierarchy, data flow, and key abstractions
+- [Tutorial](tutorial.md) — common workflows for connecting, discovering, streaming, and inserting resources
+- [API Reference](api.md) — auto-generated reference for every public symbol
\ No newline at end of file
diff --git a/docs/markdown/tutorial.md b/docs/markdown/tutorial.md
new file mode 100644
index 0000000..6a4afa7
--- /dev/null
+++ b/docs/markdown/tutorial.md
@@ -0,0 +1,208 @@
+# Tutorial
+
+OSHConnect-Python is a library for interacting with OpenSensorHub through
+OGC API – Connected Systems. This tutorial walks through the most common
+workflows.
+
+## Installation
+
+Install with `uv` (recommended):
+
+```bash
+uv add git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
+```
+
+Or with `pip`:
+
+```bash
+pip install git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
+```
+
+All public classes and utilities can be imported directly from `oshconnect`:
+
+```python
+from oshconnect import OSHConnect, Node, System, Datastream, ControlStream
+from oshconnect import TimePeriod, TimeInstant, TemporalModes
+from oshconnect import DataRecordSchema, QuantitySchema, TimeSchema, TextSchema
+from oshconnect import ObservationFormat, DefaultEventTypes
+```
+
+## Creating an OSHConnect instance
+
+The main entry point is the `OSHConnect` class:
+
+```python
+from oshconnect import OSHConnect, TemporalModes
+
+app = OSHConnect(name='MyApp')
+```
+
+## Adding a Node
+
+A `Node` represents a connection to a single OSH server. The `OSHConnect`
+instance can manage multiple nodes simultaneously.
+
+```python
+from oshconnect import OSHConnect, Node
+
+app = OSHConnect(name='MyApp')
+node = Node(protocol='http', address='localhost', port=8585,
+ username='test', password='test')
+app.add_node(node)
+```
+
+To connect a node with MQTT support for streaming:
+
+```python
+node = Node(protocol='http', address='localhost', port=8585,
+ username='test', password='test',
+ enable_mqtt=True, mqtt_port=1883)
+app.add_node(node)
+```
+
+## Discovery
+
+Discover all systems available on all registered nodes:
+
+```python
+app.discover_systems()
+```
+
+Discover all datastreams across all discovered systems:
+
+```python
+app.discover_datastreams()
+```
+
+## Streaming observations (MQTT)
+
+Once a node is configured with MQTT and datastreams are discovered, start
+receiving observations by initializing and starting each datastream:
+
+```python
+from oshconnect import StreamableModes
+
+for ds in app.get_datastreams():
+ ds.set_connection_mode(StreamableModes.PULL)
+ ds.initialize()
+ ds.start()
+```
+
+Incoming messages are appended to each datastream's inbound deque:
+
+```python
+import time
+
+time.sleep(2) # allow messages to arrive
+for ds in app.get_datastreams():
+ while ds.get_inbound_deque():
+ msg = ds.get_inbound_deque().popleft()
+ print(msg)
+```
+
+## Resource event subscriptions
+
+Subscribe to resource lifecycle events (create / update / delete) using
+`subscribe_events()`. These arrive as CloudEvents v1.0 JSON payloads:
+
+```python
+def on_event(client, userdata, msg):
+ print(f"Event on {msg.topic}: {msg.payload}")
+
+for ds in app.get_datastreams():
+ topic = ds.subscribe_events(callback=on_event)
+ print(f"Subscribed to event topic: {topic}")
+```
+
+## Inserting a new System
+
+```python
+from oshconnect import OSHConnect, Node
+
+app = OSHConnect(name='MyApp')
+node = Node(protocol='http', address='localhost', port=8585,
+ username='admin', password='admin')
+app.add_node(node)
+
+new_system = app.create_and_insert_system(
+ system_opts={
+ 'name': 'Test System',
+ 'description': 'A test system',
+ 'uid': 'urn:system:test:001',
+ },
+ target_node=node
+)
+```
+
+## Inserting a new Datastream
+
+Build a schema using SWE Common component classes, then attach it to a system:
+
+```python
+from oshconnect import DataRecordSchema, TimeSchema, QuantitySchema, TextSchema
+from oshconnect.api_utils import URI, UCUMCode
+
+datarecord = DataRecordSchema(
+ label='Example Record',
+ description='Example datastream record',
+ definition='http://example.org/records/example',
+ fields=[]
+)
+
+# TimeSchema must be the first field for OSH
+datarecord.fields.append(
+ TimeSchema(label='Timestamp',
+ definition='http://www.opengis.net/def/property/OGC/0/SamplingTime',
+ name='timestamp',
+ uom=URI(href='http://www.opengis.net/def/uom/ISO-8601/0/Gregorian'))
+)
+datarecord.fields.append(
+ QuantitySchema(name='distance', label='Distance',
+ definition='http://example.org/Distance',
+ uom=UCUMCode(code='m', label='meters'))
+)
+datarecord.fields.append(
+ TextSchema(name='label', label='Label',
+ definition='http://example.org/Label')
+)
+
+datastream = new_system.add_insert_datastream(datarecord)
+```
+
+!!! note
+ A `TimeSchema` must be the first field in the `DataRecordSchema` when
+ targeting OpenSensorHub.
+
+## Inserting an Observation
+
+Once a datastream is registered, send observation data using
+`insert_observation_dict()`:
+
+```python
+from oshconnect import TimeInstant
+
+datastream.insert_observation_dict({
+ 'resultTime': TimeInstant.now_as_time_instant().get_iso_time(),
+ 'phenomenonTime': TimeInstant.now_as_time_instant().get_iso_time(),
+ 'result': {
+ 'timestamp': TimeInstant.now_as_time_instant().epoch_time,
+ 'distance': 1.0,
+ 'label': 'example observation',
+ }
+})
+```
+
+!!! note
+ The keys in `result` correspond to the `name` fields of each schema
+ component. `resultTime` and `phenomenonTime` are required by
+ OpenSensorHub.
+
+## Saving and loading configuration
+
+The OSHConnect state (nodes, systems, datastreams) can be persisted to a JSON
+file:
+
+```python
+app.save_config() # saves to a default file
+app = OSHConnect.load_config('my_config.json')
+```
\ No newline at end of file
diff --git a/docs/source/api.rst b/docs/source/api.rst
index 408ecbe..e9a101b 100644
--- a/docs/source/api.rst
+++ b/docs/source/api.rst
@@ -1,69 +1,117 @@
API Reference
=============
-OSHConnect
-----------
+All public symbols are re-exported from the top-level package and can be imported directly::
+
+    from oshconnect import OSHConnect, Node, Datastream, TimePeriod, ObservationFormat, ...
+
+Lower-level CS API utilities are available from the ``oshconnect.csapi4py`` subpackage::
+
+ from oshconnect.csapi4py import APIResourceTypes, MQTTCommClient, ConnectedSystemsRequestBuilder, ...
+
+----
+
+Core Application
+----------------
-OSHConnect Utilities and Helpers
---------------------------------
.. automodule:: oshconnect.oshconnectapi
:members:
:undoc-members:
:show-inheritance:
-OSH Connect Data Models
------------------------
-These are the second highest level pieces in the hierarchy of the library and the utilities needed to help almost
-everything else in the app function.
+----
+
+Streamable Resources
+--------------------
+These are the primary objects for interacting with systems, datastreams, and control streams on an OSH node.
+Includes ``Node``, ``System``, ``Datastream``, ``ControlStream``, and supporting enums.
+
+.. automodule:: oshconnect.streamableresource
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+----
+
+Resource Data Models
+--------------------
+Pydantic models that represent CS API resources returned from or sent to an OSH server.
-.. automodule:: oshconnect.osh_connect_datamodels
+.. automodule:: oshconnect.resource_datamodels
:members:
:undoc-members:
:show-inheritance:
+----
-DataSources and Messaging
--------------------------
-Due to their extreme importance in the library, the data sources are listed separately along with the classes that help
-manage them and their data.
+SWE Schema Components
+---------------------
+Builder classes for constructing datastream and command schemas using SWE Common data types.
-.. automodule:: oshconnect.datasource
+.. automodule:: oshconnect.swe_components
:members:
:undoc-members:
:show-inheritance:
+.. automodule:: oshconnect.schema_datamodels
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+----
+
+Event System
+------------
+Pub/sub event bus for in-process notifications. Implement ``IEventListener`` to receive events.
+
+.. automodule:: oshconnect.eventbus
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+----
+
Time Management
---------------
-Currently **WIP** but this module will contain the classes and functions that help manage the current time and other
-playback features of groups of datasources/datafeeds
.. automodule:: oshconnect.timemanagement
:members:
:undoc-members:
:show-inheritance:
-Styling
--------
-**WIP** This module contains the classes and functions that help manage the styling and visualization recommendations that
-the library provides.
+----
-Datastore
----------
-**WIP** This module is for managing the state of the app. The configurations files are intended to be interchgangale
-among all language versions of the OSHConnect ecosystem.
+CS API Integration (``csapi4py``)
+----------------------------------
-Core Data Models
-----------------
-Theses data models are not often intended to be used directly by the user, but are used by the library to help manage
-validation of data that flows to and from the API.
+Constants and Enums
+~~~~~~~~~~~~~~~~~~~
-.. automodule:: oshconnect.core_datamodels
+.. automodule:: oshconnect.csapi4py.constants
:members:
:undoc-members:
:show-inheritance:
+Request Builder
+~~~~~~~~~~~~~~~
+.. automodule:: oshconnect.csapi4py.con_sys_api
+ :members:
+ :undoc-members:
+ :show-inheritance:
-Helpers
-~~~~~~~
+API Helper
+~~~~~~~~~~
+
+.. automodule:: oshconnect.csapi4py.default_api_helpers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+MQTT Client
+~~~~~~~~~~~
+
+.. automodule:: oshconnect.csapi4py.mqtt
+ :members:
+ :undoc-members:
+ :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 91c8965..b018781 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -10,7 +10,7 @@
import sys
import traceback
-sys.path.insert(0, os.path.abspath("../.."))
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../src')))
def process_exception(app, what, name, obj, options, lines):
@@ -22,9 +22,9 @@ def setup(app):
project = 'OSHConnect-Python'
-copyright = '2024, Botts Innovative Research, Inc.'
+copyright = '2025, Botts Innovative Research, Inc.'
author = 'Ian Patterson'
-release = '0.2'
+release = '0.4'
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
diff --git a/docs/source/datasources.rst b/docs/source/datasources.rst
deleted file mode 100644
index 0e67027..0000000
--- a/docs/source/datasources.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-DataSources
-==============
diff --git a/docs/source/external.rst b/docs/source/external.rst
deleted file mode 100644
index 2b62158..0000000
--- a/docs/source/external.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-External Models
-===============
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 4727d43..380694c 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -3,17 +3,28 @@ Welcome to OSHConnect-Python's documentation!
OSHConnect-Python
=================
-OSHConnect-Python is the Python version of the OSHConnect family of application libraries inteded to provide a simple
-and straightforward way to interact with OpenSensorHub (or another CSAPI server) by way of OGC API - Connected Systems.
-It supports or will support at the time of a 1.0 release Part 1 and Part 2 of the Connected Systems api, as well as
-certain streaming features made possible by OpenSensorHub.
+OSHConnect-Python is the Python version of the OSHConnect family of application libraries intended to provide a
+simple and straightforward way to interact with OpenSensorHub (or another CS API server) by way of
+OGC API - Connected Systems.
+
+It supports Parts 1, 2, and 3 (Pub/Sub) of the OGC Connected Systems API, including:
+
+- System, Datastream, and ControlStream discovery and management
+- Real-time MQTT streaming with CS API Part 3 ``:data`` topic conventions
+- Resource event topic subscriptions (CloudEvents lifecycle notifications)
+- Batch retrieval and archival stream playback
+- Configuration persistence (JSON save/load)
+- SWE Common schema builders for defining datastream and command schemas
+
+All major classes and utilities are importable directly from ``oshconnect``.
+Lower-level CS API utilities are available from ``oshconnect.csapi4py``.
.. toctree::
:maxdepth: 2
:caption: Contents
- api
tutorial
+ api
@@ -22,6 +33,4 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
-* :ref:`search`
-
-
+* :ref:`search`
\ No newline at end of file
diff --git a/docs/source/insertion_tutorial.rst b/docs/source/insertion_tutorial.rst
deleted file mode 100644
index e69de29..0000000
diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst
index 43d1d73..7733825 100644
--- a/docs/source/tutorial.rst
+++ b/docs/source/tutorial.rst
@@ -1,179 +1,211 @@
-OSH Connect Tutorial
-====================
-OSH Connect for Python is a straightforward library for interacting with OpenSensorHub using OGC API Connected Systems.
-This tutorial will help guide you through a few simple examples to get you started with OSH Connect.
+OSHConnect-Python Tutorial
+==========================
+OSHConnect-Python is a library for interacting with OpenSensorHub using OGC API Connected Systems.
+This tutorial walks through the most common workflows.
-OSH Connect Installation
---------------------------
-OSH Connect can be installed using `pip`. To install the latest version of OSH Connect, run the following command:
+Installation
+------------
+Install using ``uv`` (recommended):
.. code-block:: bash
- pip install git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
+ uv add git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
-Or, if you prefer `poetry`:
+Or with ``pip``:
.. code-block:: bash
- poetry add git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
+ pip install git+https://github.com/Botts-Innovative-Research/OSHConnect-Python.git
-Creating an instance of OSHConnect
----------------------------------------
-The intended method of interacting with OpenSensorHub is through the `OSHConnect` class.
-To this you must first create an instance of `OSHConnect`:
+All public classes and utilities can be imported directly from ``oshconnect``:
.. code-block:: python
- from oshconnect.oshconnectapi import OSHConnect, TemporalModes
+ from oshconnect import OSHConnect, Node, System, Datastream, ControlStream
+ from oshconnect import TimePeriod, TimeInstant, TemporalModes
+ from oshconnect import DataRecordSchema, QuantitySchema, TimeSchema, TextSchema
+ from oshconnect import ObservationFormat, DefaultEventTypes
+
+
+Creating an OSHConnect Instance
+--------------------------------
+The main entry point is the ``OSHConnect`` class:
+
+.. code-block:: python
- connect_app = OSHConnect(name='OSHConnect', playback_mode=TemporalModes.REAL_TIME)
+ from oshconnect import OSHConnect, TemporalModes
-.. tip::
+ app = OSHConnect(name='MyApp')
- The `name` parameter is optional, but can be useful for debugging purposes.
- The playback mode determines how the data is retrieved from OpenSensorHub.
-The next step is to add a `Node` to the `OSHConnect` instance. A `Node` is a representation of a server that you want to connect to.
-The OSHConnect instance can support multiple Nodes at once.
+Adding a Node
+-------------
+A ``Node`` represents a connection to a single OSH server.
+The ``OSHConnect`` instance can manage multiple nodes simultaneously.
-Adding a Node to an OSHConnect instance
------------------------------------------
.. code-block:: python
- from oshconnect.oshconnectapi import OSHConnect, TemporalModes
- from oshconnect.osh_connect_datamodels import Node
+ from oshconnect import OSHConnect, Node
- connect_app = OSHConnect(name='OSHConnect', playback_mode=TemporalModes.REAL_TIME)
- node = Node(protocol='http', address="localhost", port=8585, username="test", password="test")
- connect_app.add_node(node)
+ app = OSHConnect(name='MyApp')
+ node = Node(protocol='http', address='localhost', port=8585,
+ username='test', password='test')
+ app.add_node(node)
-System Discovery
------------------------------------------
-Once you have added a Node to the OSHConnect instance, you can discover the systems that are available on that Node.
-This is done by calling the `discover_systems()` method on the OSHConnect instance.
+To connect a node with MQTT support for streaming:
.. code-block:: python
- connect_app.discover_systems()
+ node = Node(protocol='http', address='localhost', port=8585,
+ username='test', password='test',
+ enable_mqtt=True, mqtt_port=1883)
+ app.add_node(node)
-Datastream Discovery
------------------------------------------
-Once you have discovered the systems that are available on a Node, you can discover the datastreams that are available to those
-systems. This is done by calling the `discover_datastreams` method on the OSHConnect instance.
+
+Discovery
+---------
+
+Discover all systems available on all registered nodes:
.. code-block:: python
- connect_app.discover_datastreams()
+ app.discover_systems()
-Playing back data
------------------------------------------
-Once you have discovered the datastreams that are available on a Node, you can play back the data from those datastreams.
-This is done by calling the `playback_streams` method on the OSHConnect instance.
+Discover all datastreams across all discovered systems:
.. code-block:: python
- connect_app.playback_streams()
+ app.discover_datastreams()
-Accessing data
------------------------------------------
-To access the data retrieved from the datastreams, you need to access the messages available to the OSHConnect instance.
-Calling the `get_messages` method on the OSHConnect instance will return a list of `MessageWrapper` objects that contain individual
-observations.
-.. code-block:: python
+Streaming Observations (MQTT)
+------------------------------
+Once a node is configured with MQTT and datastreams are discovered, start receiving
+observations by initializing and starting each datastream:
- messages = connect_app.get_messages()
+.. code-block:: python
- for message in messages:
- print(message)
+ from oshconnect import StreamableModes
- # or, to access the individual observations
- for message in messages:
- for observation in message.observations:
- do_something_with(observation)
+ for ds in app.get_datastreams():
+ ds.set_connection_mode(StreamableModes.PULL)
+ ds.initialize()
+ ds.start()
+Incoming messages are appended to each datastream's inbound deque:
-Resource Insertion
-=========================================
-Other use cases of the OSH Connect library may involve inserting new resources into OpenSensorHub or another Connected Systems API server.
+.. code-block:: python
-Adding and Inserting a New System
------------------------------------------
-The first major step in a common workflow is to add a new system to the OSH Connect instance.
-There are a couple of ways to do this, but the recommended method is as follows:
+ import time
-.. note::
+ time.sleep(2) # allow messages to arrive
+ for ds in app.get_datastreams():
+ while ds.get_inbound_deque():
+ msg = ds.get_inbound_deque().popleft()
+ print(msg)
- The `insert_system` method requires a `Node` object to be passed in as the second argument.
- Creating one is covered in an earlier section.
+Resource Event Subscriptions
+-----------------------------
+Subscribe to resource lifecycle events (create/update/delete) using
+``subscribe_events()``. These arrive as CloudEvents v1.0 JSON payloads:
.. code-block:: python
- from oshconnect.osh_connect_datamodels import System
+ def on_event(client, userdata, msg):
+ print(f"Event on {msg.topic}: {msg.payload}")
- new_system = app.insert_system(
- System(name="Test System", description="Test System Description", label="Test System",
- urn="urn:system:test"), node)
+ for ds in app.get_datastreams():
+ topic = ds.subscribe_events(callback=on_event)
+ print(f"Subscribed to event topic: {topic}")
-Adding and Inserting a New Datastream
------------------------------------------
-Once you have a `System` object, you can add a new datastream to it. This is one of the more complex operations
-in the library as the schema is very flexible by design. Luckily, the schemas are validated by the underlying data
-models, so you can be sure that your datastream is valid before inserting it.
-.. caution::
+Inserting a New System
+-----------------------
- Some implementations of the Connected Systems API may require additional fields to be filled in.
- OSH Connect is primarily focused on the OpenSensorHub implementation, but does not some of the fields that
- are required by and OpenSensorHub node.
+.. code-block:: python
+
+ from oshconnect import OSHConnect, Node
+
+ app = OSHConnect(name='MyApp')
+ node = Node(protocol='http', address='localhost', port=8585,
+ username='admin', password='admin')
+ app.add_node(node)
+
+ new_system = app.create_and_insert_system(
+ system_opts={
+ 'name': 'Test System',
+ 'description': 'A test system',
+ 'uid': 'urn:system:test:001',
+ },
+ target_node=node
+ )
-In this example, we will add a new datastream to the `new_system` object that we created in the previous example.
-You'll note the creation of a `DataRecordSchema` object, in OSH's implementation, a DataRecord is the root of all
-datastream schemas.
+
+Inserting a New Datastream
+--------------------------
+Build a schema using SWE Common component classes, then attach it to a system:
.. code-block:: python
- from oshconnect.osh_connect_datamodels import Datastream
-
- datarecord_schema = DataRecordSchema(label='Example Data Record', description='Example Data Record Description',
- definition='www.test.org/records/example-datarecord', fields=[])
- time_schema = TimeSchema(label="Timestamp", definition="http://test.com/Time", name="timestamp",
- uom=URI(href="http://test.com/TimeUOM"))
- continuous_value_field = QuantitySchema(name='continuous-value-distance', label='Continuous Value Distance',
- description='Continuous Value Description',
- definition='www.test.org/fields/continuous-value',
- uom=UCUMCode(code='m', label='meters'))
- example_text_field = TextSchema(name='example-text-field', label='Example Text Field', definition='www.test.org/fields/example-text-field')
- # add the fields to the datarecord schema, these can also be added added to the datarecord when it is created
- datarecord_schema.fields.append(time_schema) # TimeSchema is required to be the first field in the datarecord for OSH
- datarecord_schema.fields.append(continuous_value_field)
- datarecord_schema.fields.append(example_text_field)
- # Add the datastream to the system
- datastream = new_system.add_insert_datastream(datarecord_schema)
+ from oshconnect import DataRecordSchema, TimeSchema, QuantitySchema, TextSchema
+ from oshconnect.api_utils import URI, UCUMCode
+
+ datarecord = DataRecordSchema(
+ label='Example Record',
+ description='Example datastream record',
+ definition='http://example.org/records/example',
+ fields=[]
+ )
+
+ # TimeSchema must be the first field for OSH
+ datarecord.fields.append(
+ TimeSchema(label='Timestamp', definition='http://www.opengis.net/def/property/OGC/0/SamplingTime',
+ name='timestamp', uom=URI(href='http://www.opengis.net/def/uom/ISO-8601/0/Gregorian'))
+ )
+ datarecord.fields.append(
+ QuantitySchema(name='distance', label='Distance', definition='http://example.org/Distance',
+ uom=UCUMCode(code='m', label='meters'))
+ )
+ datarecord.fields.append(
+ TextSchema(name='label', label='Label', definition='http://example.org/Label')
+ )
+
+ datastream = new_system.add_insert_datastream(datarecord)
.. note::
- A TimeSchema is required to be the first field in the DataRecordSchema for OSH.
+ A ``TimeSchema`` must be the first field in the ``DataRecordSchema`` when targeting OpenSensorHub.
+
-Inserting an Observation into and OpenSensorHub Node
------------------------------------------------------
-Upon successfully adding a new datastream to a system, it is now possible to send observation data to the node.
+Inserting an Observation
+------------------------
+Once a datastream is registered, send observation data using ``insert_observation_dict()``:
.. code-block:: python
- datastream.insert_observation_dict({
- "resultTime": TimeInstant.now_as_time_instant().get_iso_time(), # resultTime is required for OSH
- "phenomenonTime": TimeInstant.now_as_time_instant().get_iso_time(), # phenomenonTime is required for OSH
- "result": {
- "timestamp": TimeInstant.now_as_time_instant().epoch_time,
- "continuous-value-distance": 1.0,
- "example-text-field": "Here is some text"
- }
- })
+ from oshconnect import TimeInstant
+
+ datastream.insert_observation_dict({
+ 'resultTime': TimeInstant.now_as_time_instant().get_iso_time(),
+ 'phenomenonTime': TimeInstant.now_as_time_instant().get_iso_time(),
+ 'result': {
+ 'timestamp': TimeInstant.now_as_time_instant().epoch_time,
+ 'distance': 1.0,
+ 'label': 'example observation',
+ }
+ })
.. note::
- The `resultTime` and `phenomenonTime` fields are required for OSH.
- The `result` field is representative of the schemas included in the DataRecordSchema's fields.
- You'll notice that they are referred to by their `name` field in the schema as it is the "machine" name
- of the output.
\ No newline at end of file
+ The keys in ``result`` correspond to the ``name`` fields of each schema component.
+ ``resultTime`` and ``phenomenonTime`` are required by OpenSensorHub.
+
+
+Saving and Loading Configuration
+---------------------------------
+The OSHConnect state (nodes, systems, datastreams) can be persisted to a JSON file:
+
+.. code-block:: python
+
+ app.save_config() # saves to a default file
+ app = OSHConnect.load_config('my_config.json')
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..6db1be9
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,71 @@
+site_name: OSHConnect-Python
+site_description: Python library for the OGC API – Connected Systems (Parts 1, 2, and 3 Pub/Sub)
+site_author: Ian Patterson
+repo_url: https://github.com/Botts-Innovative-Research/OSHConnect-Python
+edit_uri: ""
+
+docs_dir: docs/markdown
+site_dir: docs/build/html
+
+theme:
+ name: material
+ features:
+ - navigation.sections
+ - navigation.expand
+ - navigation.top
+ - content.code.copy
+ - toc.follow
+ palette:
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ primary: indigo
+ accent: indigo
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ primary: indigo
+ accent: indigo
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+
+plugins:
+ - search
+ - mkdocstrings:
+ default_handler: python
+ handlers:
+ python:
+ paths: [src]
+ options:
+ show_root_heading: true
+ show_source: false
+ show_signature_annotations: true
+ separate_signature: true
+ docstring_style: sphinx
+ members_order: source
+ filters: ["!^_"]
+ merge_init_into_class: true
+
+markdown_extensions:
+ - admonition
+ - attr_list
+ - md_in_html
+ - toc:
+ permalink: true
+ - pymdownx.highlight:
+ anchor_linenums: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences:
+ custom_fences:
+ - name: mermaid
+ class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format
+
+nav:
+ - Home: index.md
+ - Architecture: architecture.md
+ - Tutorial: tutorial.md
+ - API Reference: api.md
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 3ffaf51..06ee198 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[project]
name = "oshconnect"
-version = "0.3.0a5.post1"
-description = "Library for interfacing with OSH, helping guide visualization efforts, and providing a place to store configurations."
+version = "0.5.0a0"
+description = "Library for interfacing with OSH, helping guide visualization efforts, and providing a place to store configurations. Implements OGC CS API Part 3 (Pub/Sub) MQTT topic conventions including :data topics and resource event topics."
readme = "README.md"
authors = [
{ name = "Ian Patterson", email = "ian@botts-inc.com" },
@@ -9,20 +9,25 @@ authors = [
requires-python = "<4.0,>=3.12"
dependencies = [
"paho-mqtt>=2.1.0",
- "pydantic>=2.7.4,<3.0.0",
- "shapely>=2.0.4,<3.0.0",
+ "pydantic>=2.12.5,<3.0.0",
+ "shapely>=2.1.2,<3.0.0",
"websockets>=12.0,<16.0",
"requests",
"aiohttp>=3.12.15",
]
-
-[dependency-groups]
+[project.optional-dependencies]
dev = [
"flake8>=7.2.0",
"pytest>=8.3.5",
"sphinx>=7.4.7",
"sphinx-rtd-theme>=2.0.0",
+ "mkdocs-material>=9.5.0",
+ "mkdocstrings[python]>=0.26.0",
]
+tinydb = ["tinydb>=4.8.0,<5.0.0"]
[tool.setuptools]
packages = {find = { where = ["src/"]}}
+
+[tool.pytest.ini_options]
+pythonpath = ["src"]
diff --git a/scripts/publish-local.py b/scripts/publish-local.py
new file mode 100755
index 0000000..8639445
--- /dev/null
+++ b/scripts/publish-local.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+# =============================================================================
+# publish-local.py — Build oshconnect and publish to the local PyPI server.
+#
+# One-command dev loop: edit code -> run this -> downstream picks up the new
+# version via `pip install --index-url http://localhost:8090/simple/ oshconnect`.
+#
+# The local pypiserver container must be running (started automatically below
+# via `docker compose up -d pypi` if it isn't). pypiserver is configured with
+# `-o` so re-uploading the same version overwrites — no version bump needed.
+#
+# Usage:
+# ./scripts/publish-local.py # build + upload
+# ./scripts/publish-local.py --no-build # upload existing wheel(s) in dist/
+# LOCAL_PYPI_URL=http://host:port ./scripts/publish-local.py # override URL
+# =============================================================================
+from __future__ import annotations
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import time
+import urllib.error
+import urllib.request
+from pathlib import Path
+
+PROJECT_ROOT = Path(__file__).resolve().parent.parent
+PYPI_URL = os.environ.get("LOCAL_PYPI_URL", "http://localhost:8090")
+
+CYAN = "\033[0;36m"
+GREEN = "\033[0;32m"
+RED = "\033[0;31m"
+NC = "\033[0m"
+
+
+def info(msg: str) -> None:
+ print(f"{CYAN}[INFO]{NC} {msg}")
+
+
+def ok(msg: str) -> None:
+ print(f"{GREEN}[OK]{NC} {msg}")
+
+
+def fail(msg: str, code: int = 1) -> None:
+ print(f"{RED}[FAIL]{NC} {msg}", file=sys.stderr)
+ sys.exit(code)
+
+
+def pypi_ready(url: str) -> bool:
+ """Return True iff the URL responds with a 2xx or 3xx status."""
+ try:
+ with urllib.request.urlopen(url, timeout=3) as resp:
+ return 200 <= resp.status < 400
+ except (urllib.error.URLError, urllib.error.HTTPError, TimeoutError, OSError):
+ return False
+
+
+def ensure_pypi(url: str) -> None:
+ info(f"Checking local PyPI at {url}")
+ if pypi_ready(url):
+ ok("Local PyPI is already running")
+ return
+
+ info("Local PyPI not running — starting container...")
+ res = subprocess.run(
+ ["docker", "compose", "up", "-d", "pypi"], cwd=PROJECT_ROOT
+ )
+ if res.returncode != 0:
+ fail("docker compose up failed")
+
+ for i in range(1, 11):
+ time.sleep(1)
+ if pypi_ready(url):
+ ok("Local PyPI started")
+ return
+ info(f" waiting... ({i}/10)")
+
+ fail("Could not start local PyPI")
+
+
+def build_wheel() -> None:
+ info("Building wheel...")
+ for sub in ("dist", "build"):
+ shutil.rmtree(PROJECT_ROOT / sub, ignore_errors=True)
+ for egg in (PROJECT_ROOT / "src").glob("*.egg-info"):
+ shutil.rmtree(egg, ignore_errors=True)
+
+ res = subprocess.run(["uv", "build"], cwd=PROJECT_ROOT)
+ if res.returncode != 0:
+ fail("uv build failed")
+
+
+def find_wheels() -> list[Path]:
+ return sorted((PROJECT_ROOT / "dist").glob("*.whl"))
+
+
+def publish(url: str, wheels: list[Path]) -> None:
+ # pypiserver runs with `-a . -P .` (auth disabled), but `uv publish`/
+ # pypiserver still issue a Basic-Auth challenge that triggers an
+ # interactive prompt. Pass empty credentials to satisfy it.
+ info(f"Uploading to {url}")
+ cmd = [
+ "uv", "publish",
+ "--publish-url", url,
+ "--username", "",
+ "--password", "",
+ *[str(w) for w in wheels],
+ ]
+ res = subprocess.run(cmd, cwd=PROJECT_ROOT)
+ if res.returncode != 0:
+ fail("uv publish failed")
+
+
+def main() -> int:
+ parser = argparse.ArgumentParser(
+ description="Build oshconnect and publish it to the local PyPI server.",
+ )
+ parser.add_argument(
+ "--no-build",
+ action="store_true",
+ help="Skip wheel build; upload whatever is in dist/.",
+ )
+ args = parser.parse_args()
+
+ info(f"Project root: {PROJECT_ROOT}")
+ ensure_pypi(PYPI_URL)
+
+ if not args.no_build:
+ build_wheel()
+
+ wheels = find_wheels()
+ if not wheels:
+ fail(
+ f"No wheel found in {PROJECT_ROOT}/dist/. "
+ "Build first or remove --no-build."
+ )
+
+ ok(f"Wheel(s): {' '.join(str(w.relative_to(PROJECT_ROOT)) for w in wheels)}")
+ publish(PYPI_URL, wheels)
+
+ ok("Published to local PyPI")
+ print()
+ print(f" Browse: {PYPI_URL}/simple/")
+ print(f" Install: pip install --index-url {PYPI_URL}/simple/ oshconnect")
+ print(f" uv: uv pip install --index-url {PYPI_URL}/simple/ oshconnect")
+ print(f" uv sync: uv sync (if pyproject.toml has [[tool.uv.index]] configured)")
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/publish-local.sh b/scripts/publish-local.sh
new file mode 100755
index 0000000..1413ad9
--- /dev/null
+++ b/scripts/publish-local.sh
@@ -0,0 +1,111 @@
+#!/usr/bin/env bash
+# =============================================================================
+# publish-local.sh — Build oshconnect and publish to the local PyPI server
+#
+# This is the one-command dev loop: edit code → run this → downstream picks
+# up the new version via `pip install --index-url http://localhost:8090/simple/`.
+#
+# The local pypiserver container must be running (`docker compose up -d`).
+# The --overwrite flag on the server allows re-uploading the same version,
+# so you don't need to bump the version for every dev iteration.
+#
+# Usage:
+# ./scripts/publish-local.sh # build + upload
+# ./scripts/publish-local.sh --no-build # upload existing wheel(s) in dist/
+# =============================================================================
+
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
+
+PYPI_URL="${LOCAL_PYPI_URL:-http://localhost:8090}"
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+CYAN='\033[0;36m'
+NC='\033[0m'
+
+info() { echo -e "${CYAN}[INFO]${NC} $*"; }
+ok() { echo -e "${GREEN}[OK]${NC} $*"; }
+fail() { echo -e "${RED}[FAIL]${NC} $*"; exit 1; }
+
+info "Project root: ${PROJECT_ROOT}"
+
+# ── Parse args ──────────────────────────────────────────────────────────────
+SKIP_BUILD=false
+for arg in "$@"; do
+ case "$arg" in
+ --no-build) SKIP_BUILD=true ;;
+ --help|-h)
+ echo "Usage: $0 [--no-build]"
+ echo " --no-build Skip wheel build, upload whatever is in dist/"
+ exit 0
+ ;;
+ esac
+done
+
+# ── Ensure local PyPI is running ────────────────────────────────────────────
+pypi_ready() {
+ # Just check for any HTTP response (2xx or 3xx) — an empty index still returns 200
+ local code
+ code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 3 "${PYPI_URL}/" 2>/dev/null) || return 1
+ [ "$code" -ge 200 ] && [ "$code" -lt 400 ]
+}
+
+info "Checking local PyPI at ${PYPI_URL}"
+if pypi_ready; then
+ ok "Local PyPI is already running"
+else
+ info "Local PyPI not running — starting container..."
+ cd "${PROJECT_ROOT}"
+ docker compose up -d pypi
+
+ READY=false
+ for i in $(seq 1 10); do
+ sleep 1
+ if pypi_ready; then
+ READY=true
+ break
+ fi
+ info " waiting... (${i}/10)"
+ done
+
+ if [ "$READY" = false ]; then
+ fail "Could not start local PyPI"
+ fi
+ ok "Local PyPI started"
+fi
+
+# ── Build ───────────────────────────────────────────────────────────────────
+cd "${PROJECT_ROOT}"
+info "Working directory: $(pwd)"
+
+if [ "$SKIP_BUILD" = false ]; then
+ info "Building wheel..."
+ rm -rf dist/ build/ src/*.egg-info
+
+ uv build || fail "uv build failed"
+fi
+
+WHEELS=(dist/*.whl)
+if [ ${#WHEELS[@]} -eq 0 ] || [ ! -f "${WHEELS[0]}" ]; then
+ fail "No wheel found in ${PROJECT_ROOT}/dist/. Build first or remove --no-build."
+fi
+
+ok "Wheel(s): ${WHEELS[*]}"
+
+# ── Upload ──────────────────────────────────────────────────────────────────
+# pypiserver runs with `-a . -P .` (auth disabled), but `uv publish` still
+# prompts for credentials when none are configured. Pass empty values via
+# flags to skip the prompt and run non-interactively.
+info "Uploading to ${PYPI_URL}"
+uv publish --publish-url "${PYPI_URL}" --username "" --password "" dist/*.whl \
+ || fail "uv publish failed"
+
+ok "Published to local PyPI"
+echo ""
+echo " Browse: ${PYPI_URL}/simple/"
+echo " Install: pip install --index-url ${PYPI_URL}/simple/ oshconnect"
+echo " uv: uv pip install --index-url ${PYPI_URL}/simple/ oshconnect"
+echo " uv sync: uv sync (if pyproject.toml has [[tool.uv.index]] configured)"
diff --git a/src/oshconnect/__init__.py b/src/oshconnect/__init__.py
index c4d111c..c1a2287 100644
--- a/src/oshconnect/__init__.py
+++ b/src/oshconnect/__init__.py
@@ -5,5 +5,92 @@
# Contact Email: ian@botts-inc.com
# ==============================================================================
+# Core resources
from .oshconnectapi import OSHConnect
-from .streamableresource import System, Node, Datastream, ControlStream
+from .streamableresource import Node, System, Datastream, ControlStream, StreamableModes, Status
+
+# Time management
+from .timemanagement import TimePeriod, TimeInstant, TemporalModes, TimeUtils
+
+# Resource data models
+from .resource_datamodels import (
+ SystemResource,
+ DatastreamResource,
+ ControlStreamResource,
+ ObservationResource,
+)
+
+# SWE schema components
+from .swe_components import (
+ DataRecordSchema,
+ VectorSchema,
+ QuantitySchema,
+ TimeSchema,
+ BooleanSchema,
+ CountSchema,
+ CategorySchema,
+ TextSchema,
+ QuantityRangeSchema,
+ TimeRangeSchema,
+)
+from .schema_datamodels import SWEDatastreamRecordSchema, JSONDatastreamRecordSchema, JSONCommandSchema
+
+# Event system
+from .events import EventHandler, IEventListener, CallbackListener, DefaultEventTypes, AtomicEventTypes, Event, EventBuilder
+
+# DataStore
+from .datastore import DataStore
+from .datastores import SQLiteDataStore
+
+# CS API constants
+from .csapi4py.constants import ObservationFormat, APIResourceTypes, ContentTypes
+
+__all__ = [
+ # Core resources
+ "OSHConnect",
+ "Node",
+ "System",
+ "Datastream",
+ "ControlStream",
+ "StreamableModes",
+ "Status",
+ # Time management
+ "TimePeriod",
+ "TimeInstant",
+ "TemporalModes",
+ "TimeUtils",
+ # Resource data models
+ "SystemResource",
+ "DatastreamResource",
+ "ControlStreamResource",
+ "ObservationResource",
+ # SWE schema components
+ "DataRecordSchema",
+ "VectorSchema",
+ "QuantitySchema",
+ "TimeSchema",
+ "BooleanSchema",
+ "CountSchema",
+ "CategorySchema",
+ "TextSchema",
+ "QuantityRangeSchema",
+ "TimeRangeSchema",
+ "SWEDatastreamRecordSchema",
+ "JSONDatastreamRecordSchema",
+ "JSONCommandSchema",
+ # Event system
+ "EventHandler",
+ "IEventListener",
+ "CallbackListener",
+ "DefaultEventTypes",
+ "AtomicEventTypes",
+ "Event",
+ "EventBuilder",
+ # CS API constants
+ "ObservationFormat",
+ "APIResourceTypes",
+ "ContentTypes",
+ # DataStore
+ "DataStore",
+ "SQLiteDataStore",
+]
diff --git a/src/oshconnect/csapi4py/__init__.py b/src/oshconnect/csapi4py/__init__.py
index e69de29..7cb640f 100644
--- a/src/oshconnect/csapi4py/__init__.py
+++ b/src/oshconnect/csapi4py/__init__.py
@@ -0,0 +1,22 @@
+# CS API integration layer — public re-exports for power-user access
+
+from .constants import APIResourceTypes, ObservationFormat, ContentTypes, APITerms, SystemTypes
+from .con_sys_api import ConnectedSystemsRequestBuilder, ConnectedSystemAPIRequest
+from .mqtt import MQTTCommClient
+from .default_api_helpers import APIHelper
+
+__all__ = [
+ # Constants / enums
+ "APIResourceTypes",
+ "ObservationFormat",
+ "ContentTypes",
+ "APITerms",
+ "SystemTypes",
+ # Request builder
+ "ConnectedSystemsRequestBuilder",
+ "ConnectedSystemAPIRequest",
+ # MQTT client
+ "MQTTCommClient",
+ # API helper
+ "APIHelper",
+]
diff --git a/src/oshconnect/csapi4py/default_api_helpers.py b/src/oshconnect/csapi4py/default_api_helpers.py
index 4ecce4b..f0d7e30 100644
--- a/src/oshconnect/csapi4py/default_api_helpers.py
+++ b/src/oshconnect/csapi4py/default_api_helpers.py
@@ -92,10 +92,19 @@ class APIHelper(ABC):
protocol: str = "https"
server_root: str = "sensorhub"
api_root: str = "api"
+ mqtt_topic_root: str = None
username: str = None
password: str = None
user_auth: bool = False
+ def get_mqtt_root(self) -> str:
+ """
+ Returns the root path segment used when building MQTT topic strings.
+ Defaults to ``api_root`` when ``mqtt_topic_root`` has not been set explicitly.
+ NOTE(review): unlike the old builder, no leading ``/`` is prepended to the topic — set ``mqtt_topic_root`` (e.g. ``'/api'``) if the broker expects the previous absolute-style topic.
+ """
+ return self.mqtt_topic_root if self.mqtt_topic_root is not None else self.api_root
+
def create_resource(self, res_type: APIResourceTypes, json_data: any, parent_res_id: str = None,
from_collection: bool = False, url_endpoint: str = None, req_headers: dict = None):
"""
@@ -284,25 +293,27 @@ def set_protocol(self, protocol: str):
self.protocol = protocol
# TODO: add validity checking for resource type combinations
- def get_mqtt_topic(self, resource_type, subresource_type, resource_id: str, subresource_id: str = None):
+ def get_mqtt_topic(self, resource_type, subresource_type, resource_id: str, subresource_id: str = None,
+ data_topic: bool = True):
"""
Returns the MQTT topic for the resource type, does not check for validity of the resource type combination
- :param resource_type : The API resource type of the resource that comes first in the URL, cannot be None
+ :param resource_type: The API resource type of the resource that comes first in the URL, cannot be None
:param subresource_type: The API resource type of the sub-resource that comes second in the URL, optional if there
is no sub-resource.
:param resource_id: The ID of the primary resource, can be none if the request is being made for all resources of
the given type.
:param subresource_id: The ID of the sub-resource, can be none if the request is being made for all sub-resources of
the given type.
+ :param data_topic: If True (default), appends ':data' to the subresource collection endpoint per CS API Part 3
+ spec for Resource Data Topics. Set to False for Resource Event Topics (no suffix).
:return:
"""
+ data_suffix = ':data' if data_topic else ''
subresource_endpoint = f'/{resource_type_to_endpoint(subresource_type)}'
resource_endpoint = "" if resource_type is None else f'/{resource_type_to_endpoint(resource_type)}'
resource_ident = "" if resource_id is None else f'/{resource_id}'
subresource_ident = "" if subresource_id is None else f'/{subresource_id}'
- topic_locator = f'/{self.api_root}{resource_endpoint}{resource_ident}{subresource_endpoint}{subresource_ident}'
- print(f'MQTT Topic: {topic_locator}')
-
+ topic_locator = f'{self.get_mqtt_root()}{resource_endpoint}{resource_ident}{subresource_endpoint}{data_suffix}{subresource_ident}'
return topic_locator
diff --git a/src/oshconnect/csapi4py/mqtt.py b/src/oshconnect/csapi4py/mqtt.py
index 5fb95d0..69f2bd0 100644
--- a/src/oshconnect/csapi4py/mqtt.py
+++ b/src/oshconnect/csapi4py/mqtt.py
@@ -1,8 +1,12 @@
+import logging
import paho.mqtt.client as mqtt
+logger = logging.getLogger(__name__)
+
class MQTTCommClient:
- def __init__(self, url, port=1883, username=None, password=None, path='mqtt', client_id_suffix="", transport='tcp'):
+ def __init__(self, url, port=1883, username=None, password=None, path='mqtt', client_id_suffix="",
+ transport='tcp', use_tls=False, reconnect_delay=5):
"""
Wraps a paho mqtt client to provide a simple interface for interacting with the mqtt server that is customized
for this library.
@@ -13,59 +17,78 @@ def __init__(self, url, port=1883, username=None, password=None, path='mqtt', cl
:param username: used if node is requiring authentication to access this service
:param password: used if node is requiring authentication to access this service
:param path: used for setting the path when using websockets (usually sensorhub/mqtt by default)
+ :param transport: 'tcp' (default) or 'websockets'
+ :param use_tls: explicitly enable TLS; when False (default), the connection is unencrypted and any credentials are sent in cleartext
+ :param reconnect_delay: seconds between automatic reconnect attempts on disconnect (0 disables)
"""
self.__url = url
self.__port = port
self.__path = path
self.__client_id = f'oscapy_mqtt-{client_id_suffix}'
self.__transport = transport
+ self.__reconnect_delay = reconnect_delay
- self.__client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, client_id=self.__client_id)
+ self.__client = mqtt.Client(
+ mqtt.CallbackAPIVersion.VERSION2,
+ client_id=self.__client_id,
+ transport=self.__transport,
+ )
if self.__transport == 'websockets':
self.__client.ws_set_options(path=self.__path)
if username is not None and password is not None:
self.__client.username_pw_set(username, password)
+
+ if use_tls:
self.__client.tls_set(tls_version=mqtt.ssl.PROTOCOL_TLSv1_2)
- self.__client.on_connect = self.on_connect
- self.__client.on_subscribe = self.on_subscribe
- self.__client.on_message = self.on_message
- self.__client.on_publish = self.on_publish
- self.__client.on_log = self.on_log
- self.__client.on_disconnect = self.on_disconnect
+ self.__client.on_connect = self._on_connect
+ self.__client.on_subscribe = self._on_subscribe
+ self.__client.on_message = self._on_message
+ self.__client.on_publish = self._on_publish
+ self.__client.on_log = self._on_log
+ self.__client.on_disconnect = self._on_disconnect
self.__is_connected = False
- @staticmethod
- def on_connect(client, userdata, flags, rc, properties):
- print(f'Connected with result code: {rc}')
- print(f'{properties}')
+ def _on_connect(self, client, userdata, flags, rc, properties):
+ if rc == mqtt.MQTT_ERR_SUCCESS:
+ self.__is_connected = True
+ logger.info('MQTT connected to %s:%s (rc=%s)', self.__url, self.__port, rc)
+ else:
+ self.__is_connected = False
+ logger.error('MQTT connection failed: rc=%s (%s)', rc, mqtt.error_string(rc))
- @staticmethod
- def on_subscribe(client, userdata, mid, granted_qos, properties):
- print(f'Subscribed: {mid} {granted_qos}')
+ def _on_subscribe(self, client, userdata, mid, granted_qos, properties):
+ logger.debug('MQTT subscribed: mid=%s granted_qos=%s', mid, granted_qos)
- @staticmethod
- def on_message(client, userdata, msg):
- print(f'{msg.payload.decode("utf-8")}')
+ def _on_message(self, client, userdata, msg):
+ logger.debug('MQTT message on %s: %s bytes', msg.topic, len(msg.payload))
- @staticmethod
- def on_publish(client, userdata, mid, info, properties):
- print(f'Published: {mid}')
+ def _on_publish(self, client, userdata, mid, info, properties):
+ logger.debug('MQTT published: mid=%s', mid)
- @staticmethod
- def on_log(client, userdata, level, buf):
- print(f'Log: {buf}')
+ def _on_log(self, client, userdata, level, buf):
+ logger.debug('MQTT paho: %s', buf)
- @staticmethod
- def on_disconnect(client, userdata, dc_flag, rc, properties):
- print(f'Client {client} disconnected: {dc_flag} {rc}')
+ def _on_disconnect(self, client, userdata, dc_flag, rc, properties):
+ self.__is_connected = False
+ if rc == mqtt.MQTT_ERR_SUCCESS:
+ logger.info('MQTT disconnected cleanly from %s:%s', self.__url, self.__port)
+ else:
+ logger.warning('MQTT unexpected disconnect from %s:%s rc=%s (%s) — will attempt reconnect',
+ self.__url, self.__port, rc, mqtt.error_string(rc))
def connect(self, keepalive=60):
- # print(f'Connecting to {self.__url}:{self.__port}')
- self.__client.connect(self.__url, self.__port, keepalive=keepalive)
+ logger.info('MQTT connecting to %s:%s', self.__url, self.__port)
+ try:
+ self.__client.connect(self.__url, self.__port, keepalive=keepalive)
+ if self.__reconnect_delay > 0:
+ self.__client.reconnect_delay_set(min_delay=1, max_delay=self.__reconnect_delay)
+ except Exception as exc:
+ logger.error('MQTT connect failed: %s', exc)
+ raise
def subscribe(self, topic, qos=0, msg_callback=None):
"""
@@ -77,15 +100,23 @@ def subscribe(self, topic, qos=0, msg_callback=None):
:param msg_callback: callback with the form: callback(client, userdata, msg)
:return:
"""
+ if not self.__is_connected:
+ logger.warning('MQTT subscribe called on %s while not connected — message will be queued by paho', topic)
self.__client.subscribe(topic, qos)
if msg_callback is not None:
self.__client.message_callback_add(topic, msg_callback)
+ logger.debug('MQTT subscribed to topic: %s (qos=%s)', topic, qos)
def publish(self, topic, payload=None, qos=0, retain=False):
- self.__client.publish(topic, payload, qos, retain=retain)
+ if not self.__is_connected:
+ logger.warning('MQTT publish called on %s while not connected — message may be lost', topic)
+ result = self.__client.publish(topic, payload, qos, retain=retain)
+ if result.rc != mqtt.MQTT_ERR_SUCCESS:
+ logger.error('MQTT publish error on %s: rc=%s (%s)', topic, result.rc, mqtt.error_string(result.rc))
def unsubscribe(self, topic):
self.__client.unsubscribe(topic)
+ logger.debug('MQTT unsubscribed from topic: %s', topic)
def disconnect(self):
self.__client.disconnect()
@@ -165,7 +196,7 @@ def set_on_message_callback(self, sub, on_message_callback):
def start(self):
"""
- Start the MQTT client in a separate thread. This is required for the client to be able to receive messages.
+ Start the MQTT client network loop in a background thread.
:return:
"""
@@ -173,25 +204,14 @@ def start(self):
def stop(self):
"""
- Stop the MQTT client.
+ Stop the MQTT client network loop and disconnect cleanly.
:return:
"""
self.__client.loop_stop()
- def __toggle_is_connected(self):
- self.__is_connected = not self.__is_connected
-
def is_connected(self):
return self.__is_connected
- @staticmethod
- def publish_single(self, topic, msg):
- self.__client.single(topic, msg, 0)
-
- @staticmethod
- def publish_multiple(self, topic, msgs):
- self.__client.multiple(msgs, )
-
def tls_set(self):
self.__client.tls_set()
diff --git a/src/oshconnect/datastore.py b/src/oshconnect/datastore.py
index ecbc38c..9a310f7 100644
--- a/src/oshconnect/datastore.py
+++ b/src/oshconnect/datastore.py
@@ -5,6 +5,142 @@
# Contact Email: ian@botts-inc.com
# ==============================================================================
-class DataStore:
- def __init__(self):
- pass
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Optional
+
+if TYPE_CHECKING:
+ from .streamableresource import Node, System, Datastream, ControlStream, SessionManager
+
+
+class DataStore(ABC):
+ """Abstract interface for persisting OSHConnect resource graphs.
+
+ Implementations must provide CRUD operations for Node, System, Datastream,
+ and ControlStream objects. Observations are out of scope.
+
+ The ``load_all`` / ``load_node`` / ``load_all_nodes`` methods accept an
+ optional *session_manager* so that deserialized Nodes can register a client
+ session — required because ``StreamableResource.__init__`` calls
+ ``node.register_streamable()``, which needs an active session.
+ """
+
+ # ------------------------------------------------------------------
+ # Node
+ # ------------------------------------------------------------------
+
+ @abstractmethod
+ def save_node(self, node: Node) -> None:
+ """Persist a Node (upsert semantics)."""
+ ...
+
+ @abstractmethod
+ def load_node(self, node_id: str, session_manager: SessionManager = None) -> Optional[Node]:
+ """Load a single Node by its string ID. Returns None if not found."""
+ ...
+
+ @abstractmethod
+ def load_all_nodes(self, session_manager: SessionManager = None) -> list[Node]:
+ """Load all stored Nodes."""
+ ...
+
+ @abstractmethod
+ def delete_node(self, node_id: str) -> None:
+ """Delete a Node row. Does NOT cascade-delete child resources."""
+ ...
+
+ # ------------------------------------------------------------------
+ # System
+ # ------------------------------------------------------------------
+
+ @abstractmethod
+ def save_system(self, system: System, node: Node) -> None:
+ """Persist a System under the given Node (upsert)."""
+ ...
+
+ @abstractmethod
+ def load_system(self, system_id: str, node: Node) -> Optional[System]:
+ """Load a single System by its internal UUID string. Returns None if not found."""
+ ...
+
+ @abstractmethod
+ def load_systems_for_node(self, node_id: str, node: Node) -> list[System]:
+ """Load all Systems stored under *node_id*."""
+ ...
+
+ @abstractmethod
+ def delete_system(self, system_id: str) -> None:
+ """Delete a System row."""
+ ...
+
+ # ------------------------------------------------------------------
+ # Datastream
+ # ------------------------------------------------------------------
+
+ @abstractmethod
+ def save_datastream(self, datastream: Datastream, node: Node) -> None:
+ """Persist a Datastream (upsert)."""
+ ...
+
+ @abstractmethod
+ def load_datastream(self, datastream_id: str, node: Node) -> Optional[Datastream]:
+ """Load a single Datastream by its internal UUID string."""
+ ...
+
+ @abstractmethod
+ def load_datastreams_for_system(self, system_id: str, node: Node) -> list[Datastream]:
+ """Load all Datastreams whose *parent_resource_id* matches *system_id*."""
+ ...
+
+ @abstractmethod
+ def delete_datastream(self, datastream_id: str) -> None:
+ """Delete a Datastream row."""
+ ...
+
+ # ------------------------------------------------------------------
+ # ControlStream
+ # ------------------------------------------------------------------
+
+ @abstractmethod
+ def save_controlstream(self, controlstream: ControlStream, node: Node) -> None:
+ """Persist a ControlStream (upsert)."""
+ ...
+
+ @abstractmethod
+ def load_controlstream(self, controlstream_id: str, node: Node) -> Optional[ControlStream]:
+ """Load a single ControlStream by its internal UUID string."""
+ ...
+
+ @abstractmethod
+ def load_controlstreams_for_system(self, system_id: str, node: Node) -> list[ControlStream]:
+ """Load all ControlStreams whose *parent_resource_id* matches *system_id*."""
+ ...
+
+ @abstractmethod
+ def delete_controlstream(self, controlstream_id: str) -> None:
+ """Delete a ControlStream row."""
+ ...
+
+ # ------------------------------------------------------------------
+ # Bulk operations
+ # ------------------------------------------------------------------
+
+ @abstractmethod
+ def save_all(self, nodes: list[Node]) -> None:
+ """Persist an entire Node graph (nodes + their systems + streams)."""
+ ...
+
+ @abstractmethod
+ def load_all(self, session_manager: SessionManager = None) -> list[Node]:
+ """Reconstruct the full graph from storage, returning top-level Nodes.
+
+ Pass *session_manager* so reconstructed Nodes can register a client
+ session — required for their child resources to initialise correctly.
+ """
+ ...
+
+ @abstractmethod
+ def close(self) -> None:
+ """Release any held resources (file handles, connections)."""
+ ...
diff --git a/src/oshconnect/datastores/__init__.py b/src/oshconnect/datastores/__init__.py
new file mode 100644
index 0000000..66318b0
--- /dev/null
+++ b/src/oshconnect/datastores/__init__.py
@@ -0,0 +1,7 @@
+# ==============================================================================
+# Copyright (c) 2024 Botts Innovative Research, Inc.
+# ==============================================================================
+
+from .sqlite_store import SQLiteDataStore
+
+__all__ = ["SQLiteDataStore"]
diff --git a/src/oshconnect/datastores/sqlite_store.py b/src/oshconnect/datastores/sqlite_store.py
new file mode 100644
index 0000000..6062bb8
--- /dev/null
+++ b/src/oshconnect/datastores/sqlite_store.py
@@ -0,0 +1,254 @@
+# ==============================================================================
+# Copyright (c) 2024 Botts Innovative Research, Inc.
+# Date: 2024/5/28
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# ==============================================================================
+
+from __future__ import annotations
+
+import json
+import sqlite3
+from pathlib import Path
+from typing import Optional
+
+from ..datastore import DataStore
+from ..streamableresource import (
+ ControlStream,
+ Datastream,
+ Node,
+ SessionManager,
+ System,
+)
+
+
+class SQLiteDataStore(DataStore):
+ """SQLite-backed DataStore implementation using Python's stdlib ``sqlite3``.
+
+ Pass ``db_path=":memory:"`` for in-process testing with no file I/O.
+
+ Schema notes
+ ------------
+ Each resource type is stored as a single JSON blob (the output of its
+ ``serialize()`` method) alongside a primary-key string ID and any foreign-key
+ columns needed for filtered lookups. Using blobs means new Pydantic fields
+ do not require schema migrations.
+
+ *Bulk operations* (``save_all`` / ``load_all``) work at the Node level:
+ ``save_all`` persists every resource separately for individual lookups;
+ ``load_all`` reconstructs the full hierarchy from the *nodes* table only
+ (``Node.deserialize`` handles the embedded systems/streams), avoiding
+ duplication.
+ """
+
+ def __init__(self, db_path: str | Path = "oshconnect.db") -> None:
+ self._db_path = Path(db_path) if db_path != ":memory:" else db_path
+ self._conn: sqlite3.Connection = sqlite3.connect(
+ str(self._db_path), check_same_thread=False
+ )
+ self._conn.row_factory = sqlite3.Row
+ self._create_tables()
+
+ # ------------------------------------------------------------------
+ # Internal helpers
+ # ------------------------------------------------------------------
+
+ def _create_tables(self) -> None:
+ self._conn.executescript("""
+ CREATE TABLE IF NOT EXISTS nodes (
+ id TEXT PRIMARY KEY,
+ data TEXT NOT NULL
+ );
+ CREATE TABLE IF NOT EXISTS systems (
+ id TEXT PRIMARY KEY,
+ node_id TEXT NOT NULL,
+ data TEXT NOT NULL
+ );
+ CREATE TABLE IF NOT EXISTS datastreams (
+ id TEXT PRIMARY KEY,
+ system_id TEXT,
+ node_id TEXT NOT NULL,
+ data TEXT NOT NULL
+ );
+ CREATE TABLE IF NOT EXISTS controlstreams (
+ id TEXT PRIMARY KEY,
+ system_id TEXT,
+ node_id TEXT NOT NULL,
+ data TEXT NOT NULL
+ );
+ """)
+ self._conn.commit()
+
+ def _execute(self, sql: str, params: tuple = ()) -> sqlite3.Cursor:
+ return self._conn.execute(sql, params)
+
+ # ------------------------------------------------------------------
+ # Node
+ # ------------------------------------------------------------------
+
+ def save_node(self, node: Node) -> None:
+ data = json.dumps(node.serialize())
+ self._execute(
+ "INSERT OR REPLACE INTO nodes (id, data) VALUES (?, ?)",
+ (node.get_id(), data),
+ )
+ self._conn.commit()
+
+ def load_node(
+ self, node_id: str, session_manager: Optional[SessionManager] = None
+ ) -> Optional[Node]:
+ row = self._execute(
+ "SELECT data FROM nodes WHERE id = ?", (node_id,)
+ ).fetchone()
+ if row is None:
+ return None
+ return Node.deserialize(json.loads(row["data"]), session_manager=session_manager)
+
+ def load_all_nodes(
+ self, session_manager: Optional[SessionManager] = None
+ ) -> list[Node]:
+ rows = self._execute("SELECT data FROM nodes").fetchall()
+ return [
+ Node.deserialize(json.loads(r["data"]), session_manager=session_manager)
+ for r in rows
+ ]
+
+ def delete_node(self, node_id: str) -> None:
+ self._execute("DELETE FROM nodes WHERE id = ?", (node_id,))
+ self._conn.commit()
+
+ # ------------------------------------------------------------------
+ # System
+ # ------------------------------------------------------------------
+
+ def save_system(self, system: System, node: Node) -> None:
+ system_id = str(system.get_internal_id())
+ data = json.dumps(system.serialize())
+ self._execute(
+ "INSERT OR REPLACE INTO systems (id, node_id, data) VALUES (?, ?, ?)",
+ (system_id, node.get_id(), data),
+ )
+ self._conn.commit()
+
+ def load_system(self, system_id: str, node: Node) -> Optional[System]:
+ row = self._execute(
+ "SELECT data FROM systems WHERE id = ?", (system_id,)
+ ).fetchone()
+ if row is None:
+ return None
+ return System.deserialize(json.loads(row["data"]), node)
+
+ def load_systems_for_node(self, node_id: str, node: Node) -> list[System]:
+ rows = self._execute(
+ "SELECT data FROM systems WHERE node_id = ?", (node_id,)
+ ).fetchall()
+ return [System.deserialize(json.loads(r["data"]), node) for r in rows]
+
+ def delete_system(self, system_id: str) -> None:
+ self._execute("DELETE FROM systems WHERE id = ?", (system_id,))
+ self._conn.commit()
+
+ # ------------------------------------------------------------------
+ # Datastream
+ # ------------------------------------------------------------------
+
+ def save_datastream(self, datastream: Datastream, node: Node) -> None:
+ ds_id = str(datastream.get_internal_id())
+ system_id = datastream.get_parent_resource_id()
+ data = json.dumps(datastream.serialize())
+ self._execute(
+ "INSERT OR REPLACE INTO datastreams (id, system_id, node_id, data) VALUES (?, ?, ?, ?)",
+ (ds_id, system_id, node.get_id(), data),
+ )
+ self._conn.commit()
+
+ def load_datastream(self, datastream_id: str, node: Node) -> Optional[Datastream]:
+ row = self._execute(
+ "SELECT data FROM datastreams WHERE id = ?", (datastream_id,)
+ ).fetchone()
+ if row is None:
+ return None
+ return Datastream.deserialize(json.loads(row["data"]), node)
+
+ def load_datastreams_for_system(self, system_id: str, node: Node) -> list[Datastream]:
+ rows = self._execute(
+ "SELECT data FROM datastreams WHERE system_id = ?", (system_id,)
+ ).fetchall()
+ return [Datastream.deserialize(json.loads(r["data"]), node) for r in rows]
+
+ def delete_datastream(self, datastream_id: str) -> None:
+ self._execute("DELETE FROM datastreams WHERE id = ?", (datastream_id,))
+ self._conn.commit()
+
+ # ------------------------------------------------------------------
+ # ControlStream
+ # ------------------------------------------------------------------
+
+ def save_controlstream(self, controlstream: ControlStream, node: Node) -> None:
+ cs_id = str(controlstream.get_internal_id())
+ system_id = controlstream.get_parent_resource_id()
+ data = json.dumps(controlstream.serialize())
+ self._execute(
+ "INSERT OR REPLACE INTO controlstreams (id, system_id, node_id, data) VALUES (?, ?, ?, ?)",
+ (cs_id, system_id, node.get_id(), data),
+ )
+ self._conn.commit()
+
+ def load_controlstream(self, controlstream_id: str, node: Node) -> Optional[ControlStream]:
+ row = self._execute(
+ "SELECT data FROM controlstreams WHERE id = ?", (controlstream_id,)
+ ).fetchone()
+ if row is None:
+ return None
+ return ControlStream.deserialize(json.loads(row["data"]), node)
+
+ def load_controlstreams_for_system(self, system_id: str, node: Node) -> list[ControlStream]:
+ rows = self._execute(
+ "SELECT data FROM controlstreams WHERE system_id = ?", (system_id,)
+ ).fetchall()
+ return [ControlStream.deserialize(json.loads(r["data"]), node) for r in rows]
+
+ def delete_controlstream(self, controlstream_id: str) -> None:
+ self._execute("DELETE FROM controlstreams WHERE id = ?", (controlstream_id,))
+ self._conn.commit()
+
+ # ------------------------------------------------------------------
+ # Bulk operations
+ # ------------------------------------------------------------------
+
+ def save_all(self, nodes: list[Node]) -> None:
+ """Walk the full Node graph and persist every resource individually."""
+ for node in nodes:
+ self.save_node(node)
+ for system in node.systems():
+ self.save_system(system, node)
+ for ds in system.datastreams:
+ self.save_datastream(ds, node)
+ for cs in system.control_channels:
+ self.save_controlstream(cs, node)
+
+ def load_all(
+ self, session_manager: Optional[SessionManager] = None
+ ) -> list[Node]:
+ """Reconstruct the full resource graph from the nodes table.
+
+ ``Node.deserialize`` handles the embedded systems/datastreams/
+ controlstreams hierarchy, so only the *nodes* table is used here.
+ The individual resource tables (systems, datastreams, controlstreams)
+ exist for targeted single-resource lookups and are not consulted here
+ to avoid double-instantiation.
+ """
+ return self.load_all_nodes(session_manager=session_manager)
+
+ def clear(self) -> None:
+ """Delete all persisted resources from every table."""
+ self._conn.executescript("""
+ DELETE FROM controlstreams;
+ DELETE FROM datastreams;
+ DELETE FROM systems;
+ DELETE FROM nodes;
+ """)
+ self._conn.commit()
+
+ def close(self) -> None:
+ self._conn.close()
diff --git a/src/oshconnect/eventbus.py b/src/oshconnect/eventbus.py
index 308f5ac..e447103 100644
--- a/src/oshconnect/eventbus.py
+++ b/src/oshconnect/eventbus.py
@@ -1,31 +1,25 @@
# =============================================================================
# Copyright (c) 2025 Botts Innovative Research Inc.
-# Date: 2025/10/6
-# Author: Ian Patterson
-# Contact Email: ian@botts-inc.com
# =============================================================================
-import collections
-from typing import Any
-from uuid import UUID
-from abc import ABC
+#
+# Backward-compatibility shim — all event symbols now live in the `events`
+# sub-package. Importing from this module continues to work but prefer:
+#
+# from oshconnect.events import EventHandler, DefaultEventTypes, ...
+#
+# -----------------------------------------------------------------------------
+from .events.core import Event, DefaultEventTypes, AtomicEventTypes
+from .events.handler import EventHandler
+from .events.listeners import IEventListener, CallbackListener
+from .events.builder import EventBuilder
-class Event(ABC):
- """
- A base class for events in the event bus system.
- """
- id: UUID
- topic: str
- payload: Any
-
- def __init__(self, id: UUID, topic: str, payload: Any):
- self.id = id
- self.topic = topic
- self.payload = payload
-
-
-class EventBus(ABC):
- """
- A base class for an event bus system.
- """
- _deque: collections.deque
+__all__ = [
+ "Event",
+ "DefaultEventTypes",
+ "AtomicEventTypes",
+ "EventHandler",
+ "IEventListener",
+ "CallbackListener",
+ "EventBuilder",
+]
diff --git a/src/oshconnect/events/__init__.py b/src/oshconnect/events/__init__.py
new file mode 100644
index 0000000..4560623
--- /dev/null
+++ b/src/oshconnect/events/__init__.py
@@ -0,0 +1,21 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/10/6
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+
+from .core import Event, DefaultEventTypes, AtomicEventTypes
+from .handler import EventHandler
+from .listeners import IEventListener, CallbackListener
+from .builder import EventBuilder
+
+__all__ = [
+ "Event",
+ "DefaultEventTypes",
+ "AtomicEventTypes",
+ "EventHandler",
+ "IEventListener",
+ "CallbackListener",
+ "EventBuilder",
+]
diff --git a/src/oshconnect/events/builder.py b/src/oshconnect/events/builder.py
new file mode 100644
index 0000000..e7df60f
--- /dev/null
+++ b/src/oshconnect/events/builder.py
@@ -0,0 +1,60 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/10/6
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+
+from __future__ import annotations
+
+import datetime
+from abc import ABC
+from typing import Any, Union
+
+from .core import DefaultEventTypes, Event
+
+
+class EventBuilder(ABC):
+ _event: Event
+
+ def __init__(self):
+ self._event: Event = Event.blank_event()
+
+ def with_type(self, event_type: DefaultEventTypes) -> EventBuilder:
+ self._event.type = event_type
+ return self
+
+ def with_topic(self, topic: str) -> EventBuilder:
+ self._event.topic = topic
+ return self
+
+ def with_data(self, data: Any) -> EventBuilder:
+ self._event.data = data
+ return self
+
+ def with_producer(self, producer: Any) -> EventBuilder:
+ self._event.producer = producer
+ return self
+
+ def with_timestamp(self, timestamp: datetime.datetime) -> EventBuilder:
+ self._event.timestamp = timestamp
+ return self
+
+ def build(self) -> Event:
+        # Shallow copy: detaches the returned Event from this builder so later
+        # `with_*` calls cannot mutate it. `data` and `producer` remain shared
+        # references (often not deep-copyable, e.g. holding a
+        # sqlite3.Connection), so they are deliberately not cloned.
+ built = self._event.model_copy(deep=False)
+ self.reset()
+ return built
+
+ def reset(self) -> None:
+ self._event = Event.blank_event()
+
+ @staticmethod
+ def create_topic(base_topic: DefaultEventTypes, resource_id: Union[str, None] = None) -> str:
+ if resource_id:
+ return f"{base_topic.value}/{resource_id}"
+ else:
+ return base_topic.value
diff --git a/src/oshconnect/events/core.py b/src/oshconnect/events/core.py
new file mode 100644
index 0000000..6d2af83
--- /dev/null
+++ b/src/oshconnect/events/core.py
@@ -0,0 +1,70 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/10/6
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+
+from __future__ import annotations
+
+import datetime
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, ConfigDict
+
+
+class DefaultEventTypes(Enum):
+ ADD_NODE: str = "add_node"
+ REMOVE_NODE: str = "remove_node"
+ ADD_SYSTEM: str = "add_system"
+ REMOVE_SYSTEM: str = "remove_system"
+ ADD_DATASTREAM: str = "add_datastream"
+ REMOVE_DATASTREAM: str = "remove_datastream"
+ ADD_CONTROLSTREAM: str = "add_controlstream"
+ REMOVE_CONTROLSTREAM: str = "remove_controlstream"
+ NEW_OBSERVATION: str = "new_observation"
+ NEW_COMMAND: str = "new_command"
+ NEW_COMMAND_STATUS: str = "new_command_status"
+
+
+class AtomicEventTypes(Enum):
+ """
+ Defines atomic event types for local resource operations.
+
+ Attributes:
+ CREATE (str): Creating a resource within OSHConnect (local, in-app).
+ POST (str): Posting a resource to an external server.
+ GET (str): Retrieving a resource from an external server.
+ MODIFY (str): Modifying a resource within OSHConnect (local, in-app).
+ UPDATE (str): Updating a resource on an external server.
+ REMOVE (str): Removing a resource within OSHConnect (local, in-app).
+ DELETE (str): Deleting a resource from an external server.
+ """
+ CREATE: str = "create"
+ POST: str = "post"
+ GET: str = "get"
+ MODIFY: str = "modify"
+ UPDATE: str = "update"
+ REMOVE: str = "remove"
+ DELETE: str = "delete"
+
+
+class Event(BaseModel):
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+ timestamp: datetime.datetime
+ type: DefaultEventTypes
+ topic: str
+ data: Any
+ producer: Any
+
+ @classmethod
+ def blank_event(cls) -> Event:
+ return cls(
+ timestamp=datetime.datetime.now(),
+ type=DefaultEventTypes.NEW_OBSERVATION,
+ topic="",
+ data=None,
+ producer=None
+ )
diff --git a/src/oshconnect/events/handler.py b/src/oshconnect/events/handler.py
new file mode 100644
index 0000000..7a340d4
--- /dev/null
+++ b/src/oshconnect/events/handler.py
@@ -0,0 +1,154 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/10/6
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+
+from __future__ import annotations
+
+import logging
+from collections import deque
+from typing import Callable
+
+from .core import DefaultEventTypes, Event
+from .listeners import CallbackListener, IEventListener
+
+
+class EventHandler(object):
+ """
+ Singleton event bus. Manages listener registration and event dispatch.
+
+ Listeners are filtered by type and topic before dispatch — a listener only
+ receives events whose type is in ``listener.types`` (empty = all types) AND
+ whose topic is in ``listener.topics`` (empty = all topics).
+
+ Usage — functional style (no subclassing)::
+
+ handler = EventHandler()
+
+ def on_obs(event: Event):
+ print(event.data)
+
+ listener = handler.subscribe(on_obs, types=[DefaultEventTypes.NEW_OBSERVATION])
+ # later: handler.unregister_listener(listener)
+
+ Usage — subclass style::
+
+ class MyListener(IEventListener):
+ def handle_events(self, event: Event):
+ ...
+
+ handler.register_listener(MyListener(types=[DefaultEventTypes.ADD_SYSTEM]))
+ """
+
+ listeners: list[IEventListener] = []
+ to_add: list[IEventListener] = []
+ to_remove: list[IEventListener] = []
+ event_queue: deque[Event] = deque()
+ publish_lock: bool = False
+
+ def __new__(cls):
+ if not hasattr(cls, "instance"):
+ cls.instance = super(EventHandler, cls).__new__(cls)
+ return cls.instance
+
+ # ------------------------------------------------------------------
+ # Registration
+ # ------------------------------------------------------------------
+
+ def register_listener(self, listener: IEventListener):
+ if listener not in self.listeners:
+ if not self.publish_lock:
+ self.listeners.append(listener)
+ else:
+ self.to_add.append(listener)
+
+ def unregister_listener(self, listener: IEventListener):
+ if not self.publish_lock:
+ if listener in self.listeners:
+ self.listeners.remove(listener)
+ else:
+ self.to_remove.append(listener)
+
+ def subscribe(
+ self,
+ callback: Callable[[Event], None],
+ types: list[DefaultEventTypes] = None,
+ topics: list[str] = None,
+ ) -> CallbackListener:
+ """
+ Register a plain callable as a listener.
+
+ :param callback: Function to call when a matching event is published.
+ :param types: Event types to filter on. ``None`` / empty = all types.
+ :param topics: MQTT/event topics to filter on. ``None`` / empty = all topics.
+ :returns: The ``CallbackListener`` — keep a reference to unregister later.
+ """
+ listener = CallbackListener(
+ topics=topics or [],
+ types=types or [],
+ callback=callback,
+ )
+ self.register_listener(listener)
+ return listener
+
+ # ------------------------------------------------------------------
+ # Publishing
+ # ------------------------------------------------------------------
+
+ def _matches(self, listener: IEventListener, evt: Event) -> bool:
+ """Return True if *evt* passes the listener's type and topic filters."""
+ type_match = not listener.types or evt.type in listener.types
+ topic_match = not listener.topics or evt.topic in listener.topics
+ return type_match and topic_match
+
+ def publish(self, evt: Event):
+ if self.publish_lock:
+ self.event_queue.append(evt)
+ return
+
+ self.publish_lock = True
+ try:
+ for listener in self.listeners:
+ if self._matches(listener, evt):
+ try:
+ listener.handle_events(evt)
+ except Exception as e:
+ logging.error("Error in event listener %s: %s", listener, e)
+ finally:
+ self.publish_lock = False
+ self.commit_changes()
+
+ # ------------------------------------------------------------------
+ # Deferred add/remove bookkeeping
+ # ------------------------------------------------------------------
+
+ def commit_changes(self):
+ self.commit_removes()
+ self.commit_adds()
+ while self.event_queue:
+ self.publish(self.event_queue.popleft())
+
+ def commit_adds(self):
+ for listener in self.to_add:
+ self.listeners.append(listener)
+ self.to_add.clear()
+
+ def commit_removes(self):
+ for listener in self.to_remove:
+ if listener in self.listeners:
+ self.listeners.remove(listener)
+ self.to_remove.clear()
+
+ # ------------------------------------------------------------------
+ # Utilities
+ # ------------------------------------------------------------------
+
+ def clear_listeners(self):
+ self.listeners.clear()
+ self.to_add.clear()
+ self.to_remove.clear()
+
+ def get_num_listeners(self) -> int:
+ return len(self.listeners)
diff --git a/src/oshconnect/events/listeners.py b/src/oshconnect/events/listeners.py
new file mode 100644
index 0000000..4b311b2
--- /dev/null
+++ b/src/oshconnect/events/listeners.py
@@ -0,0 +1,52 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/10/6
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from typing import Callable
+
+from .core import DefaultEventTypes, Event
+
+
+@dataclass
+class IEventListener(ABC):
+ """
+ Interface for event listeners. Subscribe to specific event types and/or topics.
+ Empty lists mean "subscribe to all" — the handler filters before dispatching.
+ """
+ topics: list[str] = field(default_factory=list)
+ types: list[DefaultEventTypes] = field(default_factory=list)
+
+ @abstractmethod
+ def handle_events(self, event: Event):
+ pass
+
+
+@dataclass
+class CallbackListener(IEventListener):
+ """
+ Concrete IEventListener that wraps a Python callable.
+ The primary user-facing subscription mechanism — no subclassing required.
+
+ Example::
+
+ def my_handler(event: Event):
+ print(event.data)
+
+ listener = CallbackListener(
+ types=[DefaultEventTypes.NEW_OBSERVATION],
+ callback=my_handler,
+ )
+ EventHandler().register_listener(listener)
+ """
+ callback: Callable[[Event], None] = field(default=None)
+
+ def handle_events(self, event: Event):
+ if self.callback is not None:
+ self.callback(event)
diff --git a/src/oshconnect/oshconnectapi.py b/src/oshconnect/oshconnectapi.py
index 2c88871..a50f802 100644
--- a/src/oshconnect/oshconnectapi.py
+++ b/src/oshconnect/oshconnectapi.py
@@ -5,9 +5,12 @@
# Contact email: ian@botts-inc.com
# ==============================================================================
import logging
-import shelve
+import json
+from typing import Callable
from uuid import UUID
+from .events import EventHandler, DefaultEventTypes, CallbackListener
+from .events.builder import EventBuilder
from .csapi4py.default_api_helpers import APIHelper
from .datastore import DataStore
from .resource_datamodels import DatastreamResource
@@ -17,29 +20,42 @@
class OSHConnect:
- _name: str = None
- datastore: DataStore = None
- styling: Styling = None
- timestream: TimeManagement = None
- _nodes: list[Node] = []
- _systems: list[System] = []
- _cs_api_builder: APIHelper = None
- # _datasource_handler: DataStreamHandler = None
- _datastreams: list[Datastream] = []
- _datataskers: list[DataStore] = []
- _datagroups: list = []
- _tasks: list = []
- _playback_mode: TemporalModes = TemporalModes.REAL_TIME
- _session_manager: SessionManager = None
-
- def __init__(self, name: str, **kwargs):
- """
- :param name: name of the OSHConnect instance, in the event that
+ _name: str
+ datastore: DataStore
+ styling: Styling
+ timestream: TimeManagement
+ _nodes: list[Node]
+ _systems: list[System]
+ _cs_api_builder: APIHelper
+ _datastreams: list[Datastream]
+ _controlstreams: list[ControlStream]
+ _datagroups: list
+ _tasks: list
+ _playback_mode: TemporalModes
+ _session_manager: SessionManager
+ _event_bus: EventHandler
+
+ def __init__(self, name: str, datastore: DataStore = None, **kwargs):
+ """
+ :param name: name of the OSHConnect instance
+ :param datastore: optional DataStore backend for persisting the resource graph
:param kwargs:
"""
self._name = name
+ self.datastore = datastore
+ self.styling = None
+ self.timestream = None
+ self._nodes = []
+ self._systems = []
+ self._cs_api_builder = None
+ self._datastreams = []
+ self._controlstreams = []
+ self._datagroups = []
+ self._tasks = []
+ self._playback_mode = TemporalModes.REAL_TIME
logging.info(f"OSHConnect instance {name} created")
self._session_manager = SessionManager()
+ self._event_bus = EventHandler()
def get_name(self):
"""
@@ -56,6 +72,11 @@ def add_node(self, node: Node):
"""
node.register_with_session_manager(self._session_manager)
self._nodes.append(node)
+ self._event_bus.publish(
+ EventBuilder().with_type(DefaultEventTypes.ADD_NODE)
+ .with_topic(EventBuilder.create_topic(DefaultEventTypes.ADD_NODE, node.get_id()))
+ .with_data(node).with_producer(self).build()
+ )
def remove_node(self, node_id: str):
"""
@@ -67,19 +88,62 @@ def remove_node(self, node_id: str):
# list of nodes in our node list that do not have the id of the node we want to remove
self._nodes = [node for node in self._nodes if
node.get_id() != node_id]
+ self._event_bus.publish(
+ EventBuilder().with_type(DefaultEventTypes.REMOVE_NODE)
+ .with_topic(EventBuilder.create_topic(DefaultEventTypes.REMOVE_NODE, node_id))
+ .with_data(node_id).with_producer(self).build()
+ )
- def save_config(self, config: dict):
+ def save_config(self):
logging.info(f"Saving configuration for {self._name}")
- with shelve.open(f"{self._name}_config") as db:
- db['app_config'] = self
- db.close()
+
+ data = {}
+ for node in self._nodes:
+ node_dict = node.serialize()
+ data.update({node.get_id(): node_dict})
+
+ # write to JSON file
+ file_path = f"{self._name}_config.json"
+ with open(file_path, 'w', encoding='utf-8') as f:
+ json.dump({"app_config": data}, f, ensure_ascii=False, indent=2)
@classmethod
def load_config(cls, file_name: str) -> 'OSHConnect':
- with shelve.open(file_name, 'r') as db:
- app = db['app_config']
- db.close()
- return app
+        """Load configuration data from a JSON file and return the stored config dict.
+        Note: despite the ``-> 'OSHConnect'`` annotation, the result is a plain dict.
+ """
+ with open(file_name, 'r', encoding='utf-8') as f:
+ obj = json.load(f)
+ return obj.get('app_config', obj)
+
+ def save_to_store(self) -> None:
+ """Persist the full node graph to the configured datastore.
+
+ :raises RuntimeError: if no datastore has been configured.
+ """
+ if self.datastore is None:
+ raise RuntimeError(
+ "No datastore configured. Pass a DataStore instance to OSHConnect()."
+ )
+ self.datastore.save_all(self._nodes)
+
+ def load_from_store(self) -> None:
+ """Restore the node graph from the configured datastore into this instance.
+
+ Reconstructed Nodes are registered with this instance's SessionManager so
+ their child resources (Systems, Datastreams, ControlStreams) can initialise
+ correctly. Calling this method appends to any already-loaded nodes.
+
+ :raises RuntimeError: if no datastore has been configured.
+ """
+ if self.datastore is None:
+ raise RuntimeError(
+ "No datastore configured. Pass a DataStore instance to OSHConnect()."
+ )
+ nodes = self.datastore.load_all(session_manager=self._session_manager)
+ for node in nodes:
+ self._nodes.append(node)
+ self._systems.extend(node.systems())
def share_config(self, config: dict):
pass
@@ -114,18 +178,6 @@ def visualize_streams(self, streams: list):
def get_visualization_recommendations(self, streams: list):
pass
- def discover_datastreams(self):
- for system in self._systems:
- res_datastreams = system.discover_datastreams()
- datastreams = list(
- map(lambda ds: Datastream(parent_node=system.get_parent_node(), id=ds.ds_id, datastream_resource=ds),
- res_datastreams))
-
- for ds in datastreams:
- ds.set_parent_resource_id(system.get_underlying_resource().system_id)
- # datastreams = [ds.set_parent_resource_id(system.get_underlying_resource().system_id) for ds in datastreams]
- self._datastreams.extend(datastreams)
-
def discover_systems(self, nodes: list[str] = None):
"""
Discover systems from the nodes that have been added to the OSHConnect instance. They are associated with the
@@ -141,16 +193,37 @@ def discover_systems(self, nodes: list[str] = None):
for node in search_nodes:
res_systems = node.discover_systems()
self._systems.extend(res_systems)
+ for system in res_systems:
+ self._event_bus.publish(
+ EventBuilder().with_type(DefaultEventTypes.ADD_SYSTEM)
+ .with_topic(EventBuilder.create_topic(DefaultEventTypes.ADD_SYSTEM,
+ getattr(system, '_resource_id', None)))
+ .with_data(system).with_producer(self).build()
+ )
+
+ def discover_datastreams(self):
+ for system in self._systems:
+ datastreams = system.discover_datastreams()
+ self._datastreams.extend(datastreams)
+ for ds in datastreams:
+ self._event_bus.publish(
+ EventBuilder().with_type(DefaultEventTypes.ADD_DATASTREAM)
+ .with_topic(EventBuilder.create_topic(DefaultEventTypes.ADD_DATASTREAM,
+ getattr(ds, '_resource_id', None)))
+ .with_data(ds).with_producer(self).build()
+ )
def discover_controlstreams(self, streams: list):
for system in self._systems:
- res_controlstreams = system.discover_controlstreams()
- controlstreams = list(
- map(lambda cs: ControlStream(parent_node=system.get_parent_node(), id=cs.cs_id,
- controlstream_resource=cs), res_controlstreams))
+ controlstreams = system.discover_controlstreams()
+ self._controlstreams.extend(controlstreams)
for cs in controlstreams:
- cs.set_parent_resource_id(system.get_underlying_resource().system_id)
- self._datataskers.extend(controlstreams)
+ self._event_bus.publish(
+ EventBuilder().with_type(DefaultEventTypes.ADD_CONTROLSTREAM)
+ .with_topic(EventBuilder.create_topic(DefaultEventTypes.ADD_CONTROLSTREAM,
+ getattr(cs, '_resource_id', None)))
+ .with_data(cs).with_producer(self).build()
+ )
def authenticate_user(self, user: dict):
pass
@@ -303,3 +376,57 @@ def start_systems(self, sysid_list: list = None):
systems = self.get_resource_group(sysid_list)[0]
for system in systems:
system.start()
+
+ # ------------------------------------------------------------------
+ # Event subscription convenience methods
+ # ------------------------------------------------------------------
+
+ def on_observation(self, callback: Callable, datastream_id: str = None) -> CallbackListener:
+ """
+ Subscribe to incoming observation events.
+
+ :param callback: ``fn(event: Event)`` called for each matching event.
+        :param datastream_id: When provided, only events from that datastream are
+            delivered (matched via its MQTT data topic). If the id is unknown or
+            the stream has no topic, or when omitted, all observation events match.
+ :returns: ``CallbackListener`` — pass to ``event_bus.unregister_listener()`` to cancel.
+ """
+ topic_filter = []
+ if datastream_id is not None:
+ ds = next((ds for ds in self._datastreams if ds.get_id() == datastream_id), None)
+ if ds is not None and getattr(ds, '_topic', None):
+ topic_filter = [ds._topic]
+ return self._event_bus.subscribe(callback, types=[DefaultEventTypes.NEW_OBSERVATION],
+ topics=topic_filter)
+
+ def on_system_added(self, callback: Callable) -> CallbackListener:
+ """
+ Subscribe to system-discovered / system-added events.
+
+ :param callback: ``fn(event: Event)`` where ``event.data`` is the ``System``.
+ :returns: ``CallbackListener`` for later removal.
+ """
+ return self._event_bus.subscribe(callback, types=[DefaultEventTypes.ADD_SYSTEM])
+
+ def on_command(self, callback: Callable, controlstream_id: str = None) -> CallbackListener:
+ """
+ Subscribe to incoming command events.
+
+ :param callback: ``fn(event: Event)`` called for each matching event.
+        :param controlstream_id: When provided, only events from that control stream are
+            delivered; if the id is unknown or has no topic, or when omitted, all match.
+ :returns: ``CallbackListener`` for later removal.
+ """
+ topic_filter = []
+ if controlstream_id is not None:
+ cs = next((cs for cs in self._controlstreams
+ if getattr(cs, '_resource_id', None) == controlstream_id), None)
+ if cs is not None and getattr(cs, '_topic', None):
+ topic_filter = [cs._topic]
+ return self._event_bus.subscribe(callback, types=[DefaultEventTypes.NEW_COMMAND],
+ topics=topic_filter)
+
+ @property
+ def event_bus(self) -> EventHandler:
+ """Direct access to the EventHandler for advanced subscriptions."""
+ return self._event_bus
diff --git a/src/oshconnect/schema_datamodels.py b/src/oshconnect/schema_datamodels.py
index b9407f5..a1ff338 100644
--- a/src/oshconnect/schema_datamodels.py
+++ b/src/oshconnect/schema_datamodels.py
@@ -9,13 +9,13 @@
from datetime import datetime
from typing import Union, List
-from pydantic import BaseModel, Field, SerializeAsAny, field_validator, HttpUrl, ConfigDict
+from pydantic import BaseModel, Field, SerializeAsAny, field_validator, model_validator, HttpUrl, ConfigDict
from .api_utils import Link, URI
from .csapi4py.constants import ObservationFormat
from .encoding import Encoding
from .geometry import Geometry
-from .swe_components import AnyComponentSchema
+from .swe_components import AnyComponent, check_named
"""
In many of the top level resource models there is a "schema" field of some description. These models are meant to ease
@@ -51,7 +51,12 @@ class SWEJSONCommandSchema(CommandSchema):
command_format: str = Field("application/swe+json", alias='commandFormat')
encoding: SerializeAsAny[Encoding] = Field(...)
- record_schema: SerializeAsAny[AnyComponentSchema] = Field(..., serialization_alias='recordSchema')
+ record_schema: AnyComponent = Field(..., alias='recordSchema')
+
+ @model_validator(mode="after")
+ def _root_record_schema_requires_name(self):
+ check_named(self.record_schema, "SWEJSONCommandSchema.recordSchema")
+ return self
class JSONCommandSchema(CommandSchema):
@@ -61,9 +66,18 @@ class JSONCommandSchema(CommandSchema):
model_config = ConfigDict(populate_by_name=True)
command_format: str = Field("application/json", alias='commandFormat')
- params_schema: SerializeAsAny[AnyComponentSchema] = Field(..., alias='parametersSchema')
- result_schema: SerializeAsAny[AnyComponentSchema] = Field(None, alias='resultSchema')
- feasibility_schema: SerializeAsAny[AnyComponentSchema] = Field(None, alias='feasibilityResultSchema')
+ params_schema: AnyComponent = Field(..., alias='parametersSchema')
+ result_schema: AnyComponent = Field(None, alias='resultSchema')
+ feasibility_schema: AnyComponent = Field(None, alias='feasibilityResultSchema')
+
+ @model_validator(mode="after")
+ def _root_schemas_require_name(self):
+ check_named(self.params_schema, "JSONCommandSchema.parametersSchema")
+ if self.result_schema is not None:
+ check_named(self.result_schema, "JSONCommandSchema.resultSchema")
+ if self.feasibility_schema is not None:
+ check_named(self.feasibility_schema, "JSONCommandSchema.feasibilityResultSchema")
+ return self
class DatastreamRecordSchema(BaseModel):
@@ -75,10 +89,14 @@ class DatastreamRecordSchema(BaseModel):
obs_format: str = Field(..., alias='obsFormat')
+# `encoding` is required per CS API Part 2 §16.2.3 Requirement 109.B, but the
+# OSH server omits it from /datastreams/{id}/schema responses. We accept it as
+# optional to be able to parse what the server returns. See
+# docs/osh_spec_deviations.md (swe-json-missing-encoding).
class SWEDatastreamRecordSchema(DatastreamRecordSchema):
model_config = ConfigDict(populate_by_name=True)
- encoding: SerializeAsAny[Encoding] = Field(...)
- record_schema: SerializeAsAny[AnyComponentSchema] = Field(..., serialization_alias='recordSchema')
+ encoding: SerializeAsAny[Encoding] = Field(None)
+ record_schema: AnyComponent = Field(..., alias='recordSchema')
@field_validator('obs_format')
@classmethod
@@ -88,6 +106,44 @@ def check_check_obs_format(cls, v):
raise ValueError('obsFormat must be on of the SWE formats')
return v
+ @model_validator(mode="after")
+ def _root_record_schema_requires_name(self):
+ check_named(self.record_schema, "SWEDatastreamRecordSchema.recordSchema")
+ return self
+
+
+class JSONDatastreamRecordSchema(DatastreamRecordSchema):
+ """Datastream observation schema for the JSON media types
+ (`application/json`, `application/om+json`).
+
+ Per CS API Part 2 §16.1.4, this form does not carry a SWE `encoding`
+ block; structure is fully described by `resultSchema` (inline result)
+ or `resultLink` (out-of-band). `parametersSchema` is optional.
+ """
+ model_config = ConfigDict(populate_by_name=True)
+
+ obs_format: str = Field(ObservationFormat.JSON.value, alias='obsFormat')
+ result_schema: AnyComponent = Field(None, alias='resultSchema')
+ parameters_schema: AnyComponent = Field(None, alias='parametersSchema')
+ result_link: dict = Field(None, alias='resultLink')
+
+ @field_validator('obs_format')
+ @classmethod
+ def _check_obs_format(cls, v):
+ if v not in (ObservationFormat.JSON.value, "application/json"):
+ raise ValueError(
+ f"obsFormat must be 'application/json' or '{ObservationFormat.JSON.value}'"
+ )
+ return v
+
+ @model_validator(mode="after")
+ def _root_schemas_require_name(self):
+ if self.result_schema is not None:
+ check_named(self.result_schema, "JSONDatastreamRecordSchema.resultSchema")
+ if self.parameters_schema is not None:
+ check_named(self.parameters_schema, "JSONDatastreamRecordSchema.parametersSchema")
+ return self
+
class ObservationOMJSONInline(BaseModel):
"""
diff --git a/src/oshconnect/streamableresource.py b/src/oshconnect/streamableresource.py
index c4ba112..ecd6c56 100644
--- a/src/oshconnect/streamableresource.py
+++ b/src/oshconnect/streamableresource.py
@@ -15,7 +15,6 @@
import traceback
import uuid
from abc import ABC
-from argparse import ArgumentError
from dataclasses import dataclass, field
from enum import Enum
from multiprocessing import Process
@@ -24,9 +23,11 @@
from uuid import UUID, uuid4
from collections import deque
-from pydantic.v1.utils import to_lower_camel
+from pydantic.alias_generators import to_camel
from .csapi4py.constants import ContentTypes
+from .events import EventHandler, DefaultEventTypes
+from .events.builder import EventBuilder
from .schema_datamodels import JSONCommandSchema
from .csapi4py.mqtt import MQTTCommClient
from .csapi4py.constants import APIResourceTypes, ObservationFormat
@@ -125,6 +126,7 @@ class Node:
def __init__(self, protocol: str, address: str, port: int,
username: str = None, password: str = None, server_root: str = 'sensorhub',
+ api_root: str = 'api', mqtt_topic_root: str = None,
session_manager: SessionManager = None,
**kwargs):
self._id = f'node-{uuid.uuid4()}'
@@ -141,7 +143,9 @@ def __init__(self, protocol: str, address: str, port: int,
protocol=self.protocol,
port=self.port,
server_root=self.server_root,
- api_root='api', username=username,
+ api_root=api_root,
+ mqtt_topic_root=mqtt_topic_root,
+ username=username,
password=password)
if self.is_secure:
self._api_helper.user_auth = True
@@ -154,6 +158,7 @@ def __init__(self, protocol: str, address: str, port: int,
if kwargs.get('mqtt_port') is not None:
self._mqtt_port = kwargs.get('mqtt_port')
self._mqtt_client = MQTTCommClient(url=self.address, port=self._mqtt_port,
+ username=username, password=password,
client_id_suffix=uuid.uuid4().hex, )
self._mqtt_client.connect()
self._mqtt_client.start()
@@ -183,7 +188,7 @@ def get_decoded_auth(self):
# return BasicAuth(self._api_helper.username, self._api_helper.password)
def get_mqtt_client(self) -> MQTTCommClient:
- return self._mqtt_client
+ return getattr(self, '_mqtt_client', None)
def discover_systems(self):
result = self._api_helper.retrieve_resource(APIResourceTypes.SYSTEM,
@@ -194,10 +199,10 @@ def discover_systems(self):
print(system_objs)
for system_json in system_objs:
print(system_json)
- system = SystemResource.model_validate(system_json)
+ system = SystemResource.model_validate(system_json, by_alias=True)
sys_obj = System(label=system.properties['name'],
- name=to_lower_camel(system.properties['name'].replace(" ", "_")),
- urn=system.properties['uid'], parent_node=self)
+ name=to_camel(system.properties['name'].replace(" ", "_")),
+ urn=system.properties['uid'], parent_node=self, resource_id=system.system_id)
self._systems.append(sys_obj)
new_systems.append(sys_obj)
@@ -214,17 +219,16 @@ def get_api_helper(self) -> APIHelper:
# System Management
- def add_system(self, system: System, target_node: Node, insert_resource: bool = False):
+ def add_system(self, system: System, insert_resource: bool = False):
"""
Add a system to the target node.
:param system: System object
- :param target_node: Node object
:param insert_resource: Whether to insert the system into the target node's server, default is False
:return:
"""
if insert_resource:
system.insert_self()
- target_node.add_new_system(system)
+ self.add_new_system(system)
self._systems.append(system)
return system
@@ -247,6 +251,71 @@ def register_streamable(self, streamable: StreamableResource):
def get_session(self) -> OSHClientSession:
return self._client_session
+ def serialize(self) -> dict:
+ data = {
+ "_id": self._id,
+ "protocol": self.protocol,
+ "address": self.address,
+ "port": self.port,
+ "server_root": self.server_root,
+ "api_root": getattr(self._api_helper, "api_root", "api"),
+ "mqtt_topic_root": getattr(self._api_helper, "mqtt_topic_root", None),
+ "is_secure": self.is_secure,
+ "username": getattr(self._api_helper, "username", None),
+ "password": getattr(self._api_helper, "password", None),
+ "_systems": [system.serialize() for system in self._systems] if self._systems is not None else None,
+ }
+ data["name"] = getattr(self, "name", None)
+ data["label"] = getattr(self, "label", None)
+ data["urn"] = getattr(self, "urn", None)
+ data["description"] = getattr(self, "description", None)
+ datastreams = getattr(self, "datastreams", None)
+ if datastreams is not None:
+ data["datastreams"] = [ds.serialize() for ds in datastreams]
+ else:
+ data["datastreams"] = None
+ control_channels = getattr(self, "control_channels", None)
+ if control_channels is not None:
+ data["control_channels"] = [cc.serialize() for cc in control_channels]
+ else:
+ data["control_channels"] = None
+ underlying = getattr(self, "_underlying_resource", None)
+ if underlying is not None:
+ dump = getattr(underlying, 'model_dump', None)
+ if callable(dump):
+ data["underlying_resource"] = underlying.model_dump(by_alias=True, exclude_none=True, mode='json')
+ elif hasattr(underlying, 'to_dict'):
+ data["underlying_resource"] = underlying.to_dict()
+ else:
+ data["underlying_resource"] = str(underlying)
+ else:
+ data["underlying_resource"] = None
+ # Remove any 'resource' key if present
+ data.pop("resource", None)
+ return data
+
+ @classmethod
+ def deserialize(cls, data: dict, session_manager: 'SessionManager' = None) -> 'Node':
+ node = cls(
+ protocol=data["protocol"],
+ address=data["address"],
+ port=data["port"],
+ username=data.get("username"),
+ password=data.get("password"),
+ server_root=data.get("server_root", "sensorhub"),
+ api_root=data.get("api_root", "api"),
+ mqtt_topic_root=data.get("mqtt_topic_root"),
+ )
+ node._id = data["_id"]
+ node.is_secure = data.get("is_secure", False)
+ # Register with the session manager before deserializing child resources,
+ # because StreamableResource.__init__ calls node.register_streamable().
+ if session_manager is not None:
+ node.register_with_session_manager(session_manager)
+ node._systems = [System.deserialize(sys, node) for sys in data.get("_systems", [])] if data.get(
+ "_systems") is not None else []
+ return node
+
class Status(Enum):
INITIALIZING = "initializing"
@@ -269,7 +338,7 @@ class StreamableModes(Enum):
class StreamableResource(Generic[T], ABC):
_id: UUID
_resource_id: str
- _canonical_link: str
+ # _canonical_link: str
_topic: str
_status: str = Status.STOPPED.value
ws_url: str
@@ -293,6 +362,7 @@ def __init__(self, node: Node, connection_mode: StreamableModes = StreamableMode
self._connection_mode = connection_mode
self._inbound_deque = deque()
self._outbound_deque = deque()
+ self._parent_resource_id = None
def get_streamable_id(self) -> UUID:
return self._id
@@ -352,13 +422,15 @@ def init_mqtt(self):
# self.get_mqtt_topic()
def _default_on_subscribe(self, client, userdata, mid, granted_qos, properties):
- print("OSH Subscribed: " + str(mid) + " " + str(granted_qos))
+ logging.debug("OSH Subscribed: mid=%s granted_qos=%s", mid, granted_qos)
- def get_mqtt_topic(self, subresource: APIResourceTypes | None = None):
+ def get_mqtt_topic(self, subresource: APIResourceTypes | None = None, data_topic: bool = True):
"""
- Retrieves the MQTT topic for this streamable resource based on its underlying resource type. By default, the topic
- is actually for listening to subresources of a default type
- :param subresource : Optional subresource type to get the topic for, defaults to None
+ Retrieves the MQTT topic for this streamable resource based on its underlying resource type. By default,
+ returns a Resource Data Topic (`:data` suffix per CS API Part 3).
+ :param subresource: Optional subresource type to get the topic for, defaults to None
+ :param data_topic: If True (default), produces a Resource Data Topic with ':data' suffix. Set False for
+ Resource Event Topics.
"""
resource_type = None
parent_res_type = None
@@ -398,9 +470,52 @@ def get_mqtt_topic(self, subresource: APIResourceTypes | None = None):
topic = self._parent_node.get_api_helper().get_mqtt_topic(subresource_type=resource_type,
resource_id=parent_id,
- resource_type=parent_res_type)
+ resource_type=parent_res_type,
+ data_topic=data_topic)
return topic
+ def get_event_topic(self) -> str:
+ """
+ Returns the Resource Event Topic for this streamable resource per CS API Part 3. Event topics point to the
+ resource itself (no ':data' suffix) and are used to receive CloudEvents lifecycle notifications
+ (create/update/delete) published by the server.
+
+ For Datastream/ControlStream, includes the parent system path when a parent resource ID is available.
+ """
+ mqtt_root = self._parent_node.get_api_helper().get_mqtt_root()
+
+ if isinstance(self._underlying_resource, DatastreamResource):
+ if self._parent_resource_id:
+ return f'{mqtt_root}/systems/{self._parent_resource_id}/datastreams/{self._resource_id}'
+ return f'{mqtt_root}/datastreams/{self._resource_id}'
+
+ elif isinstance(self._underlying_resource, ControlStreamResource):
+ if self._parent_resource_id:
+ return f'{mqtt_root}/systems/{self._parent_resource_id}/controlstreams/{self._resource_id}'
+ return f'{mqtt_root}/controlstreams/{self._resource_id}'
+
+ elif isinstance(self._underlying_resource, SystemResource):
+ return f'{mqtt_root}/systems/{self._resource_id}'
+
+ raise ValueError(f"Cannot determine event topic for resource type {type(self._underlying_resource)}")
+
+ def subscribe_events(self, callback=None, qos: int = 0) -> str:
+ """
+ Subscribes to the Resource Event Topic for this streamable resource. Event messages are CloudEvents v1.0
+ JSON payloads published by the server when the resource is created, updated, or deleted.
+
+ :param callback: Optional message callback. If None, uses the default handler (appends to inbound deque).
+ :param qos: MQTT Quality of Service level, default 0.
+ :return: The event topic string that was subscribed to.
+ """
+ if self._mqtt_client is None:
+ logging.warning(f"No MQTT client configured for streamable resource {self._id}.")
+ return ""
+ event_topic = self.get_event_topic()
+ cb = callback if callback is not None else self._mqtt_sub_callback
+ self._mqtt_client.subscribe(event_topic, qos=qos, msg_callback=cb)
+ return event_topic
+
async def _read_from_ws(self, ws):
async for msg in ws:
self._message_handler(ws, msg)
@@ -479,24 +594,23 @@ def subscribe_mqtt(self, topic: str, qos: int = 0):
def _publish_mqtt(self, topic, payload):
if self._mqtt_client is None:
- logging.warning(f"No MQTT client configured for streamable resource {self._id}.")
+ logging.warning("No MQTT client configured for streamable resource %s.", self._id)
return
- print(f'Publishing to MQTT topic {topic}: {payload}')
+ logging.debug("Publishing to MQTT topic %s", topic)
self._mqtt_client.publish(topic, payload, qos=0)
async def _write_to_mqtt(self):
- while self._status is Status.STARTED.value:
+ while self._status == Status.STARTED.value:
try:
msg = self._outbound_deque.popleft()
- print(f"Popped message: {msg}, attempting to publish...")
+ logging.debug("Publishing outbound message from %s", self._id)
self._publish_mqtt(self._topic, msg)
except IndexError:
await asyncio.sleep(0.05)
except Exception as e:
- print(f"Error in Write To MQTT {self._id}: {e}")
- print(traceback.format_exc())
- if self._status is Status.STOPPED.value:
- print("MQTT write task stopping as streamable resource is stopped.")
+ logging.error("Error in Write To MQTT %s: %s\n%s", self._id, e, traceback.format_exc())
+ if self._status == Status.STOPPED.value:
+ logging.debug("MQTT write task stopping: resource %s stopped", self._id)
def publish(self, payload, topic: str = None):
"""
@@ -518,7 +632,7 @@ def subscribe(self, topic=None, callback=None, qos=0):
if topic is None:
t = self._topic
else:
- raise ArgumentError("Invalid topic provided, must be None to use default topic.")
+ raise ValueError("Invalid topic provided, must be None to use default topic.")
if callback is None:
self._mqtt_client.subscribe(t, qos=qos, msg_callback=self._mqtt_sub_callback)
@@ -526,9 +640,14 @@ def subscribe(self, topic=None, callback=None, qos=0):
self._mqtt_client.subscribe(t, qos=qos, msg_callback=callback)
def _mqtt_sub_callback(self, client, userdata, msg):
- print(f"Received MQTT message on topic {msg.topic}: {msg.payload}")
+ logging.debug("Received MQTT message on topic %s (%s bytes)", msg.topic, len(msg.payload))
# Appends to right of deque
self._inbound_deque.append(msg.payload)
+ self._emit_inbound_event(msg)
+
+ def _emit_inbound_event(self, msg):
+ """Hook for subclasses to publish EventHandler events on incoming MQTT messages."""
+ pass
def get_inbound_deque(self):
return self._inbound_deque
@@ -536,6 +655,39 @@ def get_inbound_deque(self):
def get_outbound_deque(self):
return self._outbound_deque
+ def serialize(self) -> dict:
+ """Serializes common attributes of StreamableResource, safely handling missing/None attributes."""
+ topic = getattr(self, "_topic", None)
+ status = getattr(self, "_status", None)
+ parent_resource_id = getattr(self, "_parent_resource_id", None)
+ connection_mode = getattr(self, "_connection_mode", None)
+ resource_id = getattr(self, "_resource_id", None)
+ if isinstance(connection_mode, Enum):
+ connection_mode = connection_mode.value
+
+ return {
+ "id": str(getattr(self, "_id", None)),
+ "resource_id": resource_id,
+ # "canonical_link": getattr(self, "_canonical_link", None),
+ "topic": topic,
+ "status": status,
+ "parent_resource_id": parent_resource_id,
+ "connection_mode": connection_mode,
+ }
+
+ @classmethod
+ def deserialize(cls, data: dict, node: 'Node') -> 'StreamableResource':
+ """Deserializes common attributes. Subclasses should override and call super()."""
+ obj = cls(node=node)
+ obj._id = uuid.UUID(data["id"])
+ obj._resource_id = data.get("resource_id")
+ # obj._canonical_link = data.get("canonical_link")
+ obj._topic = data.get("topic")
+ obj._status = data.get("status")
+ obj._parent_resource_id = data.get("parent_resource_id")
+ obj._connection_mode = StreamableModes(data.get("connection_mode", StreamableModes.PUSH.value))
+ return obj
+
class System(StreamableResource[SystemResource]):
name: str
@@ -567,29 +719,37 @@ def __init__(self, name: str, label: str, urn: str, parent_node: Node, **kwargs)
self._underlying_resource = self.to_system_resource()
- def discover_datastreams(self) -> list[DatastreamResource]:
+ def discover_datastreams(self) -> list[Datastream]:
res = self._parent_node.get_api_helper().get_resource(APIResourceTypes.SYSTEM, self._resource_id,
APIResourceTypes.DATASTREAM)
datastream_json = res.json()['items']
- ds_resources = []
+ datastreams = []
for ds in datastream_json:
- datastream_objs = DatastreamResource.model_validate(ds)
- ds_resources.append(datastream_objs)
+ datastream_objs = DatastreamResource.model_validate(ds, by_alias=True)
+ new_ds = Datastream(self._parent_node, datastream_objs)
+ datastreams.append(new_ds)
+
+ if not any(ds.get_underlying_resource() == datastream_objs for ds in self.datastreams):
+ self.datastreams.append(new_ds)
- return ds_resources
+ return datastreams
- def discover_controlstreams(self) -> list[ControlStreamResource]:
+ def discover_controlstreams(self) -> list[ControlStream]:
res = self._parent_node.get_api_helper().get_resource(APIResourceTypes.SYSTEM, self._resource_id,
APIResourceTypes.CONTROL_CHANNEL)
controlstream_json = res.json()['items']
- cs_resources = []
+ controlstreams = []
+
+ for cs_json in controlstream_json:
+ controlstream_objs = ControlStreamResource.model_validate(cs_json)
+ new_cs = ControlStream(self._parent_node, controlstream_objs)
+ controlstreams.append(new_cs)
- for cs in controlstream_json:
- controlstream_objs = ControlStreamResource.model_validate(cs)
- cs_resources.append(controlstream_objs)
+ if not any(cs.get_underlying_resource() == controlstream_objs for cs in self.control_channels):
+ self.control_channels.append(new_cs)
- return cs_resources
+ return controlstreams
@staticmethod
def from_system_resource(system_resource: SystemResource, parent_node: Node) -> System:
@@ -629,7 +789,9 @@ def get_system_resource(self) -> SystemResource:
def add_insert_datastream(self, datarecord_schema: DataRecordSchema):
"""
Adds a datastream to the system while also inserting it into the system's parent node via HTTP POST.
- :param datarecord_schema: DataRecordSchema to be used to define the datastream
+ :param datarecord_schema: DataRecordSchema to be used to define the datastream. Must carry a `name`
+ matching NameToken (^[A-Za-z][A-Za-z0-9_\\-]*$); SWE Common 3 wraps DataStream.elementType in
+ SoftNamedProperty, so the root component requires a name.
:return:
"""
print(f'Adding datastream: {datarecord_schema.model_dump_json(exclude_none=True, by_alias=True)}')
@@ -671,7 +833,9 @@ def add_and_insert_control_stream(self, control_stream_record_schema: DataRecord
"""
Accepts a DataRecordSchema and creates a JSON encoded schema structure ControlStreamResource, which is inserted
into the parent system via the host node.
- :param control_stream_record_schema: DataRecordSchema to be used for the control stream
+ :param control_stream_record_schema: DataRecordSchema to be used for the control stream. Must carry a `name`
+ matching NameToken (^[A-Za-z][A-Za-z0-9_\\-]*$); JSONCommandSchema.parametersSchema is wrapped in
+ SoftNamedProperty so the root component requires a name.
:param input_name: Name of the input, if None the label of the schema is converted to lower and stripped of whitespace
:return: ControlStream object added to the system
"""
@@ -737,6 +901,54 @@ def retrieve_resource(self):
self._underlying_resource = system_resource
return None
+ def serialize(self) -> dict:
+ data = super().serialize()
+ data["name"] = getattr(self, "name", None)
+ data["label"] = getattr(self, "label", None)
+ data["urn"] = getattr(self, "urn", None)
+ data["description"] = getattr(self, "description", None)
+ datastreams = getattr(self, "datastreams", None)
+ if datastreams is not None:
+ data["datastreams"] = [ds.serialize() for ds in datastreams]
+ else:
+ data["datastreams"] = None
+ control_channels = getattr(self, "control_channels", None)
+ if control_channels is not None:
+ data["control_channels"] = [cc.serialize() for cc in control_channels]
+ else:
+ data["control_channels"] = None
+ underlying = getattr(self, "_underlying_resource", None)
+ if underlying is not None:
+ dump = getattr(underlying, 'model_dump', None)
+ if callable(dump):
+ data["underlying_resource"] = underlying.model_dump(by_alias=True, exclude_none=True, mode='json')
+ elif hasattr(underlying, 'to_dict'):
+ data["underlying_resource"] = underlying.to_dict()
+ else:
+ data["underlying_resource"] = str(underlying)
+ else:
+ data["underlying_resource"] = None
+ # Remove any 'resource' key if present
+ data.pop("resource", None)
+ return data
+
+ @classmethod
+ def deserialize(cls, data: dict, node: 'Node') -> 'System':
+ obj = cls(
+ name=data["name"],
+ label=data["label"],
+ urn=data["urn"],
+ parent_node=node,
+ description=data.get("description"),
+ resource_id=data.get("resource_id")
+ )
+ obj._id = uuid.UUID(data["id"])
+ obj.datastreams = [Datastream.deserialize(ds, node) for ds in data.get("datastreams") or []]
+ obj.control_channels = [ControlStream.deserialize(cc, node) for cc in data.get("control_channels") or []]
+ underlying = data.get("underlying_resource")
+ obj._underlying_resource = SystemResource.model_validate(underlying) if underlying else None
+ return obj
+
class Datastream(StreamableResource[DatastreamResource]):
should_poll: bool
@@ -781,24 +993,30 @@ def insert_observation_dict(self, obs_data: dict):
def start(self):
super().start()
if self._mqtt_client is not None:
- # self._mqtt_client.connect()
-
if self._connection_mode is StreamableModes.PULL or self._connection_mode is StreamableModes.BIDIRECTIONAL:
self._mqtt_client.subscribe(self._topic, msg_callback=self._mqtt_sub_callback)
else:
try:
- loop = asyncio.get_event_loop()
+ loop = asyncio.get_running_loop()
loop.create_task(self._write_to_mqtt())
+ except RuntimeError:
+ logging.warning("No running event loop — MQTT write task for %s not started. "
+ "Call start() from within an async context.", self._id)
except Exception as e:
- # TODO: Use logging instead of print
- print(traceback.format_exc())
- print(f"Error starting MQTT write task: {e}")
-
- # self._mqtt_client.start()
+ logging.error("Error starting MQTT write task for %s: %s\n%s",
+ self._id, e, traceback.format_exc())
def init_mqtt(self):
super().init_mqtt()
- self._topic = self.get_mqtt_topic(subresource=APIResourceTypes.OBSERVATION)
+ self._topic = self.get_mqtt_topic(subresource=APIResourceTypes.OBSERVATION, data_topic=True)
+
+ def _emit_inbound_event(self, msg):
+ evt = (EventBuilder().with_type(DefaultEventTypes.NEW_OBSERVATION)
+ .with_topic(msg.topic)
+ .with_data(msg.payload)
+ .with_producer(self)
+ .build())
+ EventHandler().publish(evt)
def _queue_push(self, msg):
print(f'Pushing message to reader queue: {msg}')
@@ -813,6 +1031,46 @@ def insert(self, data: dict):
encoded = json.dumps(data).encode('utf-8')
self._publish_mqtt(self._topic, encoded)
+ def serialize(self) -> dict:
+ data = super().serialize()
+ data["should_poll"] = getattr(self, "should_poll", None)
+ underlying = getattr(self, "_underlying_resource", None)
+ if underlying is not None:
+ dump = getattr(underlying, 'model_dump', None)
+ if callable(dump):
+ data["underlying_resource"] = underlying.model_dump(by_alias=True, exclude_none=True, mode='json')
+ elif hasattr(underlying, 'to_dict'):
+ data["underlying_resource"] = underlying.to_dict()
+ else:
+ data["underlying_resource"] = str(underlying)
+ else:
+ data["underlying_resource"] = None
+
+ return data
+
+ @classmethod
+ def deserialize(cls, data: dict, node: 'Node') -> 'Datastream':
+ ds_resource = DatastreamResource.model_validate(data["underlying_resource"]) if data.get("underlying_resource") else None
+ obj = cls(parent_node=node, datastream_resource=ds_resource)
+ obj._id = uuid.UUID(data["id"])
+ obj.should_poll = data.get("should_poll", False)
+ return obj
+
+ def subscribe(self, topic=None, callback=None, qos=0):
+ t = None
+
+ if topic is None or topic == APIResourceTypes.OBSERVATION.value:
+ t = self._topic
+ # elif topic == APIResourceTypes.STATUS.value:
+ # t = self._status_topic
+ else:
+ raise ValueError(f"Invalid topic provided {topic}, must be None or 'observation'.")
+
+ if callback is None:
+ self._mqtt_client.subscribe(t, qos=qos, msg_callback=self._mqtt_sub_callback)
+ else:
+ self._mqtt_client.subscribe(t, qos=qos, msg_callback=callback)
+
class ControlStream(StreamableResource[ControlStreamResource]):
_status_topic: str
@@ -833,10 +1091,21 @@ def add_underlying_resource(self, resource: ControlStreamResource):
def init_mqtt(self):
super().init_mqtt()
- self._topic = self.get_mqtt_topic(subresource=APIResourceTypes.COMMAND)
+ self._topic = self.get_mqtt_topic(subresource=APIResourceTypes.COMMAND, data_topic=True)
def get_mqtt_status_topic(self):
- return self.get_mqtt_topic(subresource=APIResourceTypes.STATUS)
+ return self.get_mqtt_topic(subresource=APIResourceTypes.STATUS, data_topic=True)
+
+ def _emit_inbound_event(self, msg):
+ evt_type = (DefaultEventTypes.NEW_COMMAND
+ if msg.topic == self._topic
+ else DefaultEventTypes.NEW_COMMAND_STATUS)
+ evt = (EventBuilder().with_type(evt_type)
+ .with_topic(msg.topic)
+ .with_data(msg.payload)
+ .with_producer(self)
+ .build())
+ EventHandler().publish(evt)
def start(self):
super().start()
@@ -846,11 +1115,14 @@ def start(self):
self._mqtt_client.subscribe(self._topic, msg_callback=self._mqtt_sub_callback)
else:
try:
- loop = asyncio.get_event_loop()
+ loop = asyncio.get_running_loop()
loop.create_task(self._write_to_mqtt())
+ except RuntimeError:
+ logging.warning("No running event loop — MQTT write task for %s not started. "
+ "Call start() from within an async context.", self._id)
except Exception as e:
- print(traceback.format_exc())
- print(f"Error starting MQTT write task: {e}")
+ logging.error("Error starting MQTT write task for %s: %s\n%s",
+ self._id, e, traceback.format_exc())
def get_inbound_deque(self):
return self._inbound_deque
@@ -899,9 +1171,34 @@ def subscribe(self, topic=None, callback=None, qos=0):
elif topic == APIResourceTypes.STATUS.value:
t = self._status_topic
else:
- raise ArgumentError(f"Invalid topic provided {topic}, must be None or one of 'command' or 'status'.")
+ raise ValueError(f"Invalid topic provided {topic}, must be None or one of 'command' or 'status'.")
if callback is None:
self._mqtt_client.subscribe(t, qos=qos, msg_callback=self._mqtt_sub_callback)
else:
self._mqtt_client.subscribe(t, qos=qos, msg_callback=callback)
+
+ def serialize(self) -> dict:
+ data = super().serialize()
+ data["status_topic"] = getattr(self, "_status_topic", None)
+ underlying = getattr(self, "_underlying_resource", None)
+ if underlying is not None:
+ dump = getattr(underlying, 'model_dump', None)
+ if callable(dump):
+ data["underlying_resource"] = underlying.model_dump(by_alias=True, exclude_none=True, mode='json')
+ elif hasattr(underlying, 'to_dict'):
+ data["underlying_resource"] = underlying.to_dict()
+ else:
+ data["underlying_resource"] = str(underlying)
+ else:
+ data["underlying_resource"] = None
+
+ return data
+
+ @classmethod
+ def deserialize(cls, data: dict, node: 'Node') -> 'ControlStream':
+ cs_resource = ControlStreamResource.model_validate(data["underlying_resource"]) if data.get("underlying_resource") else None
+ obj = cls(node=node, controlstream_resource=cs_resource)
+ obj._id = uuid.UUID(data["id"])
+ obj._status_topic = data.get("status_topic")
+ return obj
diff --git a/src/oshconnect/swe_components.py b/src/oshconnect/swe_components.py
index f7220f3..b4ea584 100644
--- a/src/oshconnect/swe_components.py
+++ b/src/oshconnect/swe_components.py
@@ -7,15 +7,34 @@
from __future__ import annotations
+import re
from numbers import Real
-from typing import Union, Any
+from typing import Union, Any, Literal, Annotated
-from pydantic import BaseModel, Field, field_validator, SerializeAsAny
+from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator, SerializeAsAny
from .csapi4py.constants import GeometryTypes
from .api_utils import UCUMCode, URI
from .geometry import Geometry
+# SWE Common 3 NameToken: basicTypes.json#/$defs/NameToken
+_NAME_TOKEN_RE = re.compile(r"^[A-Za-z][A-Za-z0-9_\-]*$")
+
+
+def check_named(component, location: str) -> None:
+ """Validate that a component bound via SoftNamedProperty carries a NameToken `name`."""
+ name = getattr(component, "name", None)
+ if not name:
+ raise ValueError(
+ f"{location}: a component bound here must carry `name` (SWE Common 3 SoftNamedProperty)."
+ )
+ if not _NAME_TOKEN_RE.match(name):
+ raise ValueError(
+ f"{location}: `name` {name!r} does not match NameToken pattern "
+ f"^[A-Za-z][A-Za-z0-9_\\-]*$."
+ )
+
+
"""
NOTE: The following classes are used to represent the Record Schemas that are required for use with Datastreams
The names are likely to change to include a "Schema" suffix to differentiate them from the actual data structures.
@@ -31,8 +50,14 @@
class AnyComponentSchema(BaseModel):
+ model_config = ConfigDict(populate_by_name=True)
type: str = Field(...)
id: str = Field(None)
+ # Wire-format flat carrier for SoftNamedProperty.name. Optional on the component
+ # itself (per AbstractDataComponent.json); enforced as required by parent
+ # binding-context validators (DataRecord/DataChoice/Vector/DataArray/Matrix and
+ # the datastream/controlstream schema wrappers in schema_datamodels.py).
+ name: str = Field(None)
label: str = Field(None)
description: str = Field(None)
updatable: bool = Field(False)
@@ -41,51 +66,81 @@ class AnyComponentSchema(BaseModel):
class DataRecordSchema(AnyComponentSchema):
- type: str = "DataRecord"
- fields: SerializeAsAny[list[AnyComponentSchema]] = Field(...)
+ type: Literal["DataRecord"] = "DataRecord"
+ # DataRecord.json: fields.minItems = 1
+ fields: list["AnyComponent"] = Field(..., min_length=1)
+
+ @model_validator(mode="after")
+ def _fields_require_name(self):
+ for i, f in enumerate(self.fields):
+ check_named(f, f"DataRecord.fields[{i}]")
+ return self
class VectorSchema(AnyComponentSchema):
label: str = Field(...)
- name: str = Field(...)
- type: str = "Vector"
+ type: Literal["Vector"] = "Vector"
definition: str = Field(...)
- reference_frame: str = Field(...)
- local_frame: str = Field(None)
+ reference_frame: str = Field(..., alias='referenceFrame')
+ local_frame: str = Field(None, alias='localFrame')
# TODO: VERIFY might need to be moved further down when these are defined
coordinates: SerializeAsAny[Union[list[CountSchema], list[QuantitySchema], list[TimeSchema]]] = Field(...)
+ @model_validator(mode="after")
+ def _coordinates_require_name(self):
+ for i, c in enumerate(self.coordinates):
+ check_named(c, f"Vector.coordinates[{i}]")
+ return self
+
class DataArraySchema(AnyComponentSchema):
- type: str = "DataArray"
- name: str = Field(...)
- element_count: dict | str | CountSchema = Field(..., serialization_alias='elementCount') # Should type of Count
- element_type: SerializeAsAny[list[AnyComponentSchema]] = Field(..., serialization_alias='elementType')
+ type: Literal["DataArray"] = "DataArray"
+ element_count: dict | str | CountSchema = Field(..., alias='elementCount') # Should type of Count
+ element_type: "AnyComponent" = Field(..., alias='elementType')
encoding: str = Field(...) # TODO: implement an encodings class
values: list = Field(None)
+ @model_validator(mode="after")
+ def _element_type_requires_name(self):
+ check_named(self.element_type, "DataArray.elementType")
+ return self
+
class MatrixSchema(AnyComponentSchema):
- type: str = "Matrix"
- element_count: dict | str | CountSchema = Field(..., serialization_alias='elementCount') # Should be type of Count
- element_type: SerializeAsAny[list[AnyComponentSchema]] = Field(..., serialization_alias='elementType')
+ type: Literal["Matrix"] = "Matrix"
+ element_count: dict | str | CountSchema = Field(..., alias='elementCount') # Should be type of Count
+ # TODO: spec defines Matrix.elementType as a single component (allOf SoftNamedProperty + AnyComponent),
+ # not a list. Cardinality fix is out of scope for the name-validator change.
+ element_type: list["AnyComponent"] = Field(..., alias='elementType')
encoding: str = Field(...) # TODO: implement an encodings class
values: list = Field(None)
reference_frame: str = Field(None)
local_frame: str = Field(None)
+ @model_validator(mode="after")
+ def _element_type_requires_name(self):
+ for i, et in enumerate(self.element_type):
+ check_named(et, f"Matrix.elementType[{i}]")
+ return self
+
class DataChoiceSchema(AnyComponentSchema):
- type: str = "DataChoice"
+ type: Literal["DataChoice"] = "DataChoice"
updatable: bool = Field(False)
optional: bool = Field(False)
- choice_value: CategorySchema = Field(..., serialization_alias='choiceValue') # TODO: Might be called "choiceValues"
- items: SerializeAsAny[list[AnyComponentSchema]] = Field(...)
+ choice_value: CategorySchema = Field(..., alias='choiceValue') # TODO: Might be called "choiceValues"
+ items: list["AnyComponent"] = Field(...)
+
+ @model_validator(mode="after")
+ def _items_require_name(self):
+ for i, item in enumerate(self.items):
+ check_named(item, f"DataChoice.items[{i}]")
+ return self
class GeometrySchema(AnyComponentSchema):
label: str = Field(...)
- type: str = "Geometry"
+ type: Literal["Geometry"] = "Geometry"
updatable: bool = Field(False)
optional: bool = Field(False)
definition: str = Field(...)
@@ -99,7 +154,7 @@ class GeometrySchema(AnyComponentSchema):
GeometryTypes.MULTI_POLYGON.value
]
})
- nil_values: list = Field(None, serialization_alias='nilValues')
+ nil_values: list = Field(None, alias='nilValues')
srs: str = Field(...)
value: Geometry = Field(None)
@@ -111,14 +166,15 @@ class AnySimpleComponentSchema(AnyComponentSchema):
updatable: bool = Field(False)
optional: bool = Field(False)
definition: str = Field(...)
- reference_frame: str = Field(None, serialization_alias='referenceFrame')
- axis_id: str = Field(None, serialization_alias='axisID')
- quality: Union[list[QuantitySchema], list[QuantityRangeSchema], list[CategorySchema], list[TextSchema]] = Field(
- None) # TODO: Union[Quantity, QuantityRange, Category, Text]
- nil_values: list = Field(None, serialization_alias='nilValues')
+ reference_frame: str = Field(None, alias='referenceFrame')
+ axis_id: str = Field(None, alias='axisID')
+ quality: list[Annotated[
+ Union[QuantitySchema, QuantityRangeSchema, CategorySchema, TextSchema],
+ Field(discriminator='type'),
+ ]] = Field(None)
+ nil_values: list = Field(None, alias='nilValues')
constraint: Any = Field(None)
value: Any = Field(None)
- name: str = Field(...)
class AnyScalarComponentSchema(AnySimpleComponentSchema):
@@ -129,17 +185,17 @@ class AnyScalarComponentSchema(AnySimpleComponentSchema):
class BooleanSchema(AnyScalarComponentSchema):
- type: str = "Boolean"
+ type: Literal["Boolean"] = "Boolean"
value: bool = Field(None)
class CountSchema(AnyScalarComponentSchema):
- type: str = "Count"
+ type: Literal["Count"] = "Count"
value: int = Field(None)
class QuantitySchema(AnyScalarComponentSchema):
- type: str = "Quantity"
+ type: Literal["Quantity"] = "Quantity"
value: Union[float, str] = Field(None)
uom: Union[UCUMCode, URI] = Field(...)
@@ -163,45 +219,57 @@ def validate_value(cls, v):
class TimeSchema(AnyScalarComponentSchema):
- type: str = "Time"
+ type: Literal["Time"] = "Time"
value: str = Field(None)
- reference_time: str = Field(None, serialization_alias='referenceTime')
+ reference_time: str = Field(None, alias='referenceTime')
local_frame: str = Field(None)
uom: Union[UCUMCode, URI] = Field(...)
class CategorySchema(AnyScalarComponentSchema):
- type: str = "Category"
+ type: Literal["Category"] = "Category"
value: str = Field(None)
- code_space: str = Field(None, serialization_alias='codeSpace')
+ code_space: str = Field(None, alias='codeSpace')
class TextSchema(AnyScalarComponentSchema):
- type: str = "Text"
+ type: Literal["Text"] = "Text"
value: str = Field(None)
class CountRangeSchema(AnySimpleComponentSchema):
- type: str = "CountRange"
+ type: Literal["CountRange"] = "CountRange"
value: list[int] = Field(None)
uom: Union[UCUMCode, URI] = Field(...)
class QuantityRangeSchema(AnySimpleComponentSchema):
- type: str = "QuantityRange"
+ type: Literal["QuantityRange"] = "QuantityRange"
value: list[Union[float, str]] = Field(None)
uom: Union[UCUMCode, URI] = Field(...)
class TimeRangeSchema(AnySimpleComponentSchema):
- type: str = "TimeRange"
+ type: Literal["TimeRange"] = "TimeRange"
value: list[str] = Field(None)
- reference_time: str = Field(None, serialization_alias='referenceTime')
+ reference_time: str = Field(None, alias='referenceTime')
local_frame: str = Field(None)
uom: Union[UCUMCode, URI] = Field(...)
class CategoryRangeSchema(AnySimpleComponentSchema):
- type: str = "CategoryRange"
+ type: Literal["CategoryRange"] = "CategoryRange"
value: list[str] = Field(None)
- code_space: str = Field(None, serialization_alias='codeSpace')
+ code_space: str = Field(None, alias='codeSpace')
+
+
+# Discriminated union over every concrete SWE Common component schema.
+# Pydantic dispatches on the `type` Literal field during validation, so nested
+# component trees (DataRecord.fields, DataChoice.items, DataArray.elementType,
+# ...) deserialize to the correct subclass rather than the base
+# AnyComponentSchema.
+AnyComponent = Annotated[
+    Union[
+        DataRecordSchema, VectorSchema, DataArraySchema, MatrixSchema,
+        DataChoiceSchema, GeometrySchema,
+        BooleanSchema, CountSchema, QuantitySchema, TimeSchema,
+        CategorySchema, TextSchema,
+        CountRangeSchema, QuantityRangeSchema, TimeRangeSchema, CategoryRangeSchema,
+    ],
+    Field(discriminator="type"),
+]
diff --git a/src/oshconnect/timemanagement.py b/src/oshconnect/timemanagement.py
index 84c5381..5b5286e 100644
--- a/src/oshconnect/timemanagement.py
+++ b/src/oshconnect/timemanagement.py
@@ -7,6 +7,7 @@
from __future__ import annotations
+import calendar
import re
import time
from datetime import datetime, timezone
@@ -48,7 +49,10 @@ def to_epoch_time(a_time: datetime | str) -> float:
return time.mktime(
datetime.strptime(a_time, "%Y-%m-%d %H:%M:%S.%fZ").timetuple())
elif isinstance(a_time, datetime):
- return time.mktime(a_time.timetuple())
+ if a_time.tzinfo is not None:
+ return a_time.timestamp()
+ else:
+ return float(calendar.timegm(a_time.timetuple()))
@staticmethod
def to_utc_time(a_time: float | str) -> datetime:
@@ -167,11 +171,9 @@ def get_iso_time(self):
@staticmethod
def from_string(utc_time: str):
- # TODO: handle timezones
- if re.match(r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.)(\d+)(Z)', utc_time):
- dt = datetime.strptime(utc_time, "%Y-%m-%dT%H:%M:%S.%fZ")
- else:
- dt = datetime.strptime(utc_time, "%Y-%m-%dT%H:%M:%SZ")
+ # Normalize 'Z' suffix to '+00:00' for fromisoformat(), which handles all
+ # ISO 8601 timezone offsets (Z, +HH:MM, -HH:MM) in Python 3.12+.
+ dt = datetime.fromisoformat(utc_time.replace("Z", "+00:00"))
return TimeInstant(utc_time=dt)
@staticmethod
diff --git a/tests/fixtures/fake_weather_schema_omjson.json b/tests/fixtures/fake_weather_schema_omjson.json
new file mode 100644
index 0000000..86d25cf
--- /dev/null
+++ b/tests/fixtures/fake_weather_schema_omjson.json
@@ -0,0 +1,49 @@
+{
+ "obsFormat": "application/om+json",
+ "resultSchema": {
+ "type": "DataRecord",
+ "name": "weather",
+ "definition": "urn:osh:data:weather",
+ "description": "Weather measurements",
+ "fields": [
+ {
+ "type": "Quantity",
+ "name": "temperature",
+ "definition": "http://mmisw.org/ont/cf/parameter/air_temperature",
+ "label": "Air Temperature",
+ "uom": {
+ "code": "Cel"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "pressure",
+ "definition": "http://mmisw.org/ont/cf/parameter/air_pressure",
+ "label": "Atmospheric Pressure",
+ "uom": {
+ "code": "hPa"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "windSpeed",
+ "definition": "http://mmisw.org/ont/cf/parameter/wind_speed",
+ "label": "Wind Speed",
+ "uom": {
+ "code": "m/s"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "windDirection",
+ "definition": "http://mmisw.org/ont/cf/parameter/wind_from_direction",
+ "label": "Wind Direction",
+ "referenceFrame": "http://www.opengis.net/def/cs/OGC/0/NED",
+ "axisID": "z",
+ "uom": {
+ "code": "deg"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/tests/fixtures/fake_weather_schema_swejson.json b/tests/fixtures/fake_weather_schema_swejson.json
new file mode 100644
index 0000000..7d23cb9
--- /dev/null
+++ b/tests/fixtures/fake_weather_schema_swejson.json
@@ -0,0 +1,59 @@
+{
+ "obsFormat": "application/swe+json",
+ "recordSchema": {
+ "type": "DataRecord",
+ "name": "weather",
+ "definition": "urn:osh:data:weather",
+ "description": "Weather measurements",
+ "fields": [
+ {
+ "type": "Time",
+ "name": "time",
+ "definition": "http://www.opengis.net/def/property/OGC/0/SamplingTime",
+ "label": "Sampling Time",
+ "referenceFrame": "http://www.opengis.net/def/trs/BIPM/0/UTC",
+ "uom": {
+ "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "temperature",
+ "definition": "http://mmisw.org/ont/cf/parameter/air_temperature",
+ "label": "Air Temperature",
+ "uom": {
+ "code": "Cel"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "pressure",
+ "definition": "http://mmisw.org/ont/cf/parameter/air_pressure",
+ "label": "Atmospheric Pressure",
+ "uom": {
+ "code": "hPa"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "windSpeed",
+ "definition": "http://mmisw.org/ont/cf/parameter/wind_speed",
+ "label": "Wind Speed",
+ "uom": {
+ "code": "m/s"
+ }
+ },
+ {
+ "type": "Quantity",
+ "name": "windDirection",
+ "definition": "http://mmisw.org/ont/cf/parameter/wind_from_direction",
+ "label": "Wind Direction",
+ "referenceFrame": "http://www.opengis.net/def/cs/OGC/0/NED",
+ "axisID": "z",
+ "uom": {
+ "code": "deg"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/tests/test_api_helper.py b/tests/test_api_helper.py
index d4128b3..8d4330d 100644
--- a/tests/test_api_helper.py
+++ b/tests/test_api_helper.py
@@ -1,7 +1,8 @@
-from csapi4py.default_api_helpers import APIHelper
+from oshconnect.csapi4py import APIHelper
+
def test_url_generation():
- helper = APIHelper(server_url='localhost', port=8282, protocol='http', username='admin', password='admin', api_root='sensorhub/api')
+ helper = APIHelper(server_url='localhost', port=8282, protocol='http', username='admin', password='admin')
expected_url = "http://localhost:8282/sensorhub/api"
url = helper.get_api_root_url()
assert url == expected_url
diff --git a/tests/test_api_update.py b/tests/test_api_update.py
deleted file mode 100644
index ed1df9d..0000000
--- a/tests/test_api_update.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from oshconnect import OSHConnect, Node
-
-node = Node()
-app = OSHConnect()
\ No newline at end of file
diff --git a/tests/test_datastore.py b/tests/test_datastore.py
new file mode 100644
index 0000000..5edfb0f
--- /dev/null
+++ b/tests/test_datastore.py
@@ -0,0 +1,326 @@
+# ==============================================================================
+# Copyright (c) 2024 Botts Innovative Research, Inc.
+# ==============================================================================
+
+"""Tests for the DataStore layer (SQLiteDataStore) — no live OSH server required.
+
+All tests use SQLiteDataStore(":memory:") so there is no file I/O.
+"""
+
+import pytest
+
+from src.oshconnect import OSHConnect
+from src.oshconnect.datastores import SQLiteDataStore
+from src.oshconnect.resource_datamodels import (
+ ControlStreamResource,
+ DatastreamResource,
+)
+from src.oshconnect.streamableresource import (
+ ControlStream,
+ Datastream,
+ Node,
+ SessionManager,
+ System,
+)
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+def make_node(sm: SessionManager | None = None) -> Node:
+    """Create a real Node registered with *sm* (or a fresh SessionManager).
+
+    :param sm: session manager to register the node with; a new one is
+        created when None.
+    :return: a Node configured for the conventional local OSH endpoint
+        (http://localhost:8282, admin/admin) — no network I/O happens here.
+    """
+    if sm is None:
+        sm = SessionManager()
+    node = Node(
+        protocol="http",
+        address="localhost",
+        port=8282,
+        username="admin",
+        password="admin",
+    )
+    node.register_with_session_manager(sm)
+    return node
+
+
+def make_system(node: Node) -> System:
+ return System(
+ name="test_system",
+ label="Test System",
+ urn="urn:test:sensors:sys1",
+ parent_node=node,
+ resource_id="sys001",
+ )
+
+
+def make_datastream(node: Node) -> Datastream:
+ ds_resource = DatastreamResource.model_validate({
+ "id": "ds001",
+ "name": "Test Datastream",
+ "validTime": ["2024-01-01T00:00:00Z", "2025-01-01T00:00:00Z"],
+ })
+ return Datastream(parent_node=node, datastream_resource=ds_resource)
+
+
+def make_controlstream(node: Node) -> ControlStream:
+ cs_resource = ControlStreamResource.model_validate({
+ "id": "cs001",
+ "name": "Test ControlStream",
+ })
+ return ControlStream(node=node, controlstream_resource=cs_resource)
+
+
+# ---------------------------------------------------------------------------
+# Node round-trip
+# ---------------------------------------------------------------------------
+
+class TestNodeRoundTrip:
+    """Node persistence: save/load/delete and upsert semantics of SQLiteDataStore."""
+
+    def test_save_and_load_node(self):
+        # A saved node must round-trip with identity and endpoint config intact.
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        original_id = node.get_id()
+
+        store.save_node(node)
+        loaded = store.load_node(original_id, session_manager=sm)
+
+        assert loaded is not None
+        assert loaded.get_id() == original_id
+        assert loaded.address == node.address
+        assert loaded.port == node.port
+
+    def test_load_missing_node_returns_none(self):
+        # Missing ids resolve to None rather than raising.
+        store = SQLiteDataStore(":memory:")
+        assert store.load_node("nonexistent-id") is None
+
+    def test_load_all_nodes(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node1 = make_node(sm)
+        node2 = make_node(sm)
+        store.save_node(node1)
+        store.save_node(node2)
+
+        nodes = store.load_all_nodes(session_manager=sm)
+        assert len(nodes) == 2
+        ids = {n.get_id() for n in nodes}
+        assert node1.get_id() in ids
+        assert node2.get_id() in ids
+
+    def test_delete_node(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        store.save_node(node)
+        store.delete_node(node.get_id())
+        assert store.load_node(node.get_id()) is None
+
+    def test_upsert_overwrites_existing_node(self):
+        # Saving the same node twice must upsert, not duplicate or raise.
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        store.save_node(node)
+        store.save_node(node)  # second save should not raise
+        nodes = store.load_all_nodes(session_manager=sm)
+        assert len(nodes) == 1
+
+
+# ---------------------------------------------------------------------------
+# System CRUD
+# ---------------------------------------------------------------------------
+
+class TestSystemCRUD:
+    """System persistence keyed by the system's internal (UUID) id."""
+
+    def test_save_and_load_system(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        system = make_system(node)
+        # Systems are stored under their internal id, not the server resource id.
+        system_id = str(system.get_internal_id())
+
+        store.save_system(system, node)
+        loaded = store.load_system(system_id, node)
+
+        assert loaded is not None
+        assert loaded.name == system.name
+        assert loaded.urn == system.urn
+
+    def test_load_missing_system_returns_none(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        assert store.load_system("missing-id", node) is None
+
+    def test_load_systems_for_node(self):
+        # Two systems saved against one node must both come back for that node.
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        sys1 = make_system(node)
+        sys2 = System(
+            name="system_two",
+            label="System Two",
+            urn="urn:test:sensors:sys2",
+            parent_node=node,
+            resource_id="sys002",
+        )
+        store.save_system(sys1, node)
+        store.save_system(sys2, node)
+
+        systems = store.load_systems_for_node(node.get_id(), node)
+        assert len(systems) == 2
+        names = {s.name for s in systems}
+        assert "test_system" in names
+        assert "system_two" in names
+
+    def test_delete_system(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        system = make_system(node)
+        system_id = str(system.get_internal_id())
+        store.save_system(system, node)
+        store.delete_system(system_id)
+        assert store.load_system(system_id, node) is None
+
+
+# ---------------------------------------------------------------------------
+# Datastream CRUD
+# ---------------------------------------------------------------------------
+
+class TestDatastreamCRUD:
+    """Datastream persistence keyed by the datastream's internal (UUID) id."""
+
+    def test_save_and_load_datastream(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        ds = make_datastream(node)
+        ds_id = str(ds.get_internal_id())
+
+        store.save_datastream(ds, node)
+        loaded = store.load_datastream(ds_id, node)
+
+        assert loaded is not None
+        # Server-side resource id must survive the round trip.
+        assert loaded.get_id() == ds.get_id()
+
+    def test_load_missing_datastream_returns_none(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        assert store.load_datastream("missing-id", node) is None
+
+    def test_delete_datastream(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        ds = make_datastream(node)
+        ds_id = str(ds.get_internal_id())
+        store.save_datastream(ds, node)
+        store.delete_datastream(ds_id)
+        assert store.load_datastream(ds_id, node) is None
+
+
+# ---------------------------------------------------------------------------
+# ControlStream CRUD
+# ---------------------------------------------------------------------------
+
+class TestControlStreamCRUD:
+    """ControlStream persistence keyed by the stream's internal (UUID) id."""
+
+    def test_save_and_load_controlstream(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        cs = make_controlstream(node)
+        cs_id = str(cs.get_internal_id())
+
+        store.save_controlstream(cs, node)
+        loaded = store.load_controlstream(cs_id, node)
+
+        assert loaded is not None
+
+    def test_delete_controlstream(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        cs = make_controlstream(node)
+        cs_id = str(cs.get_internal_id())
+        store.save_controlstream(cs, node)
+        store.delete_controlstream(cs_id)
+        assert store.load_controlstream(cs_id, node) is None
+
+
+# ---------------------------------------------------------------------------
+# Bulk save_all / load_all
+# ---------------------------------------------------------------------------
+
+class TestBulkOperations:
+    """save_all/load_all: whole node graphs (node + attached systems) round-trip."""
+
+    def test_save_all_and_load_all(self):
+        store = SQLiteDataStore(":memory:")
+        sm = SessionManager()
+        node = make_node(sm)
+        system = make_system(node)
+        node.add_new_system(system)
+
+        store.save_all([node])
+        nodes = store.load_all(session_manager=sm)
+
+        assert len(nodes) == 1
+        loaded_node = nodes[0]
+        assert loaded_node.get_id() == node.get_id()
+        # Attached systems must be restored along with the node.
+        assert len(loaded_node.systems()) == 1
+        assert loaded_node.systems()[0].name == system.name
+
+    def test_save_all_empty_node_list(self):
+        # Bulk-saving nothing is a no-op, not an error.
+        store = SQLiteDataStore(":memory:")
+        store.save_all([])
+        assert store.load_all() == []
+
+    def test_load_all_empty_store(self):
+        store = SQLiteDataStore(":memory:")
+        assert store.load_all() == []
+
+
+# ---------------------------------------------------------------------------
+# OSHConnect integration
+# ---------------------------------------------------------------------------
+
+class TestOSHConnectIntegration:
+    """OSHConnect facade over the datastore: save/restore and error paths."""
+
+    def test_save_to_store_and_load_from_store(self):
+        store = SQLiteDataStore(":memory:")
+        app = OSHConnect(name="test-app", datastore=store)
+
+        node = Node(
+            protocol="http",
+            address="localhost",
+            port=8282,
+            username="admin",
+            password="admin",
+        )
+        app.add_node(node)
+        system = make_system(node)
+        app.add_system_to_node(system, node)
+
+        app.save_to_store()
+
+        # Restore into a fresh OSHConnect instance using the same in-memory store
+        app2 = OSHConnect(name="test-app-restored", datastore=store)
+        app2.load_from_store()
+
+        assert len(app2._nodes) == 1
+        assert len(app2._systems) == 1
+        assert app2._systems[0].name == system.name
+
+    def test_save_to_store_no_datastore_raises(self):
+        # Without a configured datastore, persistence calls must fail loudly.
+        app = OSHConnect(name="no-store-app")
+        with pytest.raises(RuntimeError):
+            app.save_to_store()
+
+    def test_load_from_store_no_datastore_raises(self):
+        app = OSHConnect(name="no-store-app")
+        with pytest.raises(RuntimeError):
+            app.load_from_store()
+
+    def test_multiple_instances_do_not_share_node_list(self):
+        """Regression: class-level mutable defaults used to share _nodes across instances."""
+        app1 = OSHConnect(name="app1")
+        app2 = OSHConnect(name="app2")
+        node = Node(protocol="http", address="localhost", port=8282)
+        app1.add_node(node)
+        assert len(app1._nodes) == 1
+        assert len(app2._nodes) == 0
diff --git a/tests/test_imports.py b/tests/test_imports.py
new file mode 100644
index 0000000..4e25a6e
--- /dev/null
+++ b/tests/test_imports.py
@@ -0,0 +1,147 @@
+# =============================================================================
+# Copyright (c) 2025 Botts Innovative Research Inc.
+# Date: 2025/4/2
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+#
+# Verifies that all public symbols are importable from the top-level package
+# and from the csapi4py subpackage. Run with:
+# uv run pytest tests/test_imports.py
+#
+# Requirements: the package must be installed in the environment first:
+# uv sync (or) pip install -e .
+# =============================================================================
+
+
+# ---------------------------------------------------------------------------
+# Top-level package
+# ---------------------------------------------------------------------------
+
+def test_core_resources_importable():
+ from oshconnect import OSHConnect, Node, System, Datastream, ControlStream
+ assert OSHConnect is not None
+ assert Node is not None
+ assert System is not None
+ assert Datastream is not None
+ assert ControlStream is not None
+
+
+def test_streaming_enums_importable():
+ from oshconnect import StreamableModes, Status
+ assert StreamableModes is not None
+ assert Status is not None
+
+
+def test_time_management_importable():
+ from oshconnect import TimePeriod, TimeInstant, TemporalModes, TimeUtils
+ assert TimePeriod is not None
+ assert TimeInstant is not None
+ assert TemporalModes is not None
+ assert TimeUtils is not None
+
+
+def test_resource_datamodels_importable():
+ from oshconnect import (
+ SystemResource,
+ DatastreamResource,
+ ControlStreamResource,
+ ObservationResource,
+ )
+ assert SystemResource is not None
+ assert DatastreamResource is not None
+ assert ControlStreamResource is not None
+ assert ObservationResource is not None
+
+
+def test_swe_schema_components_importable():
+    """SWE Common schema components are exposed at the package top level."""
+    from oshconnect import (
+        DataRecordSchema,
+        VectorSchema,
+        QuantitySchema,
+        TimeSchema,
+        BooleanSchema,
+        CountSchema,
+        CategorySchema,
+        TextSchema,
+        QuantityRangeSchema,
+        TimeRangeSchema,
+    )
+    for cls in (DataRecordSchema, VectorSchema, QuantitySchema, TimeSchema,
+                BooleanSchema, CountSchema, CategorySchema, TextSchema,
+                QuantityRangeSchema, TimeRangeSchema):
+        assert cls is not None
+
+
+def test_schema_datamodels_importable():
+    """Datastream/command schema wrappers are exposed at the package top level."""
+    from oshconnect import SWEDatastreamRecordSchema, JSONCommandSchema
+    assert SWEDatastreamRecordSchema is not None
+    assert JSONCommandSchema is not None
+
+
+def test_event_system_importable():
+ from oshconnect import (
+ EventHandler,
+ IEventListener,
+ DefaultEventTypes,
+ AtomicEventTypes,
+ Event,
+ EventBuilder,
+ )
+ assert EventHandler is not None
+ assert IEventListener is not None
+ assert DefaultEventTypes is not None
+ assert AtomicEventTypes is not None
+ assert Event is not None
+ assert EventBuilder is not None
+
+
+def test_csapi_constants_importable():
+ from oshconnect import ObservationFormat, APIResourceTypes, ContentTypes
+ assert ObservationFormat is not None
+ assert APIResourceTypes is not None
+ assert ContentTypes is not None
+
+
+def test_all_list_present_and_complete():
+    """`oshconnect.__all__` exists, is non-empty, and only lists real symbols."""
+    import oshconnect
+    assert hasattr(oshconnect, "__all__")
+    assert len(oshconnect.__all__) > 0
+    # Every advertised name must actually resolve on the package.
+    for name in oshconnect.__all__:
+        assert hasattr(oshconnect, name), f"__all__ lists '{name}' but it is not importable"
+
+
+# ---------------------------------------------------------------------------
+# csapi4py subpackage
+# ---------------------------------------------------------------------------
+
+def test_csapi4py_constants_importable():
+ from oshconnect.csapi4py import APIResourceTypes, ObservationFormat, ContentTypes, APITerms, SystemTypes
+ assert APIResourceTypes is not None
+ assert ObservationFormat is not None
+ assert ContentTypes is not None
+ assert APITerms is not None
+ assert SystemTypes is not None
+
+
+def test_csapi4py_request_builder_importable():
+    """Request-builder classes are importable from the csapi4py subpackage."""
+    from oshconnect.csapi4py import ConnectedSystemsRequestBuilder, ConnectedSystemAPIRequest
+    assert ConnectedSystemsRequestBuilder is not None
+    assert ConnectedSystemAPIRequest is not None
+
+
+def test_csapi4py_mqtt_importable():
+    """The MQTT client is importable from the csapi4py subpackage."""
+    from oshconnect.csapi4py import MQTTCommClient
+    assert MQTTCommClient is not None
+
+
+def test_csapi4py_api_helper_importable():
+    """APIHelper is importable from the csapi4py subpackage."""
+    from oshconnect.csapi4py import APIHelper
+    assert APIHelper is not None
+
+
+def test_csapi4py_all_list_present_and_complete():
+ import oshconnect.csapi4py as csapi4py
+ assert hasattr(csapi4py, "__all__")
+ for name in csapi4py.__all__:
+ assert hasattr(csapi4py, name), f"__all__ lists '{name}' but it is not importable"
\ No newline at end of file
diff --git a/tests/test_mqtt_topics.py b/tests/test_mqtt_topics.py
new file mode 100644
index 0000000..e22d874
--- /dev/null
+++ b/tests/test_mqtt_topics.py
@@ -0,0 +1,260 @@
+"""
+Tests verifying that each resource type produces the correct MQTT topic strings
+per the CS API Part 3 pub/sub conventions.
+
+Topic format (Resource Data Topic):
+    {api_root}/{resource_type}/{resource_id}/{subresource_type}:data
+
+Event topic format (Resource Event Topic):
+    {api_root}/{resource_type}/{resource_id}
+    {api_root}/{parent_type}/{parent_id}/{resource_type}/{resource_id} (with parent)
+"""
+import pytest
+from unittest.mock import MagicMock
+
+from src.oshconnect.csapi4py.constants import APIResourceTypes
+from src.oshconnect.csapi4py.default_api_helpers import APIHelper
+from src.oshconnect.resource_datamodels import DatastreamResource, ControlStreamResource, SystemResource
+from src.oshconnect.streamableresource import Datastream, ControlStream, System
+
+DS_ID = "ds_test_001"
+CS_ID = "cs_test_001"
+SYS_ID = "sys_test_001"
+PARENT_SYS_ID = "sys_parent_001"
+
+
+def make_mock_node(api_root="api", mqtt_topic_root=None):
+ """Returns a mock Node backed by a real APIHelper so topic construction is exercised."""
+ api_helper = APIHelper(
+ server_url="localhost",
+ port=8282,
+ protocol="http",
+ server_root="sensorhub",
+ api_root=api_root,
+ mqtt_topic_root=mqtt_topic_root,
+ )
+ node = MagicMock()
+ node.get_api_helper.return_value = api_helper
+ node.get_mqtt_client.return_value = None
+ return node
+
+
+def make_datastream(node=None):
+ if node is None:
+ node = make_mock_node()
+ ds_resource = DatastreamResource.model_validate({
+ "id": DS_ID,
+ "name": "Test Datastream",
+ "validTime": ["2024-01-01T00:00:00Z", "2025-01-01T00:00:00Z"],
+ })
+ return Datastream(parent_node=node, datastream_resource=ds_resource)
+
+
+def make_controlstream(node=None):
+ if node is None:
+ node = make_mock_node()
+ cs_resource = ControlStreamResource.model_validate({
+ "id": CS_ID,
+ "name": "Test ControlStream",
+ })
+ return ControlStream(node=node, controlstream_resource=cs_resource)
+
+
+def make_system(node=None):
+ if node is None:
+ node = make_mock_node()
+ sys = System(name="test_system", label="Test System", urn="urn:test:system", parent_node=node,
+ resource_id=SYS_ID)
+ return sys
+
+
+class TestDatastreamTopics:
+ def test_observation_data_topic(self):
+ ds = make_datastream()
+ topic = ds.get_mqtt_topic(subresource=APIResourceTypes.OBSERVATION, data_topic=True)
+ assert topic == f"api/datastreams/{DS_ID}/observations:data"
+
+ def test_event_topic_no_parent(self):
+ ds = make_datastream()
+ topic = ds.get_event_topic()
+ assert topic == f"api/datastreams/{DS_ID}"
+
+ def test_event_topic_with_parent_system(self):
+ ds = make_datastream()
+ ds.set_parent_resource_id(PARENT_SYS_ID)
+ topic = ds.get_event_topic()
+ assert topic == f"api/systems/{PARENT_SYS_ID}/datastreams/{DS_ID}"
+
+ def test_init_mqtt_sets_correct_topic(self):
+ node = make_mock_node()
+ mock_mqtt = MagicMock()
+ node.get_mqtt_client.return_value = mock_mqtt
+
+ ds = make_datastream(node)
+ ds.init_mqtt()
+
+ assert ds._topic == f"api/datastreams/{DS_ID}/observations:data"
+
+
+class TestControlStreamTopics:
+ def test_command_data_topic(self):
+ cs = make_controlstream()
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.COMMAND, data_topic=True)
+ assert topic == f"api/controlstreams/{CS_ID}/commands:data"
+
+ def test_status_data_topic(self):
+ cs = make_controlstream()
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.STATUS, data_topic=True)
+ assert topic == f"api/controlstreams/{CS_ID}/status:data"
+
+ def test_status_topic_set_on_init(self):
+ """_status_topic is assigned in __init__ before any explicit init_mqtt call."""
+ cs = make_controlstream()
+ assert cs._status_topic == f"api/controlstreams/{CS_ID}/status:data"
+
+ def test_init_mqtt_sets_command_topic(self):
+ node = make_mock_node()
+ mock_mqtt = MagicMock()
+ node.get_mqtt_client.return_value = mock_mqtt
+
+ cs = make_controlstream(node)
+ cs.init_mqtt()
+
+ assert cs._topic == f"api/controlstreams/{CS_ID}/commands:data"
+
+ def test_event_topic_no_parent(self):
+ cs = make_controlstream()
+ topic = cs.get_event_topic()
+ assert topic == f"api/controlstreams/{CS_ID}"
+
+ def test_event_topic_with_parent_system(self):
+ cs = make_controlstream()
+ cs.set_parent_resource_id(PARENT_SYS_ID)
+ topic = cs.get_event_topic()
+ assert topic == f"api/systems/{PARENT_SYS_ID}/controlstreams/{CS_ID}"
+
+ def test_publish_routes_command_to_command_topic(self):
+ node = make_mock_node()
+ mock_mqtt = MagicMock()
+ node.get_mqtt_client.return_value = mock_mqtt
+
+ cs = make_controlstream(node)
+ cs.init_mqtt()
+ cs.publish("payload", topic=APIResourceTypes.COMMAND.value)
+
+ mock_mqtt.publish.assert_called_once_with(
+ f"api/controlstreams/{CS_ID}/commands:data", "payload", qos=0
+ )
+
+ def test_publish_routes_status_to_status_topic(self):
+ node = make_mock_node()
+ mock_mqtt = MagicMock()
+ node.get_mqtt_client.return_value = mock_mqtt
+
+ cs = make_controlstream(node)
+ cs.init_mqtt()
+ cs.publish("payload", topic=APIResourceTypes.STATUS.value)
+
+ mock_mqtt.publish.assert_called_once_with(
+ f"api/controlstreams/{CS_ID}/status:data", "payload", qos=0
+ )
+
+
+class TestSystemTopics:
+ def test_system_data_topic(self):
+ sys = make_system()
+ topic = sys.get_mqtt_topic(subresource=None, data_topic=True)
+ assert topic == "api/systems:data"
+
+ def test_system_event_topic(self):
+ sys = make_system()
+ topic = sys.get_event_topic()
+ assert topic == f"api/systems/{SYS_ID}"
+
+ def test_system_datastream_subresource_topic(self):
+ sys = make_system()
+ topic = sys.get_mqtt_topic(subresource=APIResourceTypes.DATASTREAM, data_topic=True)
+ assert topic == f"api/systems/{SYS_ID}/datastreams:data"
+
+ def test_system_controlstream_subresource_topic(self):
+ sys = make_system()
+ topic = sys.get_mqtt_topic(subresource=APIResourceTypes.CONTROL_CHANNEL, data_topic=True)
+ assert topic == f"api/systems/{SYS_ID}/controlstreams:data"
+
+
+class TestCustomApiRoot:
+ """Verify that a non-default api_root (with no separate mqtt_topic_root) propagates into all topic strings."""
+
+ CUSTOM_ROOT = "connected-systems"
+
+ def make_node(self):
+ return make_mock_node(api_root=self.CUSTOM_ROOT)
+
+ def test_datastream_data_topic(self):
+ ds = make_datastream(self.make_node())
+ topic = ds.get_mqtt_topic(subresource=APIResourceTypes.OBSERVATION, data_topic=True)
+ assert topic == f"{self.CUSTOM_ROOT}/datastreams/{DS_ID}/observations:data"
+
+ def test_datastream_event_topic(self):
+ ds = make_datastream(self.make_node())
+ topic = ds.get_event_topic()
+ assert topic == f"{self.CUSTOM_ROOT}/datastreams/{DS_ID}"
+
+ def test_controlstream_command_topic(self):
+ cs = make_controlstream(self.make_node())
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.COMMAND, data_topic=True)
+ assert topic == f"{self.CUSTOM_ROOT}/controlstreams/{CS_ID}/commands:data"
+
+ def test_controlstream_status_topic(self):
+ cs = make_controlstream(self.make_node())
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.STATUS, data_topic=True)
+ assert topic == f"{self.CUSTOM_ROOT}/controlstreams/{CS_ID}/status:data"
+
+ def test_system_event_topic(self):
+ sys = make_system(self.make_node())
+ topic = sys.get_event_topic()
+ assert topic == f"{self.CUSTOM_ROOT}/systems/{SYS_ID}"
+
+
+class TestIndependentMqttTopicRoot:
+ """
+ Verify that mqtt_topic_root overrides api_root for MQTT topics while leaving
+ the HTTP api_root untouched.
+ """
+
+ HTTP_ROOT = "api"
+ MQTT_ROOT = "sensorhub/mqtt"
+
+ def make_node(self):
+ return make_mock_node(api_root=self.HTTP_ROOT, mqtt_topic_root=self.MQTT_ROOT)
+
+ def test_mqtt_root_used_for_datastream_data_topic(self):
+ ds = make_datastream(self.make_node())
+ topic = ds.get_mqtt_topic(subresource=APIResourceTypes.OBSERVATION, data_topic=True)
+ assert topic == f"{self.MQTT_ROOT}/datastreams/{DS_ID}/observations:data"
+
+ def test_mqtt_root_used_for_datastream_event_topic(self):
+ ds = make_datastream(self.make_node())
+ topic = ds.get_event_topic()
+ assert topic == f"{self.MQTT_ROOT}/datastreams/{DS_ID}"
+
+ def test_mqtt_root_used_for_controlstream_command_topic(self):
+ cs = make_controlstream(self.make_node())
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.COMMAND, data_topic=True)
+ assert topic == f"{self.MQTT_ROOT}/controlstreams/{CS_ID}/commands:data"
+
+ def test_mqtt_root_used_for_controlstream_status_topic(self):
+ cs = make_controlstream(self.make_node())
+ topic = cs.get_mqtt_topic(subresource=APIResourceTypes.STATUS, data_topic=True)
+ assert topic == f"{self.MQTT_ROOT}/controlstreams/{CS_ID}/status:data"
+
+ def test_mqtt_root_used_for_system_event_topic(self):
+ sys = make_system(self.make_node())
+ topic = sys.get_event_topic()
+ assert topic == f"{self.MQTT_ROOT}/systems/{SYS_ID}"
+
+ def test_http_api_root_unaffected(self):
+ """api_root must not change when mqtt_topic_root is set independently."""
+ node = self.make_node()
+ assert node.get_api_helper().api_root == self.HTTP_ROOT
+ assert node.get_api_helper().get_mqtt_root() == self.MQTT_ROOT
diff --git a/tests/test_oshconnect.py b/tests/test_oshconnect.py
index e958d33..3ee042a 100644
--- a/tests/test_oshconnect.py
+++ b/tests/test_oshconnect.py
@@ -9,8 +9,8 @@
import os
import websockets
+from oshconnect import TimePeriod, TimeInstant
from src.oshconnect import OSHConnect, Node
-from timemanagement import TimePeriod, TimeInstant
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))
@@ -28,10 +28,10 @@ def test_time_period(self):
assert tps.epoch_time == TimeInstant.from_string("2024-06-18T15:46:32Z").epoch_time
assert tpe.epoch_time == TimeInstant.from_string("2024-06-18T20:00:00Z").epoch_time
- tp = TimePeriod(start="now", end="2025-06-18T20:00:00Z")
+ tp = TimePeriod(start="now", end="2099-06-18T20:00:00Z")
assert tp is not None
assert tp.start == "now"
- assert tp.end.epoch_time == TimeInstant.from_string("2025-06-18T20:00:00Z").epoch_time
+ assert tp.end.epoch_time == TimeInstant.from_string("2099-06-18T20:00:00Z").epoch_time
tp = TimePeriod(start="2024-06-18T20:00:00Z", end="now")
assert tp is not None
diff --git a/tests/test_resource_datamodels.py b/tests/test_resource_datamodels.py
index f6c8d66..7da3b1f 100644
--- a/tests/test_resource_datamodels.py
+++ b/tests/test_resource_datamodels.py
@@ -4,7 +4,7 @@
# Author: Ian Patterson
# Contact Email: ian@botts-inc.com
# =============================================================================
-from src.oshconnect.resource_datamodels import ControlStreamResource
+from oshconnect import ControlStreamResource
def test_control_stream_resource():
diff --git a/tests/test_schema_equivalence.py b/tests/test_schema_equivalence.py
new file mode 100644
index 0000000..34aba6b
--- /dev/null
+++ b/tests/test_schema_equivalence.py
@@ -0,0 +1,163 @@
+# =============================================================================
+# Copyright (c) 2026 Botts Innovative Research Inc.
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+"""
+Verify that OSHConnect's datamodels can faithfully represent the datastream schema
+that an OSH server publishes for the FakeWeatherDriver, in both observation
+formats served:
+
+ - application/om+json (CS API Part 2 §16.1.4 shape: obsFormat + resultSchema)
+ - application/swe+json (CS API Part 2 §16.2.3 shape: obsFormat + recordSchema
+ [+ encoding])
+
+Strategy: round-trip the server-supplied schema JSON through the matching
+pydantic model (parse -> re-serialize) and assert structural equivalence. If
+our datamodels can losslessly express what the Node has, then a schema
+*generated* from those same datamodels will match the Node.
+
+Each parametrized case prefers a live node at localhost:8282 (FakeWeatherDriver
+running). If the node is unreachable or no weather system is registered, it
+falls back to the saved fixture at tests/fixtures/fake_weather_schema_{omjson,swejson}.json.
+If neither is available, the case is skipped.
+"""
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import NamedTuple
+
+import pytest
+import requests
+
+from src.oshconnect.schema_datamodels import (
+ JSONDatastreamRecordSchema,
+ SWEDatastreamRecordSchema,
+)
+
+NODE_URL = "http://localhost:8282/sensorhub/api"
+NODE_AUTH = ("admin", "admin")
+LIVE_TIMEOUT = 2.0
+FIXTURES_DIR = Path(__file__).parent / "fixtures"
+
+
+class FormatCase(NamedTuple):
+ obs_format: str
+ model: type
+ fixture_path: Path
+
+
+CASES = [
+ FormatCase(
+ obs_format="application/om+json",
+ model=JSONDatastreamRecordSchema,
+ fixture_path=FIXTURES_DIR / "fake_weather_schema_omjson.json",
+ ),
+ FormatCase(
+ obs_format="application/swe+json",
+ model=SWEDatastreamRecordSchema,
+ fixture_path=FIXTURES_DIR / "fake_weather_schema_swejson.json",
+ ),
+]
+
+
+def _find_weather_system(systems: list[dict]) -> dict | None:
+ """Pick a system whose name/description/uid mentions 'weather'."""
+ for sys_ in systems:
+ props = sys_.get("properties", {}) or {}
+ haystack = " ".join(
+ str(x) for x in (
+ sys_.get("id", ""),
+ props.get("name", ""),
+ props.get("description", ""),
+ props.get("uid", ""),
+ )
+ ).lower()
+ if "weather" in haystack:
+ return sys_
+ return None
+
+
+def _try_live_schema(obs_format: str) -> tuple[str, dict] | None:
+ """Probe the node at localhost:8282 for a FakeWeather datastream and return
+ (source_label, schema_json) for the requested obs_format. Returns None on
+ any failure."""
+ try:
+ sys_resp = requests.get(f"{NODE_URL}/systems?f=json", auth=NODE_AUTH, timeout=LIVE_TIMEOUT)
+ except (requests.ConnectionError, requests.Timeout):
+ return None
+ if not sys_resp.ok:
+ return None
+
+ weather = _find_weather_system(sys_resp.json().get("items", []))
+ if not weather:
+ return None
+
+ sys_id = weather.get("id")
+ if not sys_id:
+ return None
+
+ ds_resp = requests.get(
+ f"{NODE_URL}/systems/{sys_id}/datastreams?f=json",
+ auth=NODE_AUTH, timeout=LIVE_TIMEOUT,
+ )
+ if not ds_resp.ok:
+ return None
+ datastreams = ds_resp.json().get("items", [])
+ if not datastreams:
+ return None
+
+ ds_id = datastreams[0].get("id")
+ schema_resp = requests.get(
+ f"{NODE_URL}/datastreams/{ds_id}/schema",
+ params={"obsFormat": obs_format},
+ auth=NODE_AUTH, timeout=LIVE_TIMEOUT,
+ )
+ if not schema_resp.ok:
+ return None
+
+ return (
+ f"live node 8282 ({obs_format}, system={sys_id}, datastream={ds_id})",
+ schema_resp.json(),
+ )
+
+
+def _try_fixture_schema(path: Path) -> tuple[str, dict] | None:
+ """Load the saved fixture if it exists and is non-empty."""
+ if not path.exists():
+ return None
+ text = path.read_text().strip()
+ if not text or text == "{}":
+ return None
+ data = json.loads(text)
+ if not data:
+ return None
+ return f"fixture {path.name}", data
+
+
+@pytest.mark.parametrize(
+ "case",
+ CASES,
+ ids=lambda c: c.obs_format,
+)
+def test_fake_weather_schema_round_trips_through_datamodels(case: FormatCase):
+ source = _try_live_schema(case.obs_format) or _try_fixture_schema(case.fixture_path)
+ if source is None:
+ pytest.skip(
+ f"No live FakeWeather node at {NODE_URL} for {case.obs_format} and no "
+ f"usable fixture at {case.fixture_path}. To enable: start the "
+ f"FakeWeatherDriver on the node, or paste a schema JSON into the fixture."
+ )
+ label, server_schema = source
+
+ parsed = case.model.model_validate(server_schema)
+ round_tripped = parsed.model_dump(
+ mode='json', by_alias=True, exclude_none=True, exclude_unset=True,
+ )
+
+ assert server_schema == round_tripped, (
+ f"Schema round-trip mismatch (source: {label}, model: {case.model.__name__}).\n"
+ f"server:\n{json.dumps(server_schema, indent=2, sort_keys=True)}\n\n"
+ f"datamodel re-serialization:\n{json.dumps(round_tripped, indent=2, sort_keys=True)}"
+ )
\ No newline at end of file
diff --git a/tests/test_serialization.py b/tests/test_serialization.py
new file mode 100644
index 0000000..71c4530
--- /dev/null
+++ b/tests/test_serialization.py
@@ -0,0 +1,10 @@
+from oshconnect import Node
+
+
+def test_node_password_serialization():
+ node = Node(protocol='http', address='localhost', port=8080, username='user', password='pass')
+ serialized = node.serialize()
+ assert serialized['password'] == 'pass'
+ deserialized = Node.deserialize(serialized)
+ assert deserialized._api_helper.password == 'pass'
+
diff --git a/tests/test_streamable_resources.py b/tests/test_streamable_resources.py
index 2588e04..f5fe182 100644
--- a/tests/test_streamable_resources.py
+++ b/tests/test_streamable_resources.py
@@ -1,4 +1,4 @@
-from src.oshconnect import OSHConnect, Node
+from oshconnect import OSHConnect, Node
def test_streamble_observations():
diff --git a/tests/test_swe_name_validation.py b/tests/test_swe_name_validation.py
new file mode 100644
index 0000000..a0c3cf0
--- /dev/null
+++ b/tests/test_swe_name_validation.py
@@ -0,0 +1,394 @@
+# =============================================================================
+# Copyright (c) 2026 Botts Innovative Research Inc.
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+"""
+SWE Common 3 SoftNamedProperty validation: a `name` is required wherever a
+component is bound via SoftNamedProperty (DataRecord.fields, DataChoice.items,
+Vector.coordinates, DataArray.elementType, Matrix.elementType, and the root
+recordSchema/resultSchema of a datastream/controlstream — i.e.,
+DataStream.elementType). Names must match NameToken: ^[A-Za-z][A-Za-z0-9_\\-]*$.
+
+A standalone component (not bound) does NOT require a name; per the spec,
+`name` is not a property of any data component itself.
+"""
+from __future__ import annotations
+
+import json
+from pathlib import Path
+
+import pytest
+from pydantic import ValidationError
+
+from src.oshconnect.schema_datamodels import (
+ JSONDatastreamRecordSchema,
+ JSONCommandSchema,
+ SWEDatastreamRecordSchema,
+ SWEJSONCommandSchema,
+)
+from src.oshconnect.swe_components import (
+ BooleanSchema,
+ CategorySchema,
+ CountSchema,
+ DataArraySchema,
+ DataChoiceSchema,
+ DataRecordSchema,
+ MatrixSchema,
+ QuantitySchema,
+ TimeSchema,
+ VectorSchema,
+)
+
+FIXTURES_DIR = Path(__file__).parent / "fixtures"
+
+VALID_TIME_FIELD = {
+ "type": "Time",
+ "name": "time",
+ "label": "Sampling Time",
+ "definition": "http://www.opengis.net/def/property/OGC/0/SamplingTime",
+ "uom": {"href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"},
+}
+VALID_TEMP_FIELD = {
+ "type": "Quantity",
+ "name": "temperature",
+ "label": "Air Temperature",
+ "definition": "http://mmisw.org/ont/cf/parameter/air_temperature",
+ "uom": {"code": "Cel"},
+}
+INVALID_NAMES = ["", "1bad", "with space", "has:colon", "has/slash", "has.dot"]
+
+
+# ---------------------------------------------------------------------------
+# Standalone components do not need a name (positive cases)
+# ---------------------------------------------------------------------------
+
+def test_quantity_standalone_no_name_ok():
+ q = QuantitySchema(
+ label="Air Temperature",
+ definition="http://example.org/temperature",
+ uom={"code": "Cel"},
+ )
+ assert q.name is None
+
+
+def test_vector_standalone_no_name_ok():
+ v = VectorSchema(
+ label="Position",
+ definition="http://example.org/position",
+ referenceFrame="http://example.org/frames/ENU",
+ coordinates=[
+ QuantitySchema(
+ name="x", label="X", definition="http://example.org/x", uom={"code": "m"}
+ ),
+ QuantitySchema(
+ name="y", label="Y", definition="http://example.org/y", uom={"code": "m"}
+ ),
+ ],
+ )
+ assert v.name is None
+
+
+def test_existing_swejson_fixture_round_trips():
+ raw = json.loads((FIXTURES_DIR / "fake_weather_schema_swejson.json").read_text())
+ parsed = SWEDatastreamRecordSchema.model_validate(raw)
+ re_dumped = parsed.model_dump(mode="json", by_alias=True, exclude_none=True)
+ assert re_dumped["recordSchema"]["name"] == "weather"
+ assert {f["name"] for f in re_dumped["recordSchema"]["fields"]} == {
+ "time", "temperature", "pressure", "windSpeed", "windDirection"
+ }
+
+
+def test_existing_omjson_fixture_round_trips():
+ raw = json.loads((FIXTURES_DIR / "fake_weather_schema_omjson.json").read_text())
+ parsed = JSONDatastreamRecordSchema.model_validate(raw)
+ re_dumped = parsed.model_dump(mode="json", by_alias=True, exclude_none=True)
+ assert re_dumped["resultSchema"]["name"] == "weather"
+
+
+# ---------------------------------------------------------------------------
+# DataRecord.fields[*] requires name (negative cases)
+# ---------------------------------------------------------------------------
+
+def test_record_with_named_fields_ok():
+ DataRecordSchema(
+ name="weather",
+ fields=[VALID_TIME_FIELD, VALID_TEMP_FIELD],
+ )
+
+
+def test_record_field_missing_name_raises():
+ with pytest.raises(ValidationError, match="DataRecord.fields"):
+ DataRecordSchema(
+ name="weather",
+ fields=[
+ {
+ "type": "Quantity",
+ "label": "Air Temperature",
+ "definition": "http://example.org/temp",
+ "uom": {"code": "Cel"},
+ }
+ ],
+ )
+
+
+@pytest.mark.parametrize("bad_name", INVALID_NAMES)
+def test_record_field_invalid_name_raises(bad_name):
+ with pytest.raises(ValidationError):
+ DataRecordSchema(
+ name="weather",
+ fields=[
+ {
+ "type": "Quantity",
+ "name": bad_name,
+ "label": "Air Temperature",
+ "definition": "http://example.org/temp",
+ "uom": {"code": "Cel"},
+ }
+ ],
+ )
+
+
+# ---------------------------------------------------------------------------
+# DataChoice.items[*] requires name
+# ---------------------------------------------------------------------------
+
+def test_choice_items_named_ok():
+ DataChoiceSchema(
+ name="alt",
+ choiceValue=CategorySchema(
+ name="picker",
+ label="Picker",
+ definition="http://example.org/picker",
+ value="a",
+ ),
+ items=[
+ {
+ "type": "Quantity",
+ "name": "alt_a",
+ "label": "Option A",
+ "definition": "http://example.org/a",
+ "uom": {"code": "m"},
+ }
+ ],
+ )
+
+
+def test_choice_item_missing_name_raises():
+ with pytest.raises(ValidationError, match="DataChoice.items"):
+ DataChoiceSchema(
+ name="alt",
+ choiceValue=CategorySchema(
+ name="picker",
+ label="Picker",
+ definition="http://example.org/picker",
+ value="a",
+ ),
+ items=[
+ {
+ "type": "Quantity",
+ "label": "Option A",
+ "definition": "http://example.org/a",
+ "uom": {"code": "m"},
+ }
+ ],
+ )
+
+
+# ---------------------------------------------------------------------------
+# Vector.coordinates[*] requires name
+# ---------------------------------------------------------------------------
+
+def test_vector_coordinate_missing_name_raises():
+ with pytest.raises(ValidationError, match="Vector.coordinates"):
+ VectorSchema(
+ label="Position",
+ definition="http://example.org/position",
+ referenceFrame="http://example.org/frames/ENU",
+ coordinates=[
+ {
+ "type": "Quantity",
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ }
+ ],
+ )
+
+
+# ---------------------------------------------------------------------------
+# DataArray.elementType requires name
+# ---------------------------------------------------------------------------
+
+def test_dataarray_element_type_missing_name_raises():
+ with pytest.raises(ValidationError, match="DataArray.elementType"):
+ DataArraySchema(
+ elementCount={"type": "Count", "name": "n", "label": "n",
+ "definition": "http://example.org/n"},
+ elementType={
+ "type": "Quantity",
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ },
+ encoding="JSONEncoding",
+ )
+
+
+# ---------------------------------------------------------------------------
+# Matrix.elementType[*] requires name
+# ---------------------------------------------------------------------------
+
+def test_matrix_element_type_missing_name_raises():
+ with pytest.raises(ValidationError, match="Matrix.elementType"):
+ MatrixSchema(
+ elementCount={"type": "Count", "name": "n", "label": "n",
+ "definition": "http://example.org/n"},
+ elementType=[
+ {
+ "type": "Quantity",
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ }
+ ],
+ encoding="JSONEncoding",
+ )
+
+
+# ---------------------------------------------------------------------------
+# Datastream/Controlstream wrappers: root requires name
+# ---------------------------------------------------------------------------
+
+def test_swe_datastream_root_requires_name():
+ with pytest.raises(ValidationError, match="SWEDatastreamRecordSchema.recordSchema"):
+ SWEDatastreamRecordSchema.model_validate({
+ "obsFormat": "application/swe+json",
+ "recordSchema": {
+ "type": "DataRecord",
+ "definition": "urn:osh:data:weather",
+ "fields": [VALID_TIME_FIELD],
+ },
+ })
+
+
+def test_swe_datastream_root_invalid_name_pattern_raises():
+ with pytest.raises(ValidationError, match="NameToken"):
+ SWEDatastreamRecordSchema.model_validate({
+ "obsFormat": "application/swe+json",
+ "recordSchema": {
+ "type": "DataRecord",
+ "name": "1bad-leading-digit",
+ "definition": "urn:osh:data:weather",
+ "fields": [VALID_TIME_FIELD],
+ },
+ })
+
+
+def test_json_datastream_optional_when_no_schemas_present():
+ # Per CS API Part 2 §16.1.4, JSON form may use resultLink instead of
+ # inline schemas, so neither resultSchema nor parametersSchema is required.
+ JSONDatastreamRecordSchema.model_validate({
+ "obsFormat": "application/json",
+ })
+
+
+def test_json_datastream_result_schema_requires_name_when_present():
+ with pytest.raises(ValidationError, match="JSONDatastreamRecordSchema.resultSchema"):
+ JSONDatastreamRecordSchema.model_validate({
+ "obsFormat": "application/json",
+ "resultSchema": {
+ "type": "DataRecord",
+ "definition": "urn:osh:data:weather",
+ "fields": [VALID_TIME_FIELD],
+ },
+ })
+
+
+def test_swe_command_schema_root_requires_name():
+ with pytest.raises(ValidationError, match="SWEJSONCommandSchema.recordSchema"):
+ SWEJSONCommandSchema.model_validate({
+ "commandFormat": "application/swe+json",
+ "encoding": {"type": "JSONEncoding"},
+ "recordSchema": {
+ "type": "DataRecord",
+ "definition": "urn:osh:control:cmd",
+ "fields": [VALID_TIME_FIELD],
+ },
+ })
+
+
+def test_json_command_schema_params_requires_name():
+ with pytest.raises(ValidationError, match="JSONCommandSchema.parametersSchema"):
+ JSONCommandSchema.model_validate({
+ "commandFormat": "application/json",
+ "parametersSchema": {
+ "type": "DataRecord",
+ "definition": "urn:osh:control:params",
+ "fields": [VALID_TIME_FIELD],
+ },
+ })
+
+
+# ---------------------------------------------------------------------------
+# NameToken pattern coverage
+# ---------------------------------------------------------------------------
+
+def test_nested_aggregate_in_record_fields_validated():
+ # Aggregate-in-aggregate: a DataRecord inside another DataRecord's fields[]. The
+ # inner record must itself be named (it's the bound child); its own fields are then
+ # validated by the inner record's validator independently.
+ DataRecordSchema(
+ name="outer",
+ fields=[
+ {
+ "type": "DataRecord",
+ "name": "inner",
+ "fields": [VALID_TIME_FIELD],
+ }
+ ],
+ )
+ # Inner record present but unnamed → outer's validator catches it.
+ with pytest.raises(ValidationError, match="DataRecord.fields"):
+ DataRecordSchema(
+ name="outer",
+ fields=[
+ {
+ "type": "DataRecord",
+ "fields": [VALID_TIME_FIELD],
+ }
+ ],
+ )
+
+
+@pytest.mark.parametrize("good_name", ["a", "ab", "wind_speed", "wind-speed", "x1", "X_1-y"])
+def test_valid_name_tokens_accepted(good_name):
+ DataRecordSchema(
+ name="root",
+ fields=[
+ {
+ "type": "Quantity",
+ "name": good_name,
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ }
+ ],
+ )
+
+
+@pytest.mark.parametrize("bad_name", ["1leading", "with space", "with:colon", "with.dot", "with/slash"])
+def test_invalid_name_tokens_rejected(bad_name):
+ with pytest.raises(ValidationError, match="NameToken"):
+ DataRecordSchema(
+ name="root",
+ fields=[
+ {
+ "type": "Quantity",
+ "name": bad_name,
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ }
+ ],
+ )
diff --git a/tests/test_swe_schema_validation.py b/tests/test_swe_schema_validation.py
new file mode 100644
index 0000000..738f01f
--- /dev/null
+++ b/tests/test_swe_schema_validation.py
@@ -0,0 +1,371 @@
+# =============================================================================
+# Copyright (c) 2026 Botts Innovative Research Inc.
+# Author: Ian Patterson
+# Contact Email: ian@botts-inc.com
+# =============================================================================
+"""
+SWE Common 3 schema-conformance tests beyond the SoftNamedProperty `name` rule:
+
+1. Spec `required` arrays per leaf component type (Quantity needs uom, Vector
+ needs referenceFrame, etc.) — guard against accidental Field(...) → Field(None)
+ regressions.
+2. Discriminator routing: AnyComponent.model_validate dispatches by `type` to
+ the correct concrete class, and rejects unknown types.
+3. Alias / field-name parity: both camelCase wire-format and snake_case Python
+ names parse to identical models.
+4. Round-trip fidelity: parse → dump(by_alias, exclude_none) → re-parse, deep equal.
+5. Vector.coordinates element-type restriction (Count/Quantity/Time only).
+6. DataRecord.fields minItems: 1 (per DataRecord.json).
+"""
+from __future__ import annotations
+
+import json
+from pathlib import Path
+
+import pytest
+from pydantic import TypeAdapter, ValidationError
+
+from src.oshconnect.schema_datamodels import (
+ JSONDatastreamRecordSchema,
+ SWEDatastreamRecordSchema,
+)
+from src.oshconnect.swe_components import (
+ AnyComponent,
+ BooleanSchema,
+ CategoryRangeSchema,
+ CategorySchema,
+ CountRangeSchema,
+ CountSchema,
+ DataArraySchema,
+ DataChoiceSchema,
+ DataRecordSchema,
+ GeometrySchema,
+ MatrixSchema,
+ QuantityRangeSchema,
+ QuantitySchema,
+ TextSchema,
+ TimeRangeSchema,
+ TimeSchema,
+ VectorSchema,
+)
+
+FIXTURES_DIR = Path(__file__).parent / "fixtures"
+ANY_COMPONENT = TypeAdapter(AnyComponent)
+
+
+def _quantity_field(name: str = "x") -> dict:
+ return {
+ "type": "Quantity",
+ "name": name,
+ "label": "X",
+ "definition": "http://example.org/x",
+ "uom": {"code": "m"},
+ }
+
+
+# ---------------------------------------------------------------------------
+# 1. Spec `required` arrays per leaf component type
+# ---------------------------------------------------------------------------
+# Per JSON schemas at:
+# https://github.com/opengeospatial/ogcapi-connected-systems/tree/master/swecommon/schemas/json
+# Required arrays:
+# Quantity: [type, definition, label, uom]
+# Boolean: [type, definition, label]
+#   Text:      [type, definition, label] (same required-key set as Boolean)
+# Vector: [type, definition, referenceFrame, label, coordinates]
+# DataRecord:[type, fields]
+# Geometry: [type, srs, definition, label]
+
+
+def test_quantity_requires_uom():
+ with pytest.raises(ValidationError, match="uom"):
+ QuantitySchema(label="X", definition="http://example.org/x")
+
+
+def test_quantity_requires_label():
+ with pytest.raises(ValidationError, match="label"):
+ QuantitySchema(definition="http://example.org/x", uom={"code": "m"})
+
+
+def test_quantity_requires_definition():
+ with pytest.raises(ValidationError, match="definition"):
+ QuantitySchema(label="X", uom={"code": "m"})
+
+
+def test_boolean_requires_label_and_definition():
+ with pytest.raises(ValidationError, match="label"):
+ BooleanSchema(definition="http://example.org/b")
+ with pytest.raises(ValidationError, match="definition"):
+ BooleanSchema(label="X")
+
+
+def test_text_requires_label_and_definition():
+ with pytest.raises(ValidationError, match="label"):
+ TextSchema(definition="http://example.org/t")
+ with pytest.raises(ValidationError, match="definition"):
+ TextSchema(label="X")
+
+
+def test_vector_requires_label_definition_referenceframe_coordinates():
+ base = dict(
+ label="V",
+ definition="http://example.org/v",
+ referenceFrame="http://example.org/frames/ENU",
+ coordinates=[
+ QuantitySchema(name="x", label="X",
+ definition="http://example.org/x", uom={"code": "m"}),
+ ],
+ )
+ for missing in ("label", "definition", "referenceFrame", "coordinates"):
+ kwargs = {k: v for k, v in base.items() if k != missing}
+ with pytest.raises(ValidationError):
+ VectorSchema(**kwargs)
+
+
+def test_datarecord_requires_fields():
+ with pytest.raises(ValidationError, match="fields"):
+ DataRecordSchema(name="r")
+
+
+def test_geometry_requires_srs_definition_label():
+ base = dict(
+ label="G",
+ definition="http://example.org/g",
+ srs="http://www.opengis.net/def/crs/EPSG/0/4326",
+ )
+ for missing in ("label", "definition", "srs"):
+ kwargs = {k: v for k, v in base.items() if k != missing}
+ with pytest.raises(ValidationError):
+ GeometrySchema(**kwargs)
+
+
+# ---------------------------------------------------------------------------
+# 2. Discriminator routing
+# ---------------------------------------------------------------------------
+
+DISCRIMINATOR_CASES = [
+ # (type literal, minimal-valid dict, expected pydantic class)
+ ("Boolean",
+ {"type": "Boolean", "label": "B", "definition": "http://example.org/b"},
+ BooleanSchema),
+ ("Count",
+ {"type": "Count", "label": "C", "definition": "http://example.org/c"},
+ CountSchema),
+ ("Quantity",
+ {"type": "Quantity", "label": "Q", "definition": "http://example.org/q",
+ "uom": {"code": "m"}},
+ QuantitySchema),
+ ("Time",
+ {"type": "Time", "label": "T", "definition": "http://example.org/t",
+ "uom": {"href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"}},
+ TimeSchema),
+ ("Category",
+ {"type": "Category", "label": "Cat", "definition": "http://example.org/cat"},
+ CategorySchema),
+ ("Text",
+ {"type": "Text", "label": "Tx", "definition": "http://example.org/tx"},
+ TextSchema),
+ ("CountRange",
+ {"type": "CountRange", "label": "CR", "definition": "http://example.org/cr",
+ "uom": {"code": "1"}},
+ CountRangeSchema),
+ ("QuantityRange",
+ {"type": "QuantityRange", "label": "QR", "definition": "http://example.org/qr",
+ "uom": {"code": "m"}},
+ QuantityRangeSchema),
+ ("TimeRange",
+ {"type": "TimeRange", "label": "TR", "definition": "http://example.org/tr",
+ "uom": {"href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"}},
+ TimeRangeSchema),
+ ("CategoryRange",
+ {"type": "CategoryRange", "label": "CatR",
+ "definition": "http://example.org/catr"},
+ CategoryRangeSchema),
+ ("DataRecord",
+ {"type": "DataRecord", "fields": [_quantity_field("a")]},
+ DataRecordSchema),
+ ("Vector",
+ {"type": "Vector", "label": "V", "definition": "http://example.org/v",
+ "referenceFrame": "http://example.org/frames/ENU",
+ "coordinates": [_quantity_field("x")]},
+ VectorSchema),
+ ("DataArray",
+ {"type": "DataArray",
+ "elementCount": {"type": "Count", "name": "n", "label": "n",
+ "definition": "http://example.org/n"},
+ "elementType": _quantity_field("e"),
+ "encoding": "JSONEncoding"},
+ DataArraySchema),
+ ("Matrix",
+ {"type": "Matrix",
+ "elementCount": {"type": "Count", "name": "n", "label": "n",
+ "definition": "http://example.org/n"},
+ "elementType": [_quantity_field("e")],
+ "encoding": "JSONEncoding"},
+ MatrixSchema),
+ ("DataChoice",
+ {"type": "DataChoice",
+ "choiceValue": {"type": "Category", "name": "pick", "label": "Pick",
+ "definition": "http://example.org/pick"},
+ "items": [_quantity_field("a")]},
+ DataChoiceSchema),
+ ("Geometry",
+ {"type": "Geometry", "label": "G", "definition": "http://example.org/g",
+ "srs": "http://www.opengis.net/def/crs/EPSG/0/4326"},
+ GeometrySchema),
+]
+
+
+@pytest.mark.parametrize(
+ "type_literal,payload,expected_cls",
+ DISCRIMINATOR_CASES,
+ ids=[c[0] for c in DISCRIMINATOR_CASES],
+)
+def test_anycomponent_discriminator_routes(type_literal, payload, expected_cls):
+ parsed = ANY_COMPONENT.validate_python(payload)
+ assert isinstance(parsed, expected_cls)
+ assert parsed.type == type_literal
+
+
+def test_anycomponent_unknown_type_rejected():
+ with pytest.raises(ValidationError):
+ ANY_COMPONENT.validate_python({"type": "NotAType", "label": "X"})
+
+
+# ---------------------------------------------------------------------------
+# 3. Alias / field-name parity
+# ---------------------------------------------------------------------------
+# OSH wire format is camelCase; our pydantic fields are snake_case with alias=
+# entries. Confirm both inputs produce equivalent models, and dumping by_alias
+# yields the camelCase form.
+
+
+def test_quantity_axis_id_alias_parity():
+ via_alias = QuantitySchema.model_validate({
+ "name": "wd",
+ "label": "Wind Direction",
+ "definition": "http://example.org/wd",
+ "axisID": "z",
+ "uom": {"code": "deg"},
+ })
+ via_python = QuantitySchema(
+ name="wd", label="Wind Direction",
+ definition="http://example.org/wd", axis_id="z", uom={"code": "deg"},
+ )
+ assert via_alias.axis_id == "z" == via_python.axis_id
+ assert "axisID" in via_alias.model_dump(by_alias=True, exclude_none=True)
+
+
+def test_vector_referenceframe_alias_parity():
+ payload = {
+ "label": "V", "definition": "http://example.org/v",
+ "referenceFrame": "http://example.org/frames/ENU",
+ "coordinates": [_quantity_field("x")],
+ }
+ v = VectorSchema.model_validate(payload)
+ assert v.reference_frame == "http://example.org/frames/ENU"
+ dumped = v.model_dump(by_alias=True, exclude_none=True)
+ assert "referenceFrame" in dumped
+ assert "reference_frame" not in dumped
+
+
+def test_swe_datastream_obsformat_recordschema_alias_parity():
+ fixture = json.loads((FIXTURES_DIR / "fake_weather_schema_swejson.json").read_text())
+ parsed_camel = SWEDatastreamRecordSchema.model_validate(fixture)
+ parsed_snake = SWEDatastreamRecordSchema(
+ obs_format=fixture["obsFormat"],
+ record_schema=fixture["recordSchema"],
+ )
+ assert parsed_camel.obs_format == parsed_snake.obs_format
+ assert parsed_camel.record_schema.name == parsed_snake.record_schema.name
+
+
+# ---------------------------------------------------------------------------
+# 4. Round-trip fidelity
+# ---------------------------------------------------------------------------
+# Strongest single guard against serializer regressions: load a fixture,
+# dump it, re-parse the dump, and confirm the second dump matches the first.
+
+
+@pytest.mark.parametrize(
+ "fixture_name,model_cls",
+ [
+ ("fake_weather_schema_swejson.json", SWEDatastreamRecordSchema),
+ ("fake_weather_schema_omjson.json", JSONDatastreamRecordSchema),
+ ],
+)
+def test_fixture_round_trip_stable(fixture_name, model_cls):
+ raw = json.loads((FIXTURES_DIR / fixture_name).read_text())
+ first = model_cls.model_validate(raw)
+ first_dump = first.model_dump(mode="json", by_alias=True, exclude_none=True)
+ second = model_cls.model_validate(first_dump)
+ second_dump = second.model_dump(mode="json", by_alias=True, exclude_none=True)
+ assert first_dump == second_dump
+
+
+def test_anycomponent_round_trip_through_typeadapter():
+ # Stable-dump: parse → dump → reparse → dump, second dump matches first.
+ # (We don't compare against the input dict because pydantic adds explicit
+ # default values like updatable=False / optional=False to the dump.)
+ payload = _quantity_field("temperature")
+ first = ANY_COMPONENT.validate_python(payload)
+ first_dump = ANY_COMPONENT.dump_python(first, mode="json", by_alias=True,
+ exclude_none=True)
+ second = ANY_COMPONENT.validate_python(first_dump)
+ second_dump = ANY_COMPONENT.dump_python(second, mode="json", by_alias=True,
+ exclude_none=True)
+ assert first_dump == second_dump
+ # Sanity: input keys are all preserved in the dump.
+ for k, v in payload.items():
+ assert first_dump[k] == v
+
+
+# ---------------------------------------------------------------------------
+# 5. Vector.coordinates element-type restriction
+# ---------------------------------------------------------------------------
+# Vector.json: coordinates items oneOf [Count, Quantity, Time].
+
+
+def test_vector_rejects_boolean_in_coordinates():
+ with pytest.raises(ValidationError):
+ VectorSchema.model_validate({
+ "label": "V", "definition": "http://example.org/v",
+ "referenceFrame": "http://example.org/frames/ENU",
+ "coordinates": [{
+ "type": "Boolean", "name": "flag", "label": "F",
+ "definition": "http://example.org/f",
+ }],
+ })
+
+
+def test_vector_rejects_record_in_coordinates():
+ with pytest.raises(ValidationError):
+ VectorSchema.model_validate({
+ "label": "V", "definition": "http://example.org/v",
+ "referenceFrame": "http://example.org/frames/ENU",
+ "coordinates": [{
+ "type": "DataRecord", "name": "inner",
+ "fields": [_quantity_field("a")],
+ }],
+ })
+
+
+def test_vector_accepts_count_quantity_time_in_coordinates():
+ VectorSchema.model_validate({
+ "label": "V", "definition": "http://example.org/v",
+ "referenceFrame": "http://example.org/frames/ENU",
+ "coordinates": [
+ {"type": "Quantity", "name": "x", "label": "X",
+ "definition": "http://example.org/x", "uom": {"code": "m"}},
+ ],
+ })
+
+
+# ---------------------------------------------------------------------------
+# 6. DataRecord.fields minItems: 1
+# ---------------------------------------------------------------------------
+
+
+def test_datarecord_empty_fields_rejected():
+ with pytest.raises(ValidationError):
+ DataRecordSchema(name="r", fields=[])
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index 7415994..e97ee5a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -111,6 +111,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
]
+[[package]]
+name = "backrefs"
+version = "7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/a7dd63622beef68cc0d3c3c36d472e143dd95443d5ebf14cd1a5b4dfbf11/backrefs-7.0.tar.gz", hash = "sha256:4989bb9e1e99eb23647c7160ed51fb21d0b41b5d200f2d3017da41e023097e82", size = 7012453, upload-time = "2026-04-28T16:28:04.215Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d4/39/39a31d7eae729ea14ed10c3ccef79371197177b9355a86cb3525709e8502/backrefs-7.0-py310-none-any.whl", hash = "sha256:b57cd227ea556b0aed3dc9b8da4628db4eabc0402c6d7fcfc69283a93955f7e9", size = 380824, upload-time = "2026-04-28T16:27:55.647Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/b5/9302644225ba7dfa934a2ff2b9c7bb85701313a90dddb3dfaf693fa5bae2/backrefs-7.0-py311-none-any.whl", hash = "sha256:a0fa7360c63509e9e077e174ef4e6d3c21c8db94189b9d957289ae6d794b9475", size = 392626, upload-time = "2026-04-28T16:27:57.42Z" },
+ { url = "https://files.pythonhosted.org/packages/36/da/87912ddec6e06feffbaa3d7aa18fc6352bee2e8f1fee185d7d1690f8f4e8/backrefs-7.0-py312-none-any.whl", hash = "sha256:ca42ce6a49ace3d75684dfa9937f3373902a63284ecb385ce36d15e5dcb41c12", size = 398537, upload-time = "2026-04-28T16:27:58.913Z" },
+ { url = "https://files.pythonhosted.org/packages/00/bb/90ba423612b6aa0adccc6b1874bcd4a9b44b660c0c16f346611e00f64ac3/backrefs-7.0-py313-none-any.whl", hash = "sha256:f2c52955d631b9e1ac4cd56209f0a3a946d592b98e7790e77699339ae01c102a", size = 400491, upload-time = "2026-04-28T16:28:00.928Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/5c/fb93d3092640a24dfb7bd7727a24016d7c01774ca013e60efd3f683c8002/backrefs-7.0-py314-none-any.whl", hash = "sha256:a6448b28180e3ca01134c9cf09dcebafad8531072e09903c5451748a05f24bc9", size = 412349, upload-time = "2026-04-28T16:28:02.412Z" },
+]
+
[[package]]
name = "certifi"
version = "2025.1.31"
@@ -155,6 +168,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" },
]
+[[package]]
+name = "click"
+version = "8.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" },
+]
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -247,6 +272,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" },
]
+[[package]]
+name = "ghp-import"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" },
+]
+
+[[package]]
+name = "griffelib"
+version = "2.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9d/82/74f4a3310cdabfbb10da554c3a672847f1ed33c6f61dd472681ce7f1fe67/griffelib-2.0.2.tar.gz", hash = "sha256:3cf20b3bc470e83763ffbf236e0076b1211bac1bc67de13daf494640f2de707e", size = 166461, upload-time = "2026-03-27T11:34:51.091Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/11/8c/c9138d881c79aa0ea9ed83cbd58d5ca75624378b38cee225dcf5c42cc91f/griffelib-2.0.2-py3-none-any.whl", hash = "sha256:925c857658fb1ba40c0772c37acbc2ab650bd794d9c1b9726922e36ea4117ea1", size = 142357, upload-time = "2026-03-27T11:34:46.275Z" },
+]
+
[[package]]
name = "idna"
version = "3.10"
@@ -286,6 +332,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
]
+[[package]]
+name = "markdown"
+version = "3.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" },
+]
+
[[package]]
name = "markupsafe"
version = "3.0.2"
@@ -333,6 +388,134 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" },
]
+[[package]]
+name = "mergedeep"
+version = "1.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
+]
+
+[[package]]
+name = "mkdocs"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "ghp-import" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mergedeep" },
+ { name = "mkdocs-get-deps" },
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "pyyaml" },
+ { name = "pyyaml-env-tag" },
+ { name = "watchdog" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" },
+]
+
+[[package]]
+name = "mkdocs-autorefs"
+version = "1.4.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/52/c0/f641843de3f612a6b48253f39244165acff36657a91cc903633d456ae1ac/mkdocs_autorefs-1.4.4.tar.gz", hash = "sha256:d54a284f27a7346b9c38f1f852177940c222da508e66edc816a0fa55fc6da197", size = 56588, upload-time = "2026-02-10T15:23:55.105Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/28/de/a3e710469772c6a89595fc52816da05c1e164b4c866a89e3cb82fb1b67c5/mkdocs_autorefs-1.4.4-py3-none-any.whl", hash = "sha256:834ef5408d827071ad1bc69e0f39704fa34c7fc05bc8e1c72b227dfdc5c76089", size = 25530, upload-time = "2026-02-10T15:23:53.817Z" },
+]
+
+[[package]]
+name = "mkdocs-get-deps"
+version = "0.2.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mergedeep" },
+ { name = "platformdirs" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ce/25/b3cccb187655b9393572bde9b09261d267c3bf2f2cdabe347673be5976a6/mkdocs_get_deps-0.2.2.tar.gz", hash = "sha256:8ee8d5f316cdbbb2834bc1df6e69c08fe769a83e040060de26d3c19fad3599a1", size = 11047, upload-time = "2026-03-10T02:46:33.632Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/29/744136411e785c4b0b744d5413e56555265939ab3a104c6a4b719dad33fd/mkdocs_get_deps-0.2.2-py3-none-any.whl", hash = "sha256:e7878cbeac04860b8b5e0ca31d3abad3df9411a75a32cde82f8e44b6c16ff650", size = 9555, upload-time = "2026-03-10T02:46:32.256Z" },
+]
+
+[[package]]
+name = "mkdocs-material"
+version = "9.7.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "babel" },
+ { name = "backrefs" },
+ { name = "colorama" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "mkdocs" },
+ { name = "mkdocs-material-extensions" },
+ { name = "paginate" },
+ { name = "pygments" },
+ { name = "pymdown-extensions" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/45/29/6d2bcf41ae40802c4beda2432396fff97b8456fb496371d1bc7aad6512ec/mkdocs_material-9.7.6.tar.gz", hash = "sha256:00bdde50574f776d328b1862fe65daeaf581ec309bd150f7bff345a098c64a69", size = 4097959, upload-time = "2026-03-19T15:41:58.161Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/01/bc663630c510822c95c47a66af9fa7a443c295b47d5f041e5e6ae62ef659/mkdocs_material-9.7.6-py3-none-any.whl", hash = "sha256:71b84353921b8ea1ba84fe11c50912cc512da8fe0881038fcc9a0761c0e635ba", size = 9305470, upload-time = "2026-03-19T15:41:55.217Z" },
+]
+
+[[package]]
+name = "mkdocs-material-extensions"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847, upload-time = "2023-11-22T19:09:45.208Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728, upload-time = "2023-11-22T19:09:43.465Z" },
+]
+
+[[package]]
+name = "mkdocstrings"
+version = "1.0.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+ { name = "mkdocs-autorefs" },
+ { name = "pymdown-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1d/5d/f888d4d3eb31359b327bc9b17a212d6ef03fe0b0682fbb3fc2cb849fb12b/mkdocstrings-1.0.4.tar.gz", hash = "sha256:3969a6515b77db65fd097b53c1b7aa4ae840bd71a2ee62a6a3e89503446d7172", size = 100088, upload-time = "2026-04-15T09:16:53.376Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6e/94/be70f8ee9c45f2f62b39a1f0e9303bc20e138a8f3b8e50ffd89498e177e1/mkdocstrings-1.0.4-py3-none-any.whl", hash = "sha256:63464b4b29053514f32a1dbbf604e52876d5e638111b0c295ab7ed3cac73ca9b", size = 35560, upload-time = "2026-04-15T09:16:51.436Z" },
+]
+
+[package.optional-dependencies]
+python = [
+ { name = "mkdocstrings-python" },
+]
+
+[[package]]
+name = "mkdocstrings-python"
+version = "2.0.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffelib" },
+ { name = "mkdocs-autorefs" },
+ { name = "mkdocstrings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/29/33/c225eaf898634bdda489a6766fc35d1683c640bffe0e0acd10646b13536d/mkdocstrings_python-2.0.3.tar.gz", hash = "sha256:c518632751cc869439b31c9d3177678ad2bfa5c21b79b863956ad68fc92c13b8", size = 199083, upload-time = "2026-02-20T10:38:36.368Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/32/28/79f0f8de97cce916d5ae88a7bee1ad724855e83e6019c0b4d5b3fabc80f3/mkdocstrings_python-2.0.3-py3-none-any.whl", hash = "sha256:0b83513478bdfd803ff05aa43e9b1fca9dd22bcd9471f09ca6257f009bc5ee12", size = 104779, upload-time = "2026-02-20T10:38:34.517Z" },
+]
+
[[package]]
name = "multidict"
version = "6.6.4"
@@ -436,7 +619,7 @@ wheels = [
[[package]]
name = "oshconnect"
-version = "0.3.0a5.post1"
+version = "0.4.0a1"
source = { virtual = "." }
dependencies = [
{ name = "aiohttp" },
@@ -447,31 +630,36 @@ dependencies = [
{ name = "websockets" },
]
-[package.dev-dependencies]
+[package.optional-dependencies]
dev = [
{ name = "flake8" },
+ { name = "mkdocs-material" },
+ { name = "mkdocstrings", extra = ["python"] },
{ name = "pytest" },
{ name = "sphinx" },
{ name = "sphinx-rtd-theme" },
]
+tinydb = [
+ { name = "tinydb" },
+]
[package.metadata]
requires-dist = [
{ name = "aiohttp", specifier = ">=3.12.15" },
+ { name = "flake8", marker = "extra == 'dev'", specifier = ">=7.2.0" },
+ { name = "mkdocs-material", marker = "extra == 'dev'", specifier = ">=9.5.0" },
+ { name = "mkdocstrings", extras = ["python"], marker = "extra == 'dev'", specifier = ">=0.26.0" },
{ name = "paho-mqtt", specifier = ">=2.1.0" },
- { name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
+ { name = "pydantic", specifier = ">=2.12.5,<3.0.0" },
+ { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.5" },
{ name = "requests" },
- { name = "shapely", specifier = ">=2.0.4,<3.0.0" },
+ { name = "shapely", specifier = ">=2.1.2,<3.0.0" },
+ { name = "sphinx", marker = "extra == 'dev'", specifier = ">=7.4.7" },
+ { name = "sphinx-rtd-theme", marker = "extra == 'dev'", specifier = ">=2.0.0" },
+ { name = "tinydb", marker = "extra == 'tinydb'", specifier = ">=4.8.0,<5.0.0" },
{ name = "websockets", specifier = ">=12.0,<16.0" },
]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "flake8", specifier = ">=7.2.0" },
- { name = "pytest", specifier = ">=8.3.5" },
- { name = "sphinx", specifier = ">=7.4.7" },
- { name = "sphinx-rtd-theme", specifier = ">=2.0.0" },
-]
+provides-extras = ["dev", "tinydb"]
[[package]]
name = "packaging"
@@ -482,6 +670,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" },
]
+[[package]]
+name = "paginate"
+version = "0.5.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252, upload-time = "2024-08-25T14:17:24.139Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746, upload-time = "2024-08-25T14:17:22.55Z" },
+]
+
[[package]]
name = "paho-mqtt"
version = "2.1.0"
@@ -491,6 +688,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c4/cb/00451c3cf31790287768bb12c6bec834f5d292eaf3022afc88e14b8afc94/paho_mqtt-2.1.0-py3-none-any.whl", hash = "sha256:6db9ba9b34ed5bc6b6e3812718c7e06e2fd7444540df2455d2c51bd58808feee", size = 67219, upload-time = "2024-04-29T19:52:48.345Z" },
]
+[[package]]
+name = "pathspec"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = "sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = "sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.9.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9f/4a/0883b8e3802965322523f0b200ecf33d31f10991d0401162f4b23c698b42/platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a", size = 29400, upload-time = "2026-04-09T00:04:10.812Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/75/a6/a0a304dc33b49145b21f4808d763822111e67d1c3a32b524a1baf947b6e1/platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917", size = 21348, upload-time = "2026-04-09T00:04:09.463Z" },
+]
+
[[package]]
name = "pluggy"
version = "1.5.0"
@@ -568,7 +783,7 @@ wheels = [
[[package]]
name = "pydantic"
-version = "2.11.3"
+version = "2.12.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -576,51 +791,80 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload-time = "2025-04-08T13:27:06.399Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload-time = "2025-04-08T13:27:03.789Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]
[[package]]
name = "pydantic-core"
-version = "2.33.1"
+version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload-time = "2025-04-02T09:49:41.8Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload-time = "2025-04-02T09:47:25.394Z" },
- { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload-time = "2025-04-02T09:47:27.417Z" },
- { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload-time = "2025-04-02T09:47:29.006Z" },
- { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload-time = "2025-04-02T09:47:33.464Z" },
- { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload-time = "2025-04-02T09:47:34.812Z" },
- { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload-time = "2025-04-02T09:47:37.315Z" },
- { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload-time = "2025-04-02T09:47:39.013Z" },
- { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, upload-time = "2025-04-02T09:47:40.427Z" },
- { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload-time = "2025-04-02T09:47:42.01Z" },
- { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload-time = "2025-04-02T09:47:43.425Z" },
- { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload-time = "2025-04-02T09:47:44.979Z" },
- { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload-time = "2025-04-02T09:47:46.843Z" },
- { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload-time = "2025-04-02T09:47:48.404Z" },
- { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload-time = "2025-04-02T09:47:49.839Z" },
- { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload-time = "2025-04-02T09:47:51.648Z" },
- { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload-time = "2025-04-02T09:47:53.149Z" },
- { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload-time = "2025-04-02T09:47:55.006Z" },
- { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload-time = "2025-04-02T09:47:56.532Z" },
- { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload-time = "2025-04-02T09:47:58.088Z" },
- { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload-time = "2025-04-02T09:47:59.591Z" },
- { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload-time = "2025-04-02T09:48:01.397Z" },
- { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload-time = "2025-04-02T09:48:03.056Z" },
- { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload-time = "2025-04-02T09:48:04.662Z" },
- { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload-time = "2025-04-02T09:48:06.226Z" },
- { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload-time = "2025-04-02T09:48:08.114Z" },
- { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload-time = "2025-04-02T09:48:09.708Z" },
- { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload-time = "2025-04-02T09:48:11.288Z" },
- { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload-time = "2025-04-02T09:48:12.861Z" },
- { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload-time = "2025-04-02T09:48:14.553Z" },
- { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload-time = "2025-04-02T09:48:16.222Z" },
- { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload-time = "2025-04-02T09:48:17.97Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+ { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+ { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+ { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+ { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+ { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+ { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+ { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+ { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
]
[[package]]
@@ -641,6 +885,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" },
]
+[[package]]
+name = "pymdown-extensions"
+version = "10.21.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/08/f1c908c581fd11913da4711ea7ba32c0eee40b0190000996bb863b0c9349/pymdown_extensions-10.21.2.tar.gz", hash = "sha256:c3f55a5b8a1d0edf6699e35dcbea71d978d34ff3fa79f3d807b8a5b3fa90fbdc", size = 853922, upload-time = "2026-03-29T15:01:55.233Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/27/a2fc51a4a122dfd1015e921ae9d22fee3d20b0b8080d9a704578bf9deece/pymdown_extensions-10.21.2-py3-none-any.whl", hash = "sha256:5c0fd2a2bea14eb39af8ff284f1066d898ab2187d81b889b75d46d4348c01638", size = 268901, upload-time = "2026-03-29T15:01:53.244Z" },
+]
+
[[package]]
name = "pytest"
version = "8.3.5"
@@ -656,6 +913,76 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" },
]
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+ { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+ { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+ { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
+]
+
+[[package]]
+name = "pyyaml-env-tag"
+version = "1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737, upload-time = "2025-05-13T15:24:01.64Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722, upload-time = "2025-05-13T15:23:59.629Z" },
+]
+
[[package]]
name = "requests"
version = "2.32.3"
@@ -673,37 +1000,62 @@ wheels = [
[[package]]
name = "shapely"
-version = "2.1.0"
+version = "2.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/fb/fe/3b0d2f828ffaceadcdcb51b75b9c62d98e62dd95ce575278de35f24a1c20/shapely-2.1.0.tar.gz", hash = "sha256:2cbe90e86fa8fc3ca8af6ffb00a77b246b918c7cf28677b7c21489b678f6b02e", size = 313617, upload-time = "2025-04-03T09:15:05.725Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/4e/d1/6a9371ec39d3ef08e13225594e6c55b045209629afd9e6d403204507c2a8/shapely-2.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53e7ee8bd8609cf12ee6dce01ea5affe676976cf7049315751d53d8db6d2b4b2", size = 1830732, upload-time = "2025-04-03T09:14:25.047Z" },
- { url = "https://files.pythonhosted.org/packages/32/87/799e3e48be7ce848c08509b94d2180f4ddb02e846e3c62d0af33da4d78d3/shapely-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3cab20b665d26dbec0b380e15749bea720885a481fa7b1eedc88195d4a98cfa4", size = 1638404, upload-time = "2025-04-03T09:14:26.456Z" },
- { url = "https://files.pythonhosted.org/packages/85/00/6665d77f9dd09478ab0993b8bc31668aec4fd3e5f1ddd1b28dd5830e47be/shapely-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a38b39a09340273c3c92b3b9a374272a12cc7e468aeeea22c1c46217a03e5c", size = 2945316, upload-time = "2025-04-03T09:14:28.266Z" },
- { url = "https://files.pythonhosted.org/packages/34/49/738e07d10bbc67cae0dcfe5a484c6e518a517f4f90550dda2adf3a78b9f2/shapely-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edaec656bdd9b71278b98e6f77c464b1c3b2daa9eace78012ff0f0b4b5b15b04", size = 3063099, upload-time = "2025-04-03T09:14:30.067Z" },
- { url = "https://files.pythonhosted.org/packages/88/b8/138098674559362ab29f152bff3b6630de423378fbb0324812742433a4ef/shapely-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c8a732ddd9b25e7a54aa748e7df8fd704e23e5d5d35b7d376d80bffbfc376d04", size = 3887873, upload-time = "2025-04-03T09:14:31.912Z" },
- { url = "https://files.pythonhosted.org/packages/67/a8/fdae7c2db009244991d86f4d2ca09d2f5ccc9d41c312c3b1ee1404dc55da/shapely-2.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9c93693ad8adfdc9138a5a2d42da02da94f728dd2e82d2f0f442f10e25027f5f", size = 4067004, upload-time = "2025-04-03T09:14:33.976Z" },
- { url = "https://files.pythonhosted.org/packages/ed/78/17e17d91b489019379df3ee1afc4bd39787b232aaa1d540f7d376f0280b7/shapely-2.1.0-cp312-cp312-win32.whl", hash = "sha256:d8ac6604eefe807e71a908524de23a37920133a1729fe3a4dfe0ed82c044cbf4", size = 1527366, upload-time = "2025-04-03T09:14:35.348Z" },
- { url = "https://files.pythonhosted.org/packages/b8/bd/9249bd6dda948441e25e4fb14cbbb5205146b0fff12c66b19331f1ff2141/shapely-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:f4f47e631aa4f9ec5576eac546eb3f38802e2f82aeb0552f9612cb9a14ece1db", size = 1708265, upload-time = "2025-04-03T09:14:36.878Z" },
- { url = "https://files.pythonhosted.org/packages/8d/77/4e368704b2193e74498473db4461d697cc6083c96f8039367e59009d78bd/shapely-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b64423295b563f43a043eb786e7a03200ebe68698e36d2b4b1c39f31dfb50dfb", size = 1830029, upload-time = "2025-04-03T09:14:38.795Z" },
- { url = "https://files.pythonhosted.org/packages/71/3c/d888597bda680e4de987316b05ca9db07416fa29523beff64f846503302f/shapely-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1b5578f45adc25b235b22d1ccb9a0348c8dc36f31983e57ea129a88f96f7b870", size = 1637999, upload-time = "2025-04-03T09:14:40.209Z" },
- { url = "https://files.pythonhosted.org/packages/03/8d/ee0e23b7ef88fba353c63a81f1f329c77f5703835db7b165e7c0b8b7f839/shapely-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a7e83d383b27f02b684e50ab7f34e511c92e33b6ca164a6a9065705dd64bcb", size = 2929348, upload-time = "2025-04-03T09:14:42.11Z" },
- { url = "https://files.pythonhosted.org/packages/d1/a7/5c9cb413e4e2ce52c16be717e94abd40ce91b1f8974624d5d56154c5d40b/shapely-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:942031eb4d8f7b3b22f43ba42c09c7aa3d843aa10d5cc1619fe816e923b66e55", size = 3048973, upload-time = "2025-04-03T09:14:43.841Z" },
- { url = "https://files.pythonhosted.org/packages/84/23/45b90c0bd2157b238490ca56ef2eedf959d3514c7d05475f497a2c88b6d9/shapely-2.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d2843c456a2e5627ee6271800f07277c0d2652fb287bf66464571a057dbc00b3", size = 3873148, upload-time = "2025-04-03T09:14:45.924Z" },
- { url = "https://files.pythonhosted.org/packages/c0/bc/ed7d5d37f5395166042576f0c55a12d7e56102799464ba7ea3a72a38c769/shapely-2.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8c4b17469b7f39a5e6a7cfea79f38ae08a275427f41fe8b48c372e1449147908", size = 4052655, upload-time = "2025-04-03T09:14:47.475Z" },
- { url = "https://files.pythonhosted.org/packages/c0/8f/a1dafbb10d20d1c569f2db3fb1235488f624dafe8469e8ce65356800ba31/shapely-2.1.0-cp313-cp313-win32.whl", hash = "sha256:30e967abd08fce49513d4187c01b19f139084019f33bec0673e8dbeb557c45e4", size = 1526600, upload-time = "2025-04-03T09:14:48.952Z" },
- { url = "https://files.pythonhosted.org/packages/e3/f0/9f8cdf2258d7aed742459cea51c70d184de92f5d2d6f5f7f1ded90a18c31/shapely-2.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:1dc8d4364483a14aba4c844b7bd16a6fa3728887e2c33dfa1afa34a3cf4d08a5", size = 1707115, upload-time = "2025-04-03T09:14:50.445Z" },
- { url = "https://files.pythonhosted.org/packages/75/ed/32952df461753a65b3e5d24c8efb361d3a80aafaef0b70d419063f6f2c11/shapely-2.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:673e073fea099d1c82f666fb7ab0a00a77eff2999130a69357ce11941260d855", size = 1824847, upload-time = "2025-04-03T09:14:52.358Z" },
- { url = "https://files.pythonhosted.org/packages/ff/b9/2284de512af30b02f93ddcdd2e5c79834a3cf47fa3ca11b0f74396feb046/shapely-2.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6d1513f915a56de67659fe2047c1ad5ff0f8cbff3519d1e74fced69c9cb0e7da", size = 1631035, upload-time = "2025-04-03T09:14:53.739Z" },
- { url = "https://files.pythonhosted.org/packages/35/16/a59f252a7e736b73008f10d0950ffeeb0d5953be7c0bdffd39a02a6ba310/shapely-2.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d6a7043178890b9e028d80496ff4c79dc7629bff4d78a2f25323b661756bab8", size = 2968639, upload-time = "2025-04-03T09:14:55.674Z" },
- { url = "https://files.pythonhosted.org/packages/a5/0a/6a20eca7b0092cfa243117e8e145a58631a4833a0a519ec9b445172e83a0/shapely-2.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb638378dc3d76f7e85b67d7e2bb1366811912430ac9247ac00c127c2b444cdc", size = 3055713, upload-time = "2025-04-03T09:14:57.564Z" },
- { url = "https://files.pythonhosted.org/packages/fb/44/eeb0c7583b1453d1cf7a319a1d738e08f98a5dc993fa1ef3c372983e4cb5/shapely-2.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:737124e87d91d616acf9a911f74ac55e05db02a43a6a7245b3d663817b876055", size = 3890478, upload-time = "2025-04-03T09:14:59.139Z" },
- { url = "https://files.pythonhosted.org/packages/5d/6e/37ff3c6af1d408cacb0a7d7bfea7b8ab163a5486e35acb08997eae9d8756/shapely-2.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e6c229e7bb87aae5df82fa00b6718987a43ec168cc5affe095cca59d233f314", size = 4036148, upload-time = "2025-04-03T09:15:01.328Z" },
- { url = "https://files.pythonhosted.org/packages/c8/6a/8c0b7de3aeb5014a23f06c5e9d3c7852ebcf0d6b00fe660b93261e310e24/shapely-2.1.0-cp313-cp313t-win32.whl", hash = "sha256:a9580bda119b1f42f955aa8e52382d5c73f7957e0203bc0c0c60084846f3db94", size = 1535993, upload-time = "2025-04-03T09:15:02.973Z" },
- { url = "https://files.pythonhosted.org/packages/a8/91/ae80359a58409d52e4d62c7eacc7eb3ddee4b9135f1db884b6a43cf2e174/shapely-2.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e8ff4e5cfd799ba5b6f37b5d5527dbd85b4a47c65b6d459a03d0962d2a9d4d10", size = 1717777, upload-time = "2025-04-03T09:15:04.461Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" },
+ { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" },
+ { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" },
+ { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/90/98ef257c23c46425dc4d1d31005ad7c8d649fe423a38b917db02c30f1f5a/shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8", size = 1832644, upload-time = "2025-09-24T13:50:44.886Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/ab/0bee5a830d209adcd3a01f2d4b70e587cdd9fd7380d5198c064091005af8/shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a", size = 1642887, upload-time = "2025-09-24T13:50:46.735Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/5e/7d7f54ba960c13302584c73704d8c4d15404a51024631adb60b126a4ae88/shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e", size = 2970931, upload-time = "2025-09-24T13:50:48.374Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/a2/83fc37e2a58090e3d2ff79175a95493c664bcd0b653dd75cb9134645a4e5/shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6", size = 3082855, upload-time = "2025-09-24T13:50:50.037Z" },
+ { url = "https://files.pythonhosted.org/packages/44/2b/578faf235a5b09f16b5f02833c53822294d7f21b242f8e2d0cf03fb64321/shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af", size = 3979960, upload-time = "2025-09-24T13:50:51.74Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/04/167f096386120f692cc4ca02f75a17b961858997a95e67a3cb6a7bbd6b53/shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd", size = 4142851, upload-time = "2025-09-24T13:50:53.49Z" },
+ { url = "https://files.pythonhosted.org/packages/48/74/fb402c5a6235d1c65a97348b48cdedb75fb19eca2b1d66d04969fc1c6091/shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350", size = 1541890, upload-time = "2025-09-24T13:50:55.337Z" },
+ { url = "https://files.pythonhosted.org/packages/41/47/3647fe7ad990af60ad98b889657a976042c9988c2807cf322a9d6685f462/shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715", size = 1722151, upload-time = "2025-09-24T13:50:57.153Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/49/63953754faa51ffe7d8189bfbe9ca34def29f8c0e34c67cbe2a2795f269d/shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40", size = 1834130, upload-time = "2025-09-24T13:50:58.49Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/ee/dce001c1984052970ff60eb4727164892fb2d08052c575042a47f5a9e88f/shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b", size = 1642802, upload-time = "2025-09-24T13:50:59.871Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e7/fc4e9a19929522877fa602f705706b96e78376afb7fad09cad5b9af1553c/shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801", size = 3018460, upload-time = "2025-09-24T13:51:02.08Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/18/7519a25db21847b525696883ddc8e6a0ecaa36159ea88e0fef11466384d0/shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0", size = 3095223, upload-time = "2025-09-24T13:51:04.472Z" },
+ { url = "https://files.pythonhosted.org/packages/48/de/b59a620b1f3a129c3fecc2737104a0a7e04e79335bd3b0a1f1609744cf17/shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c", size = 4030760, upload-time = "2025-09-24T13:51:06.455Z" },
+ { url = "https://files.pythonhosted.org/packages/96/b3/c6655ee7232b417562bae192ae0d3ceaadb1cc0ffc2088a2ddf415456cc2/shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99", size = 4170078, upload-time = "2025-09-24T13:51:08.584Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/8e/605c76808d73503c9333af8f6cbe7e1354d2d238bda5f88eea36bfe0f42a/shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf", size = 1559178, upload-time = "2025-09-24T13:51:10.73Z" },
+ { url = "https://files.pythonhosted.org/packages/36/f7/d317eb232352a1f1444d11002d477e54514a4a6045536d49d0c59783c0da/shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c", size = 1739756, upload-time = "2025-09-24T13:51:12.105Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/c4/3ce4c2d9b6aabd27d26ec988f08cb877ba9e6e96086eff81bfea93e688c7/shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223", size = 1831290, upload-time = "2025-09-24T13:51:13.56Z" },
+ { url = "https://files.pythonhosted.org/packages/17/b9/f6ab8918fc15429f79cb04afa9f9913546212d7fb5e5196132a2af46676b/shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c", size = 1641463, upload-time = "2025-09-24T13:51:14.972Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/57/91d59ae525ca641e7ac5551c04c9503aee6f29b92b392f31790fcb1a4358/shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df", size = 2970145, upload-time = "2025-09-24T13:51:16.961Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cb/4948be52ee1da6927831ab59e10d4c29baa2a714f599f1f0d1bc747f5777/shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf", size = 3073806, upload-time = "2025-09-24T13:51:18.712Z" },
+ { url = "https://files.pythonhosted.org/packages/03/83/f768a54af775eb41ef2e7bec8a0a0dbe7d2431c3e78c0a8bdba7ab17e446/shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4", size = 3980803, upload-time = "2025-09-24T13:51:20.37Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/cb/559c7c195807c91c79d38a1f6901384a2878a76fbdf3f1048893a9b7534d/shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc", size = 4133301, upload-time = "2025-09-24T13:51:21.887Z" },
+ { url = "https://files.pythonhosted.org/packages/80/cd/60d5ae203241c53ef3abd2ef27c6800e21afd6c94e39db5315ea0cbafb4a/shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566", size = 1583247, upload-time = "2025-09-24T13:51:23.401Z" },
+ { url = "https://files.pythonhosted.org/packages/74/d4/135684f342e909330e50d31d441ace06bf83c7dc0777e11043f99167b123/shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c", size = 1773019, upload-time = "2025-09-24T13:51:24.873Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/05/a44f3f9f695fa3ada22786dc9da33c933da1cbc4bfe876fe3a100bafe263/shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a", size = 1834137, upload-time = "2025-09-24T13:51:26.665Z" },
+ { url = "https://files.pythonhosted.org/packages/52/7e/4d57db45bf314573427b0a70dfca15d912d108e6023f623947fa69f39b72/shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076", size = 1642884, upload-time = "2025-09-24T13:51:28.029Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/27/4e29c0a55d6d14ad7422bf86995d7ff3f54af0eba59617eb95caf84b9680/shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1", size = 3018320, upload-time = "2025-09-24T13:51:29.903Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/bb/992e6a3c463f4d29d4cd6ab8963b75b1b1040199edbd72beada4af46bde5/shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0", size = 3094931, upload-time = "2025-09-24T13:51:32.699Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/16/82e65e21070e473f0ed6451224ed9fa0be85033d17e0c6e7213a12f59d12/shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26", size = 4030406, upload-time = "2025-09-24T13:51:34.189Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/75/c24ed871c576d7e2b64b04b1fe3d075157f6eb54e59670d3f5ffb36e25c7/shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0", size = 4169511, upload-time = "2025-09-24T13:51:36.297Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/f7/b3d1d6d18ebf55236eec1c681ce5e665742aab3c0b7b232720a7d43df7b6/shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735", size = 1602607, upload-time = "2025-09-24T13:51:37.757Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/f6/f09272a71976dfc138129b8faf435d064a811ae2f708cb147dccdf7aacdb/shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9", size = 1796682, upload-time = "2025-09-24T13:51:39.233Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
@@ -822,25 +1174,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" },
]
+[[package]]
+name = "tinydb"
+version = "4.8.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a0/79/4af51e2bb214b6ea58f857c51183d92beba85b23f7ba61c983ab3de56c33/tinydb-4.8.2.tar.gz", hash = "sha256:f7dfc39b8d7fda7a1ca62a8dbb449ffd340a117c1206b68c50b1a481fb95181d", size = 32566, upload-time = "2024-10-12T15:24:01.13Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/17/853354204e1ca022d6b7d011ca7f3206c4f8faa3cc743e92609b49c1d83f/tinydb-4.8.2-py3-none-any.whl", hash = "sha256:f97030ee5cbc91eeadd1d7af07ab0e48ceb04aa63d4a983adbaca4cba16e86c3", size = 24888, upload-time = "2024-10-12T15:23:59.833Z" },
+]
+
[[package]]
name = "typing-extensions"
-version = "4.13.2"
+version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" },
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
[[package]]
name = "typing-inspection"
-version = "0.4.0"
+version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]
@@ -852,6 +1213,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" },
]
+[[package]]
+name = "watchdog"
+version = "6.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" },
+ { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" },
+ { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" },
+ { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" },
+ { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
+ { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" },
+ { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" },
+ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" },
+]
+
[[package]]
name = "websockets"
version = "12.0"