forked from iarv/meshcore-hub
Implement Phase 1: Foundation for MeshCore Hub
This commit establishes the complete foundation for the MeshCore Hub project: - Project setup with pyproject.toml (Python 3.11+, all dependencies) - Development tools: black, flake8, mypy, pytest configuration - Pre-commit hooks for code quality - Package structure with all components (interface, collector, api, web) Common package includes: - Pydantic settings for all component configurations - SQLAlchemy models for nodes, messages, advertisements, traces, telemetry - Pydantic schemas for events, API requests/responses, commands - MQTT client utilities with topic builder - Logging configuration Database infrastructure: - Alembic setup with initial migration for all tables - Database manager with session handling CLI entry point: - Click-based CLI with subcommands for all components - Database migration commands (upgrade, downgrade, revision) Tests: - Basic test suite for config and models - pytest fixtures for in-memory database testing
This commit is contained in:
77
.env.example
Normal file
77
.env.example
Normal file
@@ -0,0 +1,77 @@
|
||||
# MeshCore Hub - Environment Configuration Example
|
||||
# Copy this file to .env and customize values
|
||||
|
||||
# ===================
|
||||
# Common Settings
|
||||
# ===================
|
||||
|
||||
# Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
|
||||
LOG_LEVEL=INFO
|
||||
|
||||
# MQTT Broker Settings
|
||||
MQTT_HOST=localhost
|
||||
MQTT_PORT=1883
|
||||
MQTT_USERNAME=
|
||||
MQTT_PASSWORD=
|
||||
MQTT_PREFIX=meshcore
|
||||
|
||||
# ===================
|
||||
# Interface Settings
|
||||
# ===================
|
||||
|
||||
# Mode of operation (RECEIVER or SENDER)
|
||||
INTERFACE_MODE=RECEIVER
|
||||
|
||||
# Serial port for MeshCore device
|
||||
SERIAL_PORT=/dev/ttyUSB0
|
||||
SERIAL_BAUD=115200
|
||||
|
||||
# Use mock device for testing (true/false)
|
||||
MOCK_DEVICE=false
|
||||
|
||||
# ===================
|
||||
# Collector Settings
|
||||
# ===================
|
||||
|
||||
# Database connection URL
|
||||
# SQLite: sqlite:///./meshcore.db
|
||||
# PostgreSQL: postgresql://user:password@localhost/meshcore
|
||||
DATABASE_URL=sqlite:///./meshcore.db
|
||||
|
||||
# ===================
|
||||
# API Settings
|
||||
# ===================
|
||||
|
||||
# API Server binding
|
||||
API_HOST=0.0.0.0
|
||||
API_PORT=8000
|
||||
|
||||
# API Keys for authentication
|
||||
# Generate secure keys for production!
|
||||
API_READ_KEY=
|
||||
API_ADMIN_KEY=
|
||||
|
||||
# ===================
|
||||
# Web Dashboard Settings
|
||||
# ===================
|
||||
|
||||
# Web Server binding
|
||||
WEB_HOST=0.0.0.0
|
||||
WEB_PORT=8080
|
||||
|
||||
# API connection for web dashboard
|
||||
API_BASE_URL=http://localhost:8000
|
||||
API_KEY=
|
||||
|
||||
# Network Information (displayed on web dashboard)
|
||||
NETWORK_DOMAIN=
|
||||
NETWORK_NAME=MeshCore Network
|
||||
NETWORK_CITY=
|
||||
NETWORK_COUNTRY=
|
||||
NETWORK_LOCATION=
|
||||
NETWORK_RADIO_CONFIG=
|
||||
NETWORK_CONTACT_EMAIL=
|
||||
NETWORK_CONTACT_DISCORD=
|
||||
|
||||
# Path to members JSON file
|
||||
MEMBERS_FILE=members.json
|
||||
16
.flake8
Normal file
16
.flake8
Normal file
@@ -0,0 +1,16 @@
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203, E501, W503
|
||||
exclude =
|
||||
.git,
|
||||
__pycache__,
|
||||
.venv,
|
||||
venv,
|
||||
build,
|
||||
dist,
|
||||
*.egg-info,
|
||||
alembic/versions,
|
||||
.mypy_cache,
|
||||
.pytest_cache
|
||||
per-file-ignores =
|
||||
__init__.py: F401
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -205,3 +205,8 @@ cython_debug/
|
||||
marimo/_static/
|
||||
marimo/_lsp/
|
||||
__marimo__/
|
||||
|
||||
# MeshCore Hub specific
|
||||
*.db
|
||||
meshcore.db
|
||||
members.json
|
||||
|
||||
38
.pre-commit-config.yaml
Normal file
38
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,38 @@
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: check-yaml
|
||||
- id: check-added-large-files
|
||||
- id: check-merge-conflict
|
||||
- id: check-toml
|
||||
- id: debug-statements
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
language_version: python3.11
|
||||
args: ["--line-length=88"]
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
additional_dependencies:
|
||||
- flake8-bugbear
|
||||
- flake8-comprehensions
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v1.9.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
additional_dependencies:
|
||||
- pydantic>=2.0.0
|
||||
- pydantic-settings>=2.0.0
|
||||
- sqlalchemy>=2.0.0
|
||||
- fastapi>=0.100.0
|
||||
- types-paho-mqtt>=1.6.0
|
||||
args: ["--ignore-missing-imports"]
|
||||
87
alembic.ini
Normal file
87
alembic.ini
Normal file
@@ -0,0 +1,87 @@
|
||||
# A generic, single database configuration for Alembic.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = src
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = UTC
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during the 'revision' command
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without a source .py file
|
||||
# to be detected as revisions in the versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version location specification; This defaults to alembic/versions.
|
||||
version_locations = %(here)s/alembic/versions
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - can be overridden by environment variable
|
||||
sqlalchemy.url = sqlite:///./meshcore.db
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
# format using "black" - only if black is installed
|
||||
hooks = black
|
||||
black.type = console_scripts
|
||||
black.entrypoint = black
|
||||
black.options = -q
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
85
alembic/env.py
Normal file
85
alembic/env.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""Alembic environment configuration."""
|
||||
|
||||
import os
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from meshcore_hub.common.models import Base
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Model's MetaData object for 'autogenerate' support
|
||||
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def get_database_url() -> str:
    """Resolve the database URL for this migration run.

    The ``DATABASE_URL`` environment variable takes precedence; otherwise
    the ``sqlalchemy.url`` option from ``alembic.ini`` is used, with a
    local SQLite file as the final fallback.
    """
    # An empty env var is treated as unset in both forms, so `or` is safe.
    return os.environ.get("DATABASE_URL") or config.get_main_option(
        "sqlalchemy.url", "sqlite:///./meshcore.db"
    )
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a URL — no Engine and no
    DBAPI required.  Calls to context.execute() emit the generated SQL
    to the script output instead of a live connection.
    """
    context.configure(
        url=get_database_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,  # SQLite batch mode for ALTER TABLE
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the alembic.ini section (with the URL
    overridden by get_database_url) and applies migrations over a
    live database connection.
    """
    section = config.get_section(config.config_ini_section, {})
    section["sqlalchemy.url"] = get_database_url()

    engine = engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,  # SQLite batch mode for ALTER TABLE
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Alembic imports this module as a script; dispatch on the mode the
# CLI requested (e.g. `alembic upgrade head --sql` runs offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
26
alembic/script.py.mako
Normal file
26
alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
244
alembic/versions/20241202_0001_001_initial_schema.py
Normal file
244
alembic/versions/20241202_0001_001_initial_schema.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""Initial database schema
|
||||
|
||||
Revision ID: 001
|
||||
Revises:
|
||||
Create Date: 2024-12-02
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "001"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the complete initial schema and its lookup indexes.

    Tables are created parents-first (``nodes`` before everything that
    references it) so the foreign keys resolve.  Event-style tables
    reference ``nodes`` with ``ondelete="SET NULL"`` so history survives
    node deletion; ``node_tags`` uses ``ondelete="CASCADE"`` since tags
    are owned by their node.
    """
    # Create nodes table
    op.create_table(
        "nodes",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("public_key", sa.String(64), nullable=False),
        sa.Column("name", sa.String(255), nullable=True),
        sa.Column("adv_type", sa.String(20), nullable=True),
        sa.Column("flags", sa.Integer(), nullable=True),
        sa.Column("first_seen", sa.DateTime(timezone=True), nullable=False),
        sa.Column("last_seen", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("public_key"),
    )
    op.create_index("ix_nodes_public_key", "nodes", ["public_key"])
    op.create_index("ix_nodes_last_seen", "nodes", ["last_seen"])
    op.create_index("ix_nodes_adv_type", "nodes", ["adv_type"])

    # Create node_tags table
    op.create_table(
        "node_tags",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("node_id", sa.String(), nullable=False),
        sa.Column("key", sa.String(100), nullable=False),
        sa.Column("value", sa.Text(), nullable=True),
        sa.Column("value_type", sa.String(20), nullable=False, server_default="string"),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["node_id"], ["nodes.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # One value per (node, key) pair.
        sa.UniqueConstraint("node_id", "key", name="uq_node_tags_node_key"),
    )
    op.create_index("ix_node_tags_node_id", "node_tags", ["node_id"])
    op.create_index("ix_node_tags_key", "node_tags", ["key"])

    # Create messages table
    op.create_table(
        "messages",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("receiver_node_id", sa.String(), nullable=True),
        sa.Column("message_type", sa.String(20), nullable=False),
        sa.Column("pubkey_prefix", sa.String(12), nullable=True),
        sa.Column("channel_idx", sa.Integer(), nullable=True),
        sa.Column("text", sa.Text(), nullable=False),
        sa.Column("path_len", sa.Integer(), nullable=True),
        sa.Column("txt_type", sa.Integer(), nullable=True),
        sa.Column("signature", sa.String(8), nullable=True),
        sa.Column("snr", sa.Float(), nullable=True),
        sa.Column("sender_timestamp", sa.DateTime(timezone=True), nullable=True),
        sa.Column("received_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["receiver_node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_messages_receiver_node_id", "messages", ["receiver_node_id"])
    op.create_index("ix_messages_message_type", "messages", ["message_type"])
    op.create_index("ix_messages_pubkey_prefix", "messages", ["pubkey_prefix"])
    op.create_index("ix_messages_channel_idx", "messages", ["channel_idx"])
    op.create_index("ix_messages_received_at", "messages", ["received_at"])

    # Create advertisements table
    op.create_table(
        "advertisements",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("receiver_node_id", sa.String(), nullable=True),
        sa.Column("node_id", sa.String(), nullable=True),
        sa.Column("public_key", sa.String(64), nullable=False),
        sa.Column("name", sa.String(255), nullable=True),
        sa.Column("adv_type", sa.String(20), nullable=True),
        sa.Column("flags", sa.Integer(), nullable=True),
        sa.Column("received_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["receiver_node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_advertisements_receiver_node_id", "advertisements", ["receiver_node_id"])
    op.create_index("ix_advertisements_node_id", "advertisements", ["node_id"])
    op.create_index("ix_advertisements_public_key", "advertisements", ["public_key"])
    op.create_index("ix_advertisements_received_at", "advertisements", ["received_at"])

    # Create trace_paths table
    op.create_table(
        "trace_paths",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("receiver_node_id", sa.String(), nullable=True),
        sa.Column("initiator_tag", sa.BigInteger(), nullable=False),
        sa.Column("path_len", sa.Integer(), nullable=True),
        sa.Column("flags", sa.Integer(), nullable=True),
        sa.Column("auth", sa.Integer(), nullable=True),
        sa.Column("path_hashes", sa.JSON(), nullable=True),
        sa.Column("snr_values", sa.JSON(), nullable=True),
        sa.Column("hop_count", sa.Integer(), nullable=True),
        sa.Column("received_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["receiver_node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_trace_paths_receiver_node_id", "trace_paths", ["receiver_node_id"])
    op.create_index("ix_trace_paths_initiator_tag", "trace_paths", ["initiator_tag"])
    op.create_index("ix_trace_paths_received_at", "trace_paths", ["received_at"])

    # Create telemetry table
    op.create_table(
        "telemetry",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("receiver_node_id", sa.String(), nullable=True),
        sa.Column("node_id", sa.String(), nullable=True),
        sa.Column("node_public_key", sa.String(64), nullable=False),
        sa.Column("lpp_data", sa.LargeBinary(), nullable=True),
        sa.Column("parsed_data", sa.JSON(), nullable=True),
        sa.Column("received_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["receiver_node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_telemetry_receiver_node_id", "telemetry", ["receiver_node_id"])
    op.create_index("ix_telemetry_node_id", "telemetry", ["node_id"])
    op.create_index("ix_telemetry_node_public_key", "telemetry", ["node_public_key"])
    op.create_index("ix_telemetry_received_at", "telemetry", ["received_at"])

    # Create events_log table
    op.create_table(
        "events_log",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("receiver_node_id", sa.String(), nullable=True),
        sa.Column("event_type", sa.String(50), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=True),
        sa.Column("received_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["receiver_node_id"], ["nodes.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_events_log_receiver_node_id", "events_log", ["receiver_node_id"])
    op.create_index("ix_events_log_event_type", "events_log", ["event_type"])
    op.create_index("ix_events_log_received_at", "events_log", ["received_at"])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop all tables created by this revision."""
    # Reverse of creation order: foreign-key dependents first, then the
    # parent "nodes" table last.
    for table_name in (
        "events_log",
        "telemetry",
        "trace_paths",
        "advertisements",
        "messages",
        "node_tags",
        "nodes",
    ):
        op.drop_table(table_name)
|
||||
143
pyproject.toml
Normal file
143
pyproject.toml
Normal file
@@ -0,0 +1,143 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=68.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "meshcore-hub"
|
||||
version = "0.1.0"
|
||||
description = "Python monorepo for managing and orchestrating MeshCore mesh networks"
|
||||
readme = "README.md"
|
||||
license = {text = "MIT"}
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{name = "MeshCore Hub Contributors"}
|
||||
]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Communications",
|
||||
"Topic :: System :: Networking",
|
||||
]
|
||||
keywords = ["meshcore", "mesh", "network", "mqtt", "lora"]
|
||||
dependencies = [
|
||||
"click>=8.1.0",
|
||||
"pydantic>=2.0.0",
|
||||
"pydantic-settings>=2.0.0",
|
||||
"sqlalchemy>=2.0.0",
|
||||
"alembic>=1.12.0",
|
||||
"fastapi>=0.100.0",
|
||||
"uvicorn[standard]>=0.23.0",
|
||||
"paho-mqtt>=2.0.0",
|
||||
"jinja2>=3.1.0",
|
||||
"python-multipart>=0.0.6",
|
||||
"httpx>=0.25.0",
|
||||
"aiosqlite>=0.19.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4.0",
|
||||
"pytest-asyncio>=0.21.0",
|
||||
"pytest-cov>=4.1.0",
|
||||
"black>=23.0.0",
|
||||
"flake8>=6.1.0",
|
||||
"mypy>=1.5.0",
|
||||
"pre-commit>=3.4.0",
|
||||
"types-paho-mqtt>=1.6.0",
|
||||
]
|
||||
postgres = [
|
||||
"asyncpg>=0.28.0",
|
||||
"psycopg2-binary>=2.9.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
meshcore-hub = "meshcore_hub.__main__:main"
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://github.com/meshcore-dev/meshcore-hub"
|
||||
Documentation = "https://github.com/meshcore-dev/meshcore-hub#readme"
|
||||
Repository = "https://github.com/meshcore-dev/meshcore-hub"
|
||||
Issues = "https://github.com/meshcore-dev/meshcore-hub/issues"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
meshcore_hub = ["py.typed", "templates/**/*", "static/**/*"]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
include = '\.pyi?$'
|
||||
extend-exclude = '''
|
||||
/(
|
||||
\.eggs
|
||||
| \.git
|
||||
| \.hg
|
||||
| \.mypy_cache
|
||||
| \.tox
|
||||
| \.venv
|
||||
| _build
|
||||
| buck-out
|
||||
| build
|
||||
| dist
|
||||
| alembic/versions
|
||||
)/
|
||||
'''
|
||||
|
||||
[tool.mypy]
|
||||
python_version = "3.11"
|
||||
warn_return_any = true
|
||||
warn_unused_ignores = true
|
||||
disallow_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
check_untyped_defs = true
|
||||
strict_optional = true
|
||||
plugins = ["pydantic.mypy"]
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
"paho.*",
|
||||
"uvicorn.*",
|
||||
]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
minversion = "7.0"
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py"]
|
||||
python_classes = ["Test*"]
|
||||
python_functions = ["test_*"]
|
||||
addopts = [
|
||||
"-ra",
|
||||
"-q",
|
||||
"--strict-markers",
|
||||
"--cov=meshcore_hub",
|
||||
"--cov-report=term-missing",
|
||||
]
|
||||
filterwarnings = [
|
||||
"ignore::DeprecationWarning",
|
||||
]
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ["src/meshcore_hub"]
|
||||
branch = true
|
||||
omit = [
|
||||
"*/tests/*",
|
||||
"*/__pycache__/*",
|
||||
]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = [
|
||||
"pragma: no cover",
|
||||
"def __repr__",
|
||||
"raise NotImplementedError",
|
||||
"if TYPE_CHECKING:",
|
||||
"if __name__ == .__main__.:",
|
||||
]
|
||||
3
src/meshcore_hub/__init__.py
Normal file
3
src/meshcore_hub/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""MeshCore Hub - Python monorepo for managing MeshCore mesh networks."""

# Single source of truth for the package version; also reported by the CLI
# via click's --version option.
__version__ = "0.1.0"
|
||||
409
src/meshcore_hub/__main__.py
Normal file
409
src/meshcore_hub/__main__.py
Normal file
@@ -0,0 +1,409 @@
|
||||
"""MeshCore Hub CLI entry point."""
|
||||
|
||||
import click
|
||||
|
||||
from meshcore_hub import __version__
|
||||
from meshcore_hub.common.config import LogLevel
|
||||
from meshcore_hub.common.logging import configure_logging
|
||||
|
||||
|
||||
@click.group()
@click.version_option(version=__version__, prog_name="meshcore-hub")
@click.option(
    "--log-level",
    type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
    default="INFO",
    envvar="LOG_LEVEL",
    help="Set logging level",
)
@click.pass_context
def cli(ctx: click.Context, log_level: str) -> None:
    """MeshCore Hub - Mesh network management and orchestration.

    A Python monorepo for managing and orchestrating MeshCore mesh networks.
    Provides components for interfacing with devices, collecting data,
    REST API access, and web dashboard visualization.
    """
    # Stash the parsed level on the click context so subcommands can read
    # it, then configure logging once for the whole process.
    ctx.ensure_object(dict)
    ctx.obj["log_level"] = LogLevel(log_level)
    configure_logging(level=ctx.obj["log_level"])
|
||||
|
||||
|
||||
@cli.group()
def interface() -> None:
    """Interface component for MeshCore device communication.

    Runs in RECEIVER or SENDER mode to bridge between
    MeshCore devices and MQTT broker.
    """
    # Group exists only to attach subcommands (e.g. "run"); no setup needed.
    pass
|
||||
|
||||
|
||||
@interface.command("run")
@click.option(
    "--mode",
    type=click.Choice(["RECEIVER", "SENDER"]),
    required=True,
    envvar="INTERFACE_MODE",
    help="Interface mode: RECEIVER or SENDER",
)
@click.option(
    "--port",
    type=str,
    default="/dev/ttyUSB0",
    envvar="SERIAL_PORT",
    help="Serial port path",
)
@click.option(
    "--baud",
    type=int,
    default=115200,
    envvar="SERIAL_BAUD",
    help="Serial baud rate",
)
@click.option(
    "--mock",
    is_flag=True,
    default=False,
    envvar="MOCK_DEVICE",
    help="Use mock device for testing",
)
@click.option(
    "--mqtt-host",
    type=str,
    default="localhost",
    envvar="MQTT_HOST",
    help="MQTT broker host",
)
@click.option(
    "--mqtt-port",
    type=int,
    default=1883,
    envvar="MQTT_PORT",
    help="MQTT broker port",
)
@click.option(
    "--prefix",
    type=str,
    default="meshcore",
    envvar="MQTT_PREFIX",
    help="MQTT topic prefix",
)
def interface_run(
    mode: str,
    port: str,
    baud: int,
    mock: bool,
    mqtt_host: str,
    mqtt_port: int,
    prefix: str,
) -> None:
    """Run the interface component."""
    # Placeholder: report the effective configuration until the component
    # itself is implemented.
    for line in (
        f"Starting interface in {mode} mode...",
        f"Serial port: {port} (baud: {baud})",
        f"Mock device: {mock}",
        f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {prefix})",
        "Interface component not yet implemented.",
    ):
        click.echo(line)
|
||||
|
||||
|
||||
@cli.command()
@click.option(
    "--mqtt-host",
    type=str,
    default="localhost",
    envvar="MQTT_HOST",
    help="MQTT broker host",
)
@click.option(
    "--mqtt-port",
    type=int,
    default=1883,
    envvar="MQTT_PORT",
    help="MQTT broker port",
)
@click.option(
    "--prefix",
    type=str,
    default="meshcore",
    envvar="MQTT_PREFIX",
    help="MQTT topic prefix",
)
@click.option(
    "--database-url",
    type=str,
    default="sqlite:///./meshcore.db",
    envvar="DATABASE_URL",
    help="Database connection URL",
)
def collector(
    mqtt_host: str,
    mqtt_port: int,
    prefix: str,
    database_url: str,
) -> None:
    """Run the collector component.

    Subscribes to MQTT broker and stores events in database.
    """
    # Placeholder: report the effective configuration until the component
    # itself is implemented.
    for line in (
        "Starting collector...",
        f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {prefix})",
        f"Database: {database_url}",
        "Collector component not yet implemented.",
    ):
        click.echo(line)
|
||||
|
||||
|
||||
@cli.command()
@click.option(
    "--host",
    type=str,
    default="0.0.0.0",
    envvar="API_HOST",
    help="API server host",
)
@click.option(
    "--port",
    type=int,
    default=8000,
    envvar="API_PORT",
    help="API server port",
)
@click.option(
    "--database-url",
    type=str,
    default="sqlite:///./meshcore.db",
    envvar="DATABASE_URL",
    help="Database connection URL",
)
@click.option(
    "--read-key",
    type=str,
    default=None,
    envvar="API_READ_KEY",
    help="Read-only API key",
)
@click.option(
    "--admin-key",
    type=str,
    default=None,
    envvar="API_ADMIN_KEY",
    help="Admin API key",
)
@click.option(
    "--reload",
    is_flag=True,
    default=False,
    help="Enable auto-reload for development",
)
def api(
    host: str,
    port: int,
    database_url: str,
    read_key: str | None,
    admin_key: str | None,
    reload: bool,
) -> None:
    """Run the REST API server.

    Provides REST API endpoints for querying data and sending commands.
    """
    # Placeholder: report the effective configuration until the component
    # itself is implemented.  Key values are never echoed, only presence.
    for line in (
        "Starting API server...",
        f"Listening on: {host}:{port}",
        f"Database: {database_url}",
        f"Read key configured: {read_key is not None}",
        f"Admin key configured: {admin_key is not None}",
        "API component not yet implemented.",
    ):
        click.echo(line)
|
||||
|
||||
|
||||
@cli.command()
@click.option(
    "--host",
    type=str,
    default="0.0.0.0",
    envvar="WEB_HOST",
    help="Web server host",
)
@click.option(
    "--port",
    type=int,
    default=8080,
    envvar="WEB_PORT",
    help="Web server port",
)
@click.option(
    "--api-url",
    type=str,
    default="http://localhost:8000",
    envvar="API_BASE_URL",
    help="API server base URL",
)
@click.option(
    "--api-key",
    type=str,
    default=None,
    envvar="API_KEY",
    help="API key for queries",
)
@click.option(
    "--network-name",
    type=str,
    default="MeshCore Network",
    envvar="NETWORK_NAME",
    help="Network display name",
)
@click.option(
    "--reload",
    is_flag=True,
    default=False,
    help="Enable auto-reload for development",
)
def web(
    host: str,
    port: int,
    api_url: str,
    api_key: str | None,
    network_name: str,
    reload: bool,
) -> None:
    """Run the web dashboard.

    Provides a web interface for visualizing network status.
    """
    # Placeholder: report the effective configuration until the component
    # itself is implemented.  The API key is deliberately not echoed.
    for line in (
        "Starting web dashboard...",
        f"Listening on: {host}:{port}",
        f"API URL: {api_url}",
        f"Network name: {network_name}",
        "Web dashboard not yet implemented.",
    ):
        click.echo(line)
|
||||
|
||||
|
||||
@cli.group()
def db() -> None:
    """Database migration commands.

    Manage database schema migrations using Alembic.
    """
    # Group container only; the actual work happens in the subcommands
    # (upgrade, downgrade, revision, current, history).
    pass
|
||||
|
||||
|
||||
@db.command("upgrade")
@click.option(
    "--revision",
    type=str,
    default="head",
    help="Target revision (default: head)",
)
@click.option(
    "--database-url",
    type=str,
    default=None,
    envvar="DATABASE_URL",
    help="Database connection URL",
)
def db_upgrade(revision: str, database_url: str | None) -> None:
    """Upgrade database to a later version.

    Args:
        revision: Alembic target revision; "head" upgrades to the latest.
        database_url: Optional database URL, exported via the
            DATABASE_URL environment variable before running Alembic.
    """
    # Imported lazily so the CLI can load even without migration deps.
    import os
    from alembic import command
    from alembic.config import Config

    click.echo(f"Upgrading database to revision: {revision}")

    # alembic.ini is resolved relative to the current working directory.
    alembic_cfg = Config("alembic.ini")
    if database_url:
        # Exported before running the migration; presumably read by the
        # project's alembic env script — TODO confirm against alembic/env.py.
        os.environ["DATABASE_URL"] = database_url

    command.upgrade(alembic_cfg, revision)
    click.echo("Database upgrade complete.")
|
||||
|
||||
|
||||
@db.command("downgrade")
@click.option(
    "--revision",
    type=str,
    required=True,
    help="Target revision",
)
@click.option(
    "--database-url",
    type=str,
    default=None,
    envvar="DATABASE_URL",
    help="Database connection URL",
)
def db_downgrade(revision: str, database_url: str | None) -> None:
    """Revert database to a previous version.

    Args:
        revision: Alembic target revision; required (no default, since
            downgrades are destructive).
        database_url: Optional database URL, exported via the
            DATABASE_URL environment variable before running Alembic.
    """
    # Imported lazily so the CLI can load even without migration deps.
    import os
    from alembic import command
    from alembic.config import Config

    click.echo(f"Downgrading database to revision: {revision}")

    # alembic.ini is resolved relative to the current working directory.
    alembic_cfg = Config("alembic.ini")
    if database_url:
        # Exported before running the migration; presumably read by the
        # project's alembic env script — TODO confirm against alembic/env.py.
        os.environ["DATABASE_URL"] = database_url

    command.downgrade(alembic_cfg, revision)
    click.echo("Database downgrade complete.")
|
||||
|
||||
|
||||
@db.command("revision")
@click.option(
    "-m",
    "--message",
    type=str,
    required=True,
    help="Revision message",
)
@click.option(
    "--autogenerate/--no-autogenerate",
    default=True,
    help="Autogenerate migration from models (default: enabled)",
)
def db_revision(message: str, autogenerate: bool) -> None:
    """Create a new database migration.

    Args:
        message: Human-readable revision message (Alembic ``-m``).
        autogenerate: Diff models against the database to pre-fill the
            migration. Pass ``--no-autogenerate`` for an empty revision.

    Note:
        The original declaration used ``is_flag=True, default=True``,
        which made the flag impossible to disable (passing the flag and
        omitting it both yield True). The ``--autogenerate/--no-autogenerate``
        boolean-flag form keeps the default while allowing opt-out.
    """
    # Imported lazily so the CLI can load even without migration deps.
    from alembic import command
    from alembic.config import Config

    click.echo(f"Creating new revision: {message}")

    # alembic.ini is resolved relative to the current working directory.
    alembic_cfg = Config("alembic.ini")
    command.revision(alembic_cfg, message=message, autogenerate=autogenerate)
    click.echo("Revision created.")
|
||||
|
||||
|
||||
@db.command("current")
@click.option(
    "--database-url",
    type=str,
    default=None,
    envvar="DATABASE_URL",
    help="Database connection URL",
)
def db_current(database_url: str | None) -> None:
    """Show current database revision.

    Args:
        database_url: Optional database URL, exported via the
            DATABASE_URL environment variable before querying Alembic.
    """
    # Imported lazily so the CLI can load even without migration deps.
    import os
    from alembic import command
    from alembic.config import Config

    # alembic.ini is resolved relative to the current working directory.
    alembic_cfg = Config("alembic.ini")
    if database_url:
        # Exported before running the command; presumably read by the
        # project's alembic env script — TODO confirm against alembic/env.py.
        os.environ["DATABASE_URL"] = database_url

    # Alembic prints the current revision to stdout itself.
    command.current(alembic_cfg)
|
||||
|
||||
|
||||
@db.command("history")
def db_history() -> None:
    """Show database migration history."""
    # Imported lazily so the CLI can load even without migration deps.
    from alembic import command
    from alembic.config import Config

    # alembic.ini is resolved relative to the current working directory;
    # Alembic prints the history to stdout itself.
    alembic_cfg = Config("alembic.ini")
    command.history(alembic_cfg)
|
||||
|
||||
|
||||
def main() -> None:
    """Main entry point.

    Delegates to the Click group, which parses argv and dispatches to
    the selected subcommand.
    """
    cli()


if __name__ == "__main__":
    main()
|
||||
1
src/meshcore_hub/api/__init__.py
Normal file
1
src/meshcore_hub/api/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""REST API component for querying data and sending commands."""
|
||||
1
src/meshcore_hub/api/routes/__init__.py
Normal file
1
src/meshcore_hub/api/routes/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""API route handlers."""
|
||||
1
src/meshcore_hub/collector/__init__.py
Normal file
1
src/meshcore_hub/collector/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Collector component for storing MeshCore events from MQTT."""
|
||||
1
src/meshcore_hub/collector/handlers/__init__.py
Normal file
1
src/meshcore_hub/collector/handlers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Event handlers for processing MQTT messages."""
|
||||
1
src/meshcore_hub/common/__init__.py
Normal file
1
src/meshcore_hub/common/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Common utilities, models and configurations used by all components."""
|
||||
192
src/meshcore_hub/common/config.py
Normal file
192
src/meshcore_hub/common/config.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""Pydantic Settings for MeshCore Hub configuration."""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field, field_validator
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class LogLevel(str, Enum):
    """Log level enumeration.

    Values mirror the stdlib ``logging`` level names so they can be
    passed straight to logging configuration helpers.
    """

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
|
||||
|
||||
|
||||
class InterfaceMode(str, Enum):
    """Interface component mode (RECEIVER or SENDER)."""

    RECEIVER = "RECEIVER"
    SENDER = "SENDER"
|
||||
|
||||
|
||||
class CommonSettings(BaseSettings):
    """Common settings shared by all components.

    Values are read from the environment and an optional ``.env`` file;
    unknown environment variables are ignored (``extra="ignore"``), so
    component-specific settings can coexist in one ``.env``.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    # Logging
    log_level: LogLevel = Field(default=LogLevel.INFO, description="Logging level")

    # MQTT Broker
    mqtt_host: str = Field(default="localhost", description="MQTT broker host")
    mqtt_port: int = Field(default=1883, description="MQTT broker port")
    mqtt_username: Optional[str] = Field(
        default=None, description="MQTT username (optional)"
    )
    mqtt_password: Optional[str] = Field(
        default=None, description="MQTT password (optional)"
    )
    mqtt_prefix: str = Field(
        default="meshcore", description="MQTT topic prefix"
    )
|
||||
|
||||
|
||||
class InterfaceSettings(CommonSettings):
    """Settings for the Interface component.

    Inherits the logging/MQTT settings from CommonSettings and adds the
    serial-device configuration.
    """

    # Mode
    interface_mode: InterfaceMode = Field(
        default=InterfaceMode.RECEIVER,
        description="Interface mode: RECEIVER or SENDER",
    )

    # Serial connection
    serial_port: str = Field(
        default="/dev/ttyUSB0", description="Serial port path"
    )
    serial_baud: int = Field(default=115200, description="Serial baud rate")

    # Mock device
    mock_device: bool = Field(
        default=False, description="Use mock device for testing"
    )
|
||||
|
||||
|
||||
class CollectorSettings(CommonSettings):
    """Settings for the Collector component.

    Inherits the logging/MQTT settings from CommonSettings and adds the
    database connection.
    """

    # Database
    database_url: str = Field(
        default="sqlite:///./meshcore.db",
        description="SQLAlchemy database URL",
    )

    @field_validator("database_url")
    @classmethod
    def validate_database_url(cls, v: str) -> str:
        """Validate database URL format.

        Only guards against an empty string (e.g. ``DATABASE_URL=`` in
        the environment); no scheme validation is performed.
        """
        if not v:
            raise ValueError("Database URL cannot be empty")
        return v
|
||||
|
||||
|
||||
class APISettings(CommonSettings):
    """Settings for the API component.

    Inherits the logging/MQTT settings from CommonSettings and adds the
    server binding, database connection, and API-key configuration.
    """

    # Server binding
    api_host: str = Field(default="0.0.0.0", description="API server host")
    api_port: int = Field(default=8000, description="API server port")

    # Database
    database_url: str = Field(
        default="sqlite:///./meshcore.db",
        description="SQLAlchemy database URL",
    )

    # Authentication; None means the corresponding key is not configured.
    api_read_key: Optional[str] = Field(
        default=None, description="Read-only API key"
    )
    api_admin_key: Optional[str] = Field(
        default=None, description="Admin API key (full access)"
    )

    @field_validator("database_url")
    @classmethod
    def validate_database_url(cls, v: str) -> str:
        """Validate database URL format.

        Only guards against an empty string (e.g. ``DATABASE_URL=`` in
        the environment); no scheme validation is performed.
        """
        if not v:
            raise ValueError("Database URL cannot be empty")
        return v
|
||||
|
||||
|
||||
class WebSettings(CommonSettings):
    """Settings for the Web Dashboard component.

    Inherits the logging/MQTT settings from CommonSettings and adds the
    server binding, API connection, and purely-informational network
    metadata displayed by the dashboard.
    """

    # Server binding
    web_host: str = Field(default="0.0.0.0", description="Web server host")
    web_port: int = Field(default=8080, description="Web server port")

    # API connection
    api_base_url: str = Field(
        default="http://localhost:8000",
        description="API server base URL",
    )
    api_key: Optional[str] = Field(
        default=None, description="API key for queries"
    )

    # Network information (display metadata; all optional except the name)
    network_domain: Optional[str] = Field(
        default=None, description="Network domain name"
    )
    network_name: str = Field(
        default="MeshCore Network", description="Network display name"
    )
    network_city: Optional[str] = Field(
        default=None, description="Network city location"
    )
    network_country: Optional[str] = Field(
        default=None, description="Network country (ISO 3166-1 alpha-2)"
    )
    network_location: Optional[str] = Field(
        default=None, description="Network location (lat,lon)"
    )
    network_radio_config: Optional[str] = Field(
        default=None, description="Radio configuration details"
    )
    network_contact_email: Optional[str] = Field(
        default=None, description="Contact email address"
    )
    network_contact_discord: Optional[str] = Field(
        default=None, description="Discord server link"
    )

    # Members file
    members_file: str = Field(
        default="members.json", description="Path to members JSON file"
    )
|
||||
|
||||
|
||||
def get_common_settings() -> CommonSettings:
    """Get common settings instance.

    A fresh instance is built on every call (environment and .env file
    are re-read); results are not cached.
    """
    return CommonSettings()


def get_interface_settings() -> InterfaceSettings:
    """Get interface settings instance (fresh instance per call)."""
    return InterfaceSettings()


def get_collector_settings() -> CollectorSettings:
    """Get collector settings instance (fresh instance per call)."""
    return CollectorSettings()


def get_api_settings() -> APISettings:
    """Get API settings instance (fresh instance per call)."""
    return APISettings()


def get_web_settings() -> WebSettings:
    """Get web settings instance (fresh instance per call)."""
    return WebSettings()
|
||||
186
src/meshcore_hub/common/database.py
Normal file
186
src/meshcore_hub/common/database.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""Database connection and session management."""
|
||||
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator
|
||||
|
||||
from sqlalchemy import create_engine, event
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from meshcore_hub.common.models.base import Base
|
||||
|
||||
|
||||
def create_database_engine(
    database_url: str,
    echo: bool = False,
) -> Engine:
    """Build a SQLAlchemy engine for the given database URL.

    For SQLite URLs this relaxes the same-thread connection check and
    enables foreign-key enforcement (off by default in SQLite) on every
    new connection.

    Args:
        database_url: SQLAlchemy database URL
        echo: Enable SQL query logging

    Returns:
        SQLAlchemy Engine instance
    """
    is_sqlite = database_url.startswith("sqlite")

    # SQLite connections refuse cross-thread use unless told otherwise.
    connect_args = {"check_same_thread": False} if is_sqlite else {}

    engine = create_engine(
        database_url,
        echo=echo,
        connect_args=connect_args,
        pool_pre_ping=True,
    )

    if is_sqlite:
        # SQLite ships with foreign keys disabled; switch them on for
        # each raw DBAPI connection as it is created.
        @event.listens_for(engine, "connect")
        def set_sqlite_pragma(dbapi_connection, connection_record):  # type: ignore
            cursor = dbapi_connection.cursor()
            cursor.execute("PRAGMA foreign_keys=ON")
            cursor.close()

    return engine
|
||||
|
||||
|
||||
def create_session_factory(engine: Engine) -> sessionmaker[Session]:
    """Create a session factory for the given engine.

    Sessions are configured with ``expire_on_commit=False`` so mapped
    objects remain usable after the transaction commits.

    Args:
        engine: SQLAlchemy Engine instance

    Returns:
        Session factory
    """
    return sessionmaker(
        bind=engine,
        autocommit=False,
        autoflush=False,
        expire_on_commit=False,
    )
|
||||
|
||||
|
||||
def create_tables(engine: Engine) -> None:
    """Create all database tables registered on Base's metadata.

    Args:
        engine: SQLAlchemy Engine instance
    """
    Base.metadata.create_all(bind=engine)
|
||||
|
||||
|
||||
def drop_tables(engine: Engine) -> None:
    """Drop all database tables registered on Base's metadata.

    Destructive: all stored data is lost.

    Args:
        engine: SQLAlchemy Engine instance
    """
    Base.metadata.drop_all(bind=engine)
|
||||
|
||||
|
||||
class DatabaseManager:
    """Database connection manager.

    Manages database engine and session creation for a component.
    Owns one engine/connection pool; call dispose() on shutdown.
    """

    def __init__(self, database_url: str, echo: bool = False):
        """Initialize the database manager.

        Args:
            database_url: SQLAlchemy database URL
            echo: Enable SQL query logging
        """
        self.database_url = database_url
        self.engine = create_database_engine(database_url, echo=echo)
        self.session_factory = create_session_factory(self.engine)

    def create_tables(self) -> None:
        """Create all database tables."""
        create_tables(self.engine)

    def drop_tables(self) -> None:
        """Drop all database tables (destructive)."""
        drop_tables(self.engine)

    def get_session(self) -> Session:
        """Get a new database session.

        The caller owns the returned session and must close it; prefer
        session_scope() for automatic commit/rollback/close handling.

        Returns:
            New Session instance
        """
        return self.session_factory()

    @contextmanager
    def session_scope(self) -> Generator[Session, None, None]:
        """Provide a transactional scope around a series of operations.

        Commits on normal exit, rolls back and re-raises on any
        exception, and always closes the session.

        Yields:
            Session instance

        Example:
            with db.session_scope() as session:
                session.add(node)
                session.commit()
        """
        session = self.get_session()
        try:
            yield session
            # Commit whatever the caller did inside the with-block.
            session.commit()
        except Exception:
            # Undo partial work before propagating to the caller.
            session.rollback()
            raise
        finally:
            session.close()

    def dispose(self) -> None:
        """Dispose of the database engine and connection pool."""
        self.engine.dispose()
|
||||
|
||||
|
||||
# Global database manager instance (initialized at runtime)
|
||||
_db_manager: DatabaseManager | None = None
|
||||
|
||||
|
||||
def init_database(database_url: str, echo: bool = False) -> DatabaseManager:
    """Initialize the global database manager.

    Replaces any previously initialized manager without disposing it,
    so call this once at process startup.

    Args:
        database_url: SQLAlchemy database URL
        echo: Enable SQL query logging

    Returns:
        DatabaseManager instance
    """
    global _db_manager
    _db_manager = DatabaseManager(database_url, echo=echo)
    return _db_manager
|
||||
|
||||
|
||||
def get_database() -> DatabaseManager:
    """Return the process-wide database manager.

    Returns:
        The DatabaseManager created by init_database()

    Raises:
        RuntimeError: If database not initialized
    """
    manager = _db_manager
    if manager is None:
        raise RuntimeError(
            "Database not initialized. Call init_database() first."
        )
    return manager
|
||||
|
||||
|
||||
def get_session() -> Session:
    """Get a database session from the global manager.

    Requires init_database() to have been called; the caller owns the
    session and must close it.

    Returns:
        Session instance
    """
    return get_database().get_session()
|
||||
126
src/meshcore_hub/common/logging.py
Normal file
126
src/meshcore_hub/common/logging.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""Logging configuration for MeshCore Hub."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from meshcore_hub.common.config import LogLevel
|
||||
|
||||
|
||||
# Default log format
|
||||
DEFAULT_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
|
||||
# Structured log format (more suitable for production/parsing)
|
||||
STRUCTURED_FORMAT = (
|
||||
"%(asctime)s | %(levelname)-8s | %(name)s | %(message)s"
|
||||
)
|
||||
|
||||
|
||||
def configure_logging(
    level: LogLevel | str = LogLevel.INFO,
    format_string: Optional[str] = None,
    structured: bool = False,
) -> None:
    """Configure logging for the application.

    Args:
        level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        format_string: Custom log format string (optional; takes
            precedence over ``structured``)
        structured: Use structured logging format
    """
    # Convert LogLevel enum to string if necessary
    if isinstance(level, LogLevel):
        level_str = level.value
    else:
        level_str = level.upper()

    # Get numeric log level; an unrecognized level name silently falls
    # back to INFO rather than raising.
    numeric_level = getattr(logging, level_str, logging.INFO)

    # Determine format: explicit format_string wins, then structured,
    # then the default format.
    if format_string:
        log_format = format_string
    elif structured:
        log_format = STRUCTURED_FORMAT
    else:
        log_format = DEFAULT_FORMAT

    # Configure root logger to emit to stdout.
    # NOTE(review): basicConfig is a no-op once the root logger has
    # handlers — confirm this is only invoked once per process.
    logging.basicConfig(
        level=numeric_level,
        format=log_format,
        handlers=[
            logging.StreamHandler(sys.stdout),
        ],
    )

    # Set levels for noisy third-party loggers
    logging.getLogger("paho").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("httpcore").setLevel(logging.WARNING)
    logging.getLogger("uvicorn.access").setLevel(logging.WARNING)

    # Set our loggers to the configured level
    logging.getLogger("meshcore_hub").setLevel(numeric_level)
|
||||
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Return the stdlib logger registered under *name*.

    Thin convenience wrapper so callers obtain loggers from one place;
    typically invoked as ``get_logger(__name__)``.

    Args:
        name: Logger name (typically __name__)

    Returns:
        Logger instance
    """
    logger = logging.getLogger(name)
    return logger
|
||||
|
||||
|
||||
class ComponentLogger:
    """Logger wrapper for a specific component.

    Wraps a stdlib logger named ``meshcore_hub.<component>`` and passes
    any keyword arguments as the record's ``extra`` mapping.
    """

    def __init__(self, component: str):
        """Initialize component logger.

        Args:
            component: Component name (e.g., 'interface', 'collector')
        """
        self.component = component
        # Namespaced under the package logger so configure_logging's
        # level setting on "meshcore_hub" applies here too.
        self._logger = logging.getLogger(f"meshcore_hub.{component}")

    def debug(self, message: str, **kwargs: object) -> None:
        """Log a debug message; kwargs become LogRecord extra fields."""
        self._logger.debug(message, extra=kwargs)

    def info(self, message: str, **kwargs: object) -> None:
        """Log an info message; kwargs become LogRecord extra fields."""
        self._logger.info(message, extra=kwargs)

    def warning(self, message: str, **kwargs: object) -> None:
        """Log a warning message; kwargs become LogRecord extra fields."""
        self._logger.warning(message, extra=kwargs)

    def error(self, message: str, **kwargs: object) -> None:
        """Log an error message; kwargs become LogRecord extra fields."""
        self._logger.error(message, extra=kwargs)

    def critical(self, message: str, **kwargs: object) -> None:
        """Log a critical message; kwargs become LogRecord extra fields."""
        self._logger.critical(message, extra=kwargs)

    def exception(self, message: str, **kwargs: object) -> None:
        """Log an exception with traceback (call from an except block)."""
        self._logger.exception(message, extra=kwargs)
|
||||
|
||||
|
||||
def get_component_logger(component: str) -> ComponentLogger:
    """Get a component-specific logger.

    Builds a new ComponentLogger per call; the underlying stdlib logger
    is shared via logging.getLogger's registry.

    Args:
        component: Component name

    Returns:
        ComponentLogger instance
    """
    return ComponentLogger(component)
|
||||
22
src/meshcore_hub/common/models/__init__.py
Normal file
22
src/meshcore_hub/common/models/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""SQLAlchemy database models."""
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin
|
||||
from meshcore_hub.common.models.node import Node
|
||||
from meshcore_hub.common.models.node_tag import NodeTag
|
||||
from meshcore_hub.common.models.message import Message
|
||||
from meshcore_hub.common.models.advertisement import Advertisement
|
||||
from meshcore_hub.common.models.trace_path import TracePath
|
||||
from meshcore_hub.common.models.telemetry import Telemetry
|
||||
from meshcore_hub.common.models.event_log import EventLog
|
||||
|
||||
__all__ = [
|
||||
"Base",
|
||||
"TimestampMixin",
|
||||
"Node",
|
||||
"NodeTag",
|
||||
"Message",
|
||||
"Advertisement",
|
||||
"TracePath",
|
||||
"Telemetry",
|
||||
"EventLog",
|
||||
]
|
||||
67
src/meshcore_hub/common/models/advertisement.py
Normal file
67
src/meshcore_hub/common/models/advertisement.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Advertisement model for storing node advertisements."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Index, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
|
||||
class Advertisement(Base, UUIDMixin, TimestampMixin):
    """Advertisement model for storing node advertisements.

    Attributes:
        id: UUID primary key
        receiver_node_id: FK to nodes (receiving interface)
        node_id: FK to nodes (advertised node)
        public_key: Advertised public key
        name: Advertised name
        adv_type: Node type (chat, repeater, room, none)
        flags: Capability flags
        received_at: When received by interface
        created_at: Record creation timestamp
    """

    __tablename__ = "advertisements"

    # Both FKs are SET NULL on node deletion so the advertisement row
    # (the historical record) survives node removal.
    receiver_node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    # 64 chars: matches Node.public_key's hex-key width.
    public_key: Mapped[str] = mapped_column(
        String(64),
        nullable=False,
        index=True,
    )
    name: Mapped[Optional[str]] = mapped_column(
        String(255),
        nullable=True,
    )
    adv_type: Mapped[Optional[str]] = mapped_column(
        String(20),
        nullable=True,
    )
    flags: Mapped[Optional[int]] = mapped_column(
        Integer,
        nullable=True,
    )
    received_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        nullable=False,
    )

    __table_args__ = (
        Index("ix_advertisements_received_at", "received_at"),
    )

    def __repr__(self) -> str:
        # Truncate the key so reprs stay readable in logs.
        return f"<Advertisement(id={self.id}, public_key={self.public_key[:12]}..., name={self.name})>"
|
||||
71
src/meshcore_hub/common/models/base.py
Normal file
71
src/meshcore_hub/common/models/base.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""Base model with common fields and mixins."""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import DateTime, func
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
|
||||
def generate_uuid() -> str:
    """Return a freshly generated random UUID (version 4) in canonical string form."""
    new_id = uuid.uuid4()
    return f"{new_id}"
|
||||
|
||||
|
||||
def utc_now() -> datetime:
    """Return the current moment as a timezone-aware datetime in UTC."""
    return datetime.now(tz=timezone.utc)
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Base class for all SQLAlchemy models.

    All model metadata (for create_all/drop_all and Alembic) is
    collected on this class.
    """

    pass
|
||||
|
||||
|
||||
class TimestampMixin:
    """Mixin that adds created_at and updated_at timestamp columns.

    Both columns are timezone-aware and carry server defaults so rows
    inserted outside the ORM are stamped too; updated_at is refreshed
    on every ORM update via ``onupdate``.
    """

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        server_default=func.now(),
        nullable=False,
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        server_default=func.now(),
        onupdate=utc_now,
        nullable=False,
    )
|
||||
|
||||
|
||||
class UUIDMixin:
    """Mixin that adds a UUID primary key.

    IDs are generated client-side as UUID4 strings (see generate_uuid),
    so they are portable across database backends.
    """

    id: Mapped[str] = mapped_column(
        primary_key=True,
        default=generate_uuid,
        nullable=False,
    )
|
||||
|
||||
|
||||
def model_to_dict(model: Any) -> dict[str, Any]:
    """Convert a SQLAlchemy model instance to a dictionary.

    Datetime column values are rendered as ISO-8601 strings so the
    result is directly JSON-serializable; other values pass through
    unchanged.

    Args:
        model: SQLAlchemy model instance

    Returns:
        Dictionary representation of the model
    """

    def _encode(value: Any) -> Any:
        # Only datetimes need conversion for serialization.
        return value.isoformat() if isinstance(value, datetime) else value

    return {
        column.name: _encode(getattr(model, column.name))
        for column in model.__table__.columns
    }
|
||||
52
src/meshcore_hub/common/models/event_log.py
Normal file
52
src/meshcore_hub/common/models/event_log.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""EventLog model for storing all event payloads."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Index, String
|
||||
from sqlalchemy.dialects.sqlite import JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
|
||||
class EventLog(Base, UUIDMixin, TimestampMixin):
    """EventLog model for storing all event payloads for audit/debugging.

    Attributes:
        id: UUID primary key
        receiver_node_id: FK to nodes (receiving interface)
        event_type: Event type name
        payload: Full event payload as JSON
        received_at: When received by interface
        created_at: Record creation timestamp
    """

    __tablename__ = "events_log"

    # SET NULL on node deletion so the audit record outlives the node.
    receiver_node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    event_type: Mapped[str] = mapped_column(
        String(50),
        nullable=False,
    )
    # Raw event stored verbatim as JSON (sqlite JSON dialect type).
    payload: Mapped[Optional[dict[str, Any]]] = mapped_column(
        JSON,
        nullable=True,
    )
    received_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        nullable=False,
    )

    __table_args__ = (
        Index("ix_events_log_event_type", "event_type"),
        Index("ix_events_log_received_at", "received_at"),
    )

    def __repr__(self) -> str:
        return f"<EventLog(id={self.id}, event_type={self.event_type})>"
|
||||
88
src/meshcore_hub/common/models/message.py
Normal file
88
src/meshcore_hub/common/models/message.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""Message model for storing received messages."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import DateTime, Float, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
|
||||
class Message(Base, UUIDMixin, TimestampMixin):
    """Message model for storing contact and channel messages.

    Attributes:
        id: UUID primary key
        receiver_node_id: FK to nodes (receiving interface)
        message_type: Message type (contact, channel)
        pubkey_prefix: Sender's public key prefix (12 chars, contact msgs)
        channel_idx: Channel index (channel msgs)
        text: Message content
        path_len: Number of hops
        txt_type: Message type indicator
        signature: Message signature (8 hex chars)
        snr: Signal-to-noise ratio
        sender_timestamp: Sender's timestamp
        received_at: When received by interface
        created_at: Record creation timestamp
    """

    __tablename__ = "messages"

    # SET NULL on node deletion so the stored message survives.
    receiver_node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    message_type: Mapped[str] = mapped_column(
        String(20),
        nullable=False,
    )
    # Set for contact messages; channel messages use channel_idx instead.
    pubkey_prefix: Mapped[Optional[str]] = mapped_column(
        String(12),
        nullable=True,
    )
    channel_idx: Mapped[Optional[int]] = mapped_column(
        Integer,
        nullable=True,
    )
    text: Mapped[str] = mapped_column(
        Text,
        nullable=False,
    )
    path_len: Mapped[Optional[int]] = mapped_column(
        Integer,
        nullable=True,
    )
    txt_type: Mapped[Optional[int]] = mapped_column(
        Integer,
        nullable=True,
    )
    signature: Mapped[Optional[str]] = mapped_column(
        String(8),
        nullable=True,
    )
    snr: Mapped[Optional[float]] = mapped_column(
        Float,
        nullable=True,
    )
    sender_timestamp: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    received_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        nullable=False,
    )

    __table_args__ = (
        Index("ix_messages_message_type", "message_type"),
        Index("ix_messages_pubkey_prefix", "pubkey_prefix"),
        Index("ix_messages_channel_idx", "channel_idx"),
        Index("ix_messages_received_at", "received_at"),
    )

    def __repr__(self) -> str:
        # Truncate message text so reprs stay readable in logs.
        return f"<Message(id={self.id}, type={self.message_type}, text={self.text[:20]}...)>"
|
||||
75
src/meshcore_hub/common/models/node.py
Normal file
75
src/meshcore_hub/common/models/node.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Node model for tracking MeshCore network nodes."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import DateTime, Index, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from meshcore_hub.common.models.node_tag import NodeTag
|
||||
|
||||
|
||||
class Node(Base, UUIDMixin, TimestampMixin):
    """A MeshCore network node, keyed by its unique public key.

    Tracks identity (``public_key``, ``name``), advertised capabilities
    (``adv_type``, ``flags``) and activity timestamps (``first_seen``,
    ``last_seen``).  UUID primary key and created_at/updated_at columns
    come from the mixins.
    """

    __tablename__ = "nodes"

    # 64-character hex identity key; one row per node.
    public_key: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    # Advertisement type: chat, repeater, room, none.
    adv_type: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
    # Capability/status flags bitmask.
    flags: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    first_seen: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)
    last_seen: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utc_now, nullable=False)

    # Tags are owned by the node and removed together with it.
    tags: Mapped[list["NodeTag"]] = relationship(
        "NodeTag",
        back_populates="node",
        cascade="all, delete-orphan",
        lazy="selectin",
    )

    __table_args__ = (
        Index("ix_nodes_last_seen", "last_seen"),
        Index("ix_nodes_adv_type", "adv_type"),
    )

    def __repr__(self) -> str:
        """Return a concise debug representation of the node."""
        return f"<Node(id={self.id}, public_key={self.public_key[:12]}..., name={self.name})>"
|
||||
62
src/meshcore_hub/common/models/node_tag.py
Normal file
62
src/meshcore_hub/common/models/node_tag.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""NodeTag model for custom node metadata."""
|
||||
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKey, Index, String, Text, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from meshcore_hub.common.models.node import Node
|
||||
|
||||
|
||||
class NodeTag(Base, UUIDMixin, TimestampMixin):
    """User-assigned key/value metadata attached to a node.

    Each node may carry at most one tag per key (enforced by a unique
    constraint on ``(node_id, key)``).  ``value`` is stored as text and
    may hold JSON for typed values; ``value_type`` is a hint such as
    string, number, boolean or coordinate.
    """

    __tablename__ = "node_tags"

    # Owning node; rows are dropped when the node is deleted.
    node_id: Mapped[str] = mapped_column(
        ForeignKey("nodes.id", ondelete="CASCADE"), nullable=False, index=True
    )
    key: Mapped[str] = mapped_column(String(100), nullable=False)
    value: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    value_type: Mapped[str] = mapped_column(String(20), default="string", nullable=False)

    # Back-reference to the owning node.
    node: Mapped["Node"] = relationship("Node", back_populates="tags")

    __table_args__ = (
        UniqueConstraint("node_id", "key", name="uq_node_tags_node_key"),
        Index("ix_node_tags_key", "key"),
    )

    def __repr__(self) -> str:
        """Return a concise debug representation of the tag."""
        return f"<NodeTag(node_id={self.node_id}, key={self.key}, value={self.value})>"
|
||||
64
src/meshcore_hub/common/models/telemetry.py
Normal file
64
src/meshcore_hub/common/models/telemetry.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Telemetry model for storing sensor data."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Index, LargeBinary, String
|
||||
from sqlalchemy.dialects.sqlite import JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
|
||||
class Telemetry(Base, UUIDMixin, TimestampMixin):
    """A telemetry report (sensor data) received from a network node.

    Keeps both the raw LPP-encoded bytes (``lpp_data``) and the decoded
    readings (``parsed_data``).  Node foreign keys are nullable and use
    SET NULL so telemetry history survives node deletion.
    """

    __tablename__ = "telemetry"

    # Interface node that received the report.
    receiver_node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Node the report is about (if known in the nodes table).
    node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Reporting node's public key, kept even if the node row is gone.
    node_public_key: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    lpp_data: Mapped[Optional[bytes]] = mapped_column(LargeBinary, nullable=True)
    parsed_data: Mapped[Optional[dict[str, Any]]] = mapped_column(JSON, nullable=True)
    received_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=utc_now, nullable=False
    )

    __table_args__ = (
        Index("ix_telemetry_node_public_key", "node_public_key"),
        Index("ix_telemetry_received_at", "received_at"),
    )

    def __repr__(self) -> str:
        """Return a concise debug representation of the telemetry row."""
        return f"<Telemetry(id={self.id}, node_public_key={self.node_public_key[:12]}...)>"
|
||||
77
src/meshcore_hub/common/models/trace_path.py
Normal file
77
src/meshcore_hub/common/models/trace_path.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""TracePath model for storing network trace data."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import BigInteger, DateTime, ForeignKey, Index, Integer, String
|
||||
from sqlalchemy.dialects.sqlite import JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
|
||||
|
||||
class TracePath(Base, UUIDMixin, TimestampMixin):
    """Result of a network trace: the path a probe took through the mesh.

    ``path_hashes`` and ``snr_values`` are parallel JSON arrays — one
    node hash and one SNR reading per hop.  ``initiator_tag`` identifies
    the trace request this result belongs to.
    """

    __tablename__ = "trace_paths"

    # Interface node that received the trace result.
    receiver_node_id: Mapped[Optional[str]] = mapped_column(
        ForeignKey("nodes.id", ondelete="SET NULL"), nullable=True, index=True
    )
    # Unique trace identifier chosen by the initiator.
    initiator_tag: Mapped[int] = mapped_column(BigInteger, nullable=False)
    path_len: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    flags: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    auth: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    # JSON array of per-hop node hash identifiers.
    path_hashes: Mapped[Optional[list[str]]] = mapped_column(JSON, nullable=True)
    # JSON array of per-hop SNR values.
    snr_values: Mapped[Optional[list[float]]] = mapped_column(JSON, nullable=True)
    hop_count: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    received_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=utc_now, nullable=False
    )

    __table_args__ = (
        Index("ix_trace_paths_initiator_tag", "initiator_tag"),
        Index("ix_trace_paths_received_at", "received_at"),
    )

    def __repr__(self) -> str:
        """Return a concise debug representation of the trace path."""
        return f"<TracePath(id={self.id}, initiator_tag={self.initiator_tag}, hop_count={self.hop_count})>"
|
||||
365
src/meshcore_hub/common/mqtt.py
Normal file
365
src/meshcore_hub/common/mqtt.py
Normal file
@@ -0,0 +1,365 @@
|
||||
"""MQTT client utilities for MeshCore Hub."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import paho.mqtt.client as mqtt
|
||||
from paho.mqtt.enums import CallbackAPIVersion
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class MQTTConfig:
    """MQTT connection configuration.

    Field order is part of the public interface (positional construction);
    do not reorder.
    """

    # Broker address.
    host: str = "localhost"
    port: int = 1883
    # Optional authentication; password is only used when username is set.
    username: Optional[str] = None
    password: Optional[str] = None
    # Topic prefix shared by all hub topics (see TopicBuilder).
    prefix: str = "meshcore"
    # None lets the broker assign a random client id.
    client_id: Optional[str] = None
    # Keepalive interval in seconds.
    keepalive: int = 60
    clean_session: bool = True
||||
|
||||
|
||||
class TopicBuilder:
|
||||
"""Helper class for building MQTT topics."""
|
||||
|
||||
def __init__(self, prefix: str = "meshcore"):
|
||||
"""Initialize topic builder.
|
||||
|
||||
Args:
|
||||
prefix: MQTT topic prefix
|
||||
"""
|
||||
self.prefix = prefix
|
||||
|
||||
def event_topic(self, public_key: str, event_name: str) -> str:
|
||||
"""Build an event topic.
|
||||
|
||||
Args:
|
||||
public_key: Node's public key
|
||||
event_name: Event name
|
||||
|
||||
Returns:
|
||||
Full MQTT topic string
|
||||
"""
|
||||
return f"{self.prefix}/{public_key}/event/{event_name}"
|
||||
|
||||
def command_topic(self, public_key: str, command_name: str) -> str:
|
||||
"""Build a command topic.
|
||||
|
||||
Args:
|
||||
public_key: Node's public key (or '+' for wildcard)
|
||||
command_name: Command name
|
||||
|
||||
Returns:
|
||||
Full MQTT topic string
|
||||
"""
|
||||
return f"{self.prefix}/{public_key}/command/{command_name}"
|
||||
|
||||
def all_events_topic(self) -> str:
|
||||
"""Build a topic pattern to subscribe to all events.
|
||||
|
||||
Returns:
|
||||
MQTT topic pattern with wildcards
|
||||
"""
|
||||
return f"{self.prefix}/+/event/#"
|
||||
|
||||
def all_commands_topic(self) -> str:
|
||||
"""Build a topic pattern to subscribe to all commands.
|
||||
|
||||
Returns:
|
||||
MQTT topic pattern with wildcards
|
||||
"""
|
||||
return f"{self.prefix}/+/command/#"
|
||||
|
||||
def parse_event_topic(self, topic: str) -> tuple[str, str] | None:
|
||||
"""Parse an event topic to extract public key and event name.
|
||||
|
||||
Args:
|
||||
topic: Full MQTT topic string
|
||||
|
||||
Returns:
|
||||
Tuple of (public_key, event_name) or None if invalid
|
||||
"""
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "event":
|
||||
public_key = parts[1]
|
||||
event_name = "/".join(parts[3:])
|
||||
return (public_key, event_name)
|
||||
return None
|
||||
|
||||
def parse_command_topic(self, topic: str) -> tuple[str, str] | None:
|
||||
"""Parse a command topic to extract public key and command name.
|
||||
|
||||
Args:
|
||||
topic: Full MQTT topic string
|
||||
|
||||
Returns:
|
||||
Tuple of (public_key, command_name) or None if invalid
|
||||
"""
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "command":
|
||||
public_key = parts[1]
|
||||
command_name = "/".join(parts[3:])
|
||||
return (public_key, command_name)
|
||||
return None
|
||||
|
||||
|
||||
MessageHandler = Callable[[str, str, dict[str, Any]], None]
|
||||
|
||||
|
||||
class MQTTClient:
    """Wrapper for paho-mqtt client with helper methods.

    Maintains a map of subscription pattern -> handlers, resubscribes on
    reconnect, and JSON-encodes/decodes payloads at the publish/receive
    boundary.
    """

    def __init__(self, config: MQTTConfig):
        """Initialize MQTT client.

        Args:
            config: MQTT configuration
        """
        self.config = config
        self.topic_builder = TopicBuilder(config.prefix)
        self._client = mqtt.Client(
            callback_api_version=CallbackAPIVersion.VERSION2,
            client_id=config.client_id,
            clean_session=config.clean_session,
        )
        self._connected = False
        # Subscription pattern -> list of handlers registered for it.
        self._message_handlers: dict[str, list[MessageHandler]] = {}

        # Set up authentication if provided
        if config.username:
            self._client.username_pw_set(config.username, config.password)

        # Set up callbacks
        self._client.on_connect = self._on_connect
        self._client.on_disconnect = self._on_disconnect
        self._client.on_message = self._on_message

    def _on_connect(
        self,
        client: mqtt.Client,
        userdata: Any,
        flags: Any,
        reason_code: Any,
        properties: Any = None,
    ) -> None:
        """Handle connection callback."""
        # paho v2 passes a ReasonCode object; it compares equal to int 0 on success.
        if reason_code == 0:
            self._connected = True
            logger.info(f"Connected to MQTT broker at {self.config.host}:{self.config.port}")
            # Resubscribe to topics on reconnect
            for topic in self._message_handlers.keys():
                self._client.subscribe(topic)
                logger.debug(f"Resubscribed to topic: {topic}")
        else:
            logger.error(f"Failed to connect to MQTT broker: {reason_code}")

    def _on_disconnect(
        self,
        client: mqtt.Client,
        userdata: Any,
        disconnect_flags: Any,
        reason_code: Any,
        properties: Any = None,
    ) -> None:
        """Handle disconnection callback."""
        self._connected = False
        logger.warning(f"Disconnected from MQTT broker: {reason_code}")

    def _on_message(
        self,
        client: mqtt.Client,
        userdata: Any,
        message: mqtt.MQTTMessage,
    ) -> None:
        """Handle incoming message callback: decode JSON and dispatch to handlers."""
        topic = message.topic
        try:
            payload = json.loads(message.payload.decode("utf-8"))
        except (json.JSONDecodeError, UnicodeDecodeError) as e:
            logger.error(f"Failed to decode message payload: {e}")
            return

        logger.debug(f"Received message on topic {topic}: {payload}")

        # Call registered handlers; one failing handler must not stop the rest.
        for pattern, handlers in self._message_handlers.items():
            if self._topic_matches(pattern, topic):
                for handler in handlers:
                    try:
                        handler(topic, pattern, payload)
                    except Exception as e:
                        logger.error(f"Error in message handler: {e}")

    def _topic_matches(self, pattern: str, topic: str) -> bool:
        """Check if a topic matches a subscription pattern.

        Implements MQTT matching rules: '+' matches exactly one level and
        '#' (last level only) matches any number of remaining levels,
        including zero (i.e. it also matches the parent level).

        Args:
            pattern: MQTT subscription pattern (may contain + and #)
            topic: Actual topic string

        Returns:
            True if topic matches pattern
        """
        pattern_parts = pattern.split("/")
        topic_parts = topic.split("/")

        for i, part in enumerate(pattern_parts):
            if part == "#":
                # Multi-level wildcard: everything from here on matches.
                return True
            if i >= len(topic_parts):
                # Fix: the pattern still has concrete levels but the topic
                # has ended — e.g. "a/b/#" must NOT match topic "a".  The
                # previous implementation returned True here whenever the
                # pattern ended in '#'.
                return False
            if part != "+" and part != topic_parts[i]:
                return False

        # No '#' present: a match requires the same number of levels.
        return len(pattern_parts) == len(topic_parts)

    def connect(self) -> None:
        """Connect to the MQTT broker."""
        logger.info(f"Connecting to MQTT broker at {self.config.host}:{self.config.port}")
        self._client.connect(
            self.config.host,
            self.config.port,
            self.config.keepalive,
        )

    def disconnect(self) -> None:
        """Disconnect from the MQTT broker."""
        self._client.disconnect()

    def start(self) -> None:
        """Start the MQTT client loop (blocking)."""
        self._client.loop_forever()

    def start_background(self) -> None:
        """Start the MQTT client loop in background thread."""
        self._client.loop_start()

    def stop(self) -> None:
        """Stop the MQTT client loop."""
        self._client.loop_stop()

    def subscribe(
        self,
        topic: str,
        handler: MessageHandler,
        qos: int = 1,
    ) -> None:
        """Subscribe to a topic with a handler.

        If not yet connected, the broker subscription is deferred until
        `_on_connect` resubscribes all registered patterns.

        Args:
            topic: MQTT topic pattern
            handler: Message handler function
            qos: Quality of service level
        """
        if topic not in self._message_handlers:
            self._message_handlers[topic] = []
            if self._connected:
                self._client.subscribe(topic, qos)
                logger.debug(f"Subscribed to topic: {topic}")

        self._message_handlers[topic].append(handler)

    def unsubscribe(self, topic: str) -> None:
        """Unsubscribe from a topic.

        Args:
            topic: MQTT topic pattern
        """
        if topic in self._message_handlers:
            del self._message_handlers[topic]
            self._client.unsubscribe(topic)
            logger.debug(f"Unsubscribed from topic: {topic}")

    def publish(
        self,
        topic: str,
        payload: dict[str, Any],
        qos: int = 1,
        retain: bool = False,
    ) -> None:
        """Publish a message to a topic.

        Args:
            topic: MQTT topic
            payload: Message payload (will be JSON encoded)
            qos: Quality of service level
            retain: Whether to retain the message
        """
        message = json.dumps(payload)
        self._client.publish(topic, message, qos=qos, retain=retain)
        logger.debug(f"Published message to topic {topic}: {payload}")

    def publish_event(
        self,
        public_key: str,
        event_name: str,
        payload: dict[str, Any],
    ) -> None:
        """Publish an event message.

        Args:
            public_key: Node's public key
            event_name: Event name
            payload: Event payload
        """
        topic = self.topic_builder.event_topic(public_key, event_name)
        self.publish(topic, payload)

    def publish_command(
        self,
        public_key: str,
        command_name: str,
        payload: dict[str, Any],
    ) -> None:
        """Publish a command message.

        Args:
            public_key: Target node's public key (or '+' for all)
            command_name: Command name
            payload: Command payload
        """
        topic = self.topic_builder.command_topic(public_key, command_name)
        self.publish(topic, payload)

    @property
    def is_connected(self) -> bool:
        """Check if client is connected to broker."""
        return self._connected
|
||||
|
||||
|
||||
def create_mqtt_client(
    host: str = "localhost",
    port: int = 1883,
    username: Optional[str] = None,
    password: Optional[str] = None,
    prefix: str = "meshcore",
    client_id: Optional[str] = None,
) -> MQTTClient:
    """Create and configure an MQTT client.

    Convenience factory that wraps the individual settings into an
    ``MQTTConfig`` and hands it to ``MQTTClient``.

    Args:
        host: MQTT broker host
        port: MQTT broker port
        username: MQTT username (optional)
        password: MQTT password (optional)
        prefix: Topic prefix
        client_id: Client identifier (optional)

    Returns:
        Configured MQTTClient instance
    """
    return MQTTClient(
        MQTTConfig(
            host=host,
            port=port,
            username=username,
            password=password,
            prefix=prefix,
            client_id=client_id,
        )
    )
|
||||
59
src/meshcore_hub/common/schemas/__init__.py
Normal file
59
src/meshcore_hub/common/schemas/__init__.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Pydantic schemas for API request/response validation."""
|
||||
|
||||
from meshcore_hub.common.schemas.events import (
|
||||
AdvertisementEvent,
|
||||
ContactMessageEvent,
|
||||
ChannelMessageEvent,
|
||||
TraceDataEvent,
|
||||
TelemetryResponseEvent,
|
||||
ContactsEvent,
|
||||
SendConfirmedEvent,
|
||||
StatusResponseEvent,
|
||||
BatteryEvent,
|
||||
PathUpdatedEvent,
|
||||
)
|
||||
from meshcore_hub.common.schemas.nodes import (
|
||||
NodeRead,
|
||||
NodeList,
|
||||
NodeTagCreate,
|
||||
NodeTagUpdate,
|
||||
NodeTagRead,
|
||||
)
|
||||
from meshcore_hub.common.schemas.messages import (
|
||||
MessageRead,
|
||||
MessageList,
|
||||
MessageFilters,
|
||||
)
|
||||
from meshcore_hub.common.schemas.commands import (
|
||||
SendMessageCommand,
|
||||
SendChannelMessageCommand,
|
||||
SendAdvertCommand,
|
||||
)
|
||||
|
||||
# Public re-export surface of the schemas package; keep this list in sync
# with the imports above so `from ... import *` and API docs stay accurate.
__all__ = [
    # Events
    "AdvertisementEvent",
    "ContactMessageEvent",
    "ChannelMessageEvent",
    "TraceDataEvent",
    "TelemetryResponseEvent",
    "ContactsEvent",
    "SendConfirmedEvent",
    "StatusResponseEvent",
    "BatteryEvent",
    "PathUpdatedEvent",
    # Nodes
    "NodeRead",
    "NodeList",
    "NodeTagCreate",
    "NodeTagUpdate",
    "NodeTagRead",
    # Messages
    "MessageRead",
    "MessageList",
    "MessageFilters",
    # Commands
    "SendMessageCommand",
    "SendChannelMessageCommand",
    "SendAdvertCommand",
]
|
||||
89
src/meshcore_hub/common/schemas/commands.py
Normal file
89
src/meshcore_hub/common/schemas/commands.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""Pydantic schemas for command API endpoints."""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SendMessageCommand(BaseModel):
    """Request body for sending a direct (contact) message."""

    # Full 64-char public key or a >=12-char prefix of it.
    destination: str = Field(..., min_length=12, max_length=64, description="Destination public key or prefix")
    text: str = Field(..., min_length=1, max_length=1000, description="Message content")
    timestamp: Optional[int] = Field(default=None, description="Unix timestamp (optional, defaults to current time)")
|
||||
|
||||
|
||||
class SendChannelMessageCommand(BaseModel):
    """Request body for sending a broadcast message to a channel."""

    channel_idx: int = Field(..., ge=0, le=255, description="Channel index (0-255)")
    text: str = Field(..., min_length=1, max_length=1000, description="Message content")
    timestamp: Optional[int] = Field(default=None, description="Unix timestamp (optional, defaults to current time)")
|
||||
|
||||
|
||||
class SendAdvertCommand(BaseModel):
    """Request body for broadcasting a node advertisement."""

    flood: bool = Field(default=True, description="Whether to flood the advertisement")
|
||||
|
||||
|
||||
class RequestStatusCommand(BaseModel):
    """Request body for querying a node's status."""

    # Omitted target means "the local/default node".
    target_public_key: Optional[str] = Field(
        default=None, min_length=64, max_length=64, description="Target node public key (optional)"
    )
|
||||
|
||||
|
||||
class RequestTelemetryCommand(BaseModel):
    """Request body for asking a node to report its telemetry."""

    target_public_key: str = Field(..., min_length=64, max_length=64, description="Target node public key")
|
||||
|
||||
|
||||
class CommandResponse(BaseModel):
    """Generic acknowledgement returned by command endpoints."""

    success: bool = Field(..., description="Whether command was accepted")
    message: str = Field(..., description="Response message")
    command_id: Optional[str] = Field(default=None, description="Command tracking ID (if applicable)")
|
||||
261
src/meshcore_hub/common/schemas/events.py
Normal file
261
src/meshcore_hub/common/schemas/events.py
Normal file
@@ -0,0 +1,261 @@
|
||||
"""Pydantic schemas for MeshCore events."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class AdvertisementEvent(BaseModel):
    """Payload of ADVERTISEMENT / NEW_ADVERT events."""

    public_key: str = Field(..., min_length=64, max_length=64, description="Node's 64-character hex public key")
    name: Optional[str] = Field(default=None, max_length=255, description="Node name/alias")
    adv_type: Optional[str] = Field(default=None, description="Node type: chat, repeater, room, none")
    flags: Optional[int] = Field(default=None, description="Capability/status flags bitmask")
|
||||
|
||||
|
||||
class ContactMessageEvent(BaseModel):
    """Payload of CONTACT_MSG_RECV (direct message) events."""

    pubkey_prefix: str = Field(
        ..., min_length=12, max_length=12, description="First 12 characters of sender's public key"
    )
    text: str = Field(..., description="Message content")
    path_len: Optional[int] = Field(default=None, description="Number of hops message traveled")
    txt_type: Optional[int] = Field(default=None, description="Message type indicator (0=plain, 2=signed, etc.)")
    signature: Optional[str] = Field(default=None, max_length=8, description="Message signature (8 hex chars)")
    # Accepted under either the field name "SNR" or the alias "snr".
    SNR: Optional[float] = Field(default=None, alias="snr", description="Signal-to-Noise Ratio in dB")
    sender_timestamp: Optional[int] = Field(default=None, description="Unix timestamp when message was sent")

    class Config:
        populate_by_name = True
|
||||
|
||||
|
||||
class ChannelMessageEvent(BaseModel):
    """Payload of CHANNEL_MSG_RECV (broadcast channel message) events."""

    channel_idx: int = Field(..., ge=0, le=255, description="Channel number (0-255)")
    text: str = Field(..., description="Message content")
    path_len: Optional[int] = Field(default=None, description="Number of hops message traveled")
    txt_type: Optional[int] = Field(default=None, description="Message type indicator")
    signature: Optional[str] = Field(default=None, max_length=8, description="Message signature (8 hex chars)")
    # Accepted under either the field name "SNR" or the alias "snr".
    SNR: Optional[float] = Field(default=None, alias="snr", description="Signal-to-Noise Ratio in dB")
    sender_timestamp: Optional[int] = Field(default=None, description="Unix timestamp when message was sent")

    class Config:
        populate_by_name = True
|
||||
|
||||
|
||||
class TraceDataEvent(BaseModel):
    """Payload of TRACE_DATA events (network path trace results)."""

    initiator_tag: int = Field(..., description="Unique trace identifier")
    path_len: Optional[int] = Field(default=None, description="Length of the path")
    flags: Optional[int] = Field(default=None, description="Trace flags/options")
    auth: Optional[int] = Field(default=None, description="Authentication/validation data")
    # path_hashes and snr_values are parallel arrays, one entry per hop.
    path_hashes: Optional[list[str]] = Field(default=None, description="Array of 2-character node hash identifiers")
    snr_values: Optional[list[float]] = Field(default=None, description="Array of SNR values per hop")
    hop_count: Optional[int] = Field(default=None, description="Total number of hops")
|
||||
|
||||
|
||||
class TelemetryResponseEvent(BaseModel):
    """Payload of TELEMETRY_RESPONSE events (sensor data reports)."""

    node_public_key: str = Field(
        ..., min_length=64, max_length=64, description="Full public key of reporting node"
    )
    lpp_data: Optional[bytes] = Field(default=None, description="Raw LPP-encoded sensor data")
    parsed_data: Optional[dict[str, Any]] = Field(default=None, description="Decoded sensor readings")
|
||||
|
||||
|
||||
class ContactInfo(BaseModel):
    """A single contact entry inside a CONTACTS event."""

    public_key: str = Field(..., min_length=64, max_length=64, description="Node's full public key")
    name: Optional[str] = Field(default=None, max_length=255, description="Node name/alias")
    node_type: Optional[str] = Field(default=None, description="Node type: chat, repeater, room, none")
|
||||
|
||||
|
||||
class ContactsEvent(BaseModel):
    """Payload of CONTACTS sync events: the device's full contact list."""

    contacts: list[ContactInfo] = Field(..., description="Array of contact objects")
|
||||
|
||||
|
||||
class SendConfirmedEvent(BaseModel):
    """Payload of SEND_CONFIRMED events (delivery acknowledgement)."""

    destination_public_key: str = Field(
        ..., min_length=64, max_length=64, description="Recipient's full public key"
    )
    round_trip_ms: int = Field(..., description="Round-trip time in milliseconds")
|
||||
|
||||
|
||||
class StatusResponseEvent(BaseModel):
    """Payload of STATUS_RESPONSE events."""

    node_public_key: str = Field(..., min_length=64, max_length=64, description="Node's full public key")
    status: Optional[str] = Field(default=None, description="Status description")
    uptime: Optional[int] = Field(default=None, description="Uptime in seconds")
    message_count: Optional[int] = Field(default=None, description="Total messages processed")
|
||||
|
||||
|
||||
class BatteryEvent(BaseModel):
    """Payload of BATTERY events."""

    battery_voltage: float = Field(..., description="Battery voltage (e.g., 3.7V)")
    battery_percentage: int = Field(..., ge=0, le=100, description="Battery level 0-100%")
|
||||
|
||||
|
||||
class PathUpdatedEvent(BaseModel):
    """Payload of PATH_UPDATED events (new route learned to a node)."""

    node_public_key: str = Field(..., min_length=64, max_length=64, description="Target node's full public key")
    hop_count: int = Field(..., description="Number of hops in new path")
|
||||
|
||||
|
||||
class WebhookPayload(BaseModel):
    """Envelope wrapping any event when delivered to a webhook."""

    event_type: str = Field(..., description="Event type name")
    timestamp: datetime = Field(..., description="Event timestamp (ISO 8601)")
    data: dict[str, Any] = Field(..., description="Event-specific payload")
|
||||
189
src/meshcore_hub/common/schemas/messages.py
Normal file
189
src/meshcore_hub/common/schemas/messages.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""Pydantic schemas for message API endpoints."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class MessageRead(BaseModel):
    """Schema for reading a message.

    Built from ORM rows (``from_attributes``), so it can be returned
    directly from SQLAlchemy query results.
    """

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Message UUID")
    receiver_node_id: Optional[str] = Field(
        default=None, description="Receiving interface node UUID"
    )
    message_type: str = Field(..., description="Message type (contact, channel)")
    pubkey_prefix: Optional[str] = Field(
        default=None, description="Sender's public key prefix (12 chars)"
    )
    channel_idx: Optional[int] = Field(default=None, description="Channel index")
    text: str = Field(..., description="Message content")
    path_len: Optional[int] = Field(default=None, description="Number of hops")
    txt_type: Optional[int] = Field(
        default=None, description="Message type indicator"
    )
    signature: Optional[str] = Field(default=None, description="Message signature")
    snr: Optional[float] = Field(default=None, description="Signal-to-noise ratio")
    sender_timestamp: Optional[datetime] = Field(
        default=None, description="Sender's timestamp"
    )
    received_at: datetime = Field(..., description="When received by interface")
    created_at: datetime = Field(..., description="Record creation timestamp")
|
||||
|
||||
|
||||
class MessageList(BaseModel):
    """Paginated envelope for message query results."""

    items: list[MessageRead] = Field(..., description="List of messages")
    total: int = Field(..., description="Total number of messages")
    limit: int = Field(..., description="Page size limit")
    offset: int = Field(..., description="Page offset")
|
||||
|
||||
|
||||
class MessageFilters(BaseModel):
    """Query-string filters accepted by the message list endpoint.

    All filters are optional; pagination defaults to the first 50 rows.
    """

    # NOTE: field is named ``type`` to match the public query parameter,
    # even though it shadows the builtin.
    type: Optional[Literal["contact", "channel"]] = Field(
        default=None, description="Filter by message type"
    )
    pubkey_prefix: Optional[str] = Field(
        default=None, description="Filter by sender public key prefix"
    )
    channel_idx: Optional[int] = Field(
        default=None, description="Filter by channel index"
    )
    since: Optional[datetime] = Field(
        default=None, description="Start timestamp filter"
    )
    until: Optional[datetime] = Field(
        default=None, description="End timestamp filter"
    )
    search: Optional[str] = Field(
        default=None, description="Search in message text"
    )
    limit: int = Field(default=50, ge=1, le=100, description="Page size limit")
    offset: int = Field(default=0, ge=0, description="Page offset")
|
||||
|
||||
|
||||
class AdvertisementRead(BaseModel):
    """Schema for reading an advertisement (ORM-backed)."""

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Advertisement UUID")
    receiver_node_id: Optional[str] = Field(
        default=None, description="Receiving interface node UUID"
    )
    node_id: Optional[str] = Field(
        default=None, description="Advertised node UUID"
    )
    public_key: str = Field(..., description="Advertised public key")
    name: Optional[str] = Field(default=None, description="Advertised name")
    adv_type: Optional[str] = Field(default=None, description="Node type")
    flags: Optional[int] = Field(default=None, description="Capability flags")
    received_at: datetime = Field(..., description="When received")
    created_at: datetime = Field(..., description="Record creation timestamp")
|
||||
|
||||
|
||||
class AdvertisementList(BaseModel):
    """Paginated envelope for advertisement query results."""

    items: list[AdvertisementRead] = Field(..., description="List of advertisements")
    total: int = Field(..., description="Total number of advertisements")
    limit: int = Field(..., description="Page size limit")
    offset: int = Field(..., description="Page offset")
|
||||
|
||||
|
||||
class TracePathRead(BaseModel):
    """Schema for reading a trace path (ORM-backed)."""

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Trace path UUID")
    receiver_node_id: Optional[str] = Field(
        default=None, description="Receiving interface node UUID"
    )
    initiator_tag: int = Field(..., description="Trace identifier")
    path_len: Optional[int] = Field(default=None, description="Path length")
    flags: Optional[int] = Field(default=None, description="Trace flags")
    auth: Optional[int] = Field(default=None, description="Auth data")
    # path_hashes and snr_values are parallel per-hop lists — presumably
    # index-aligned; TODO confirm against the collector that fills them.
    path_hashes: Optional[list[str]] = Field(
        default=None, description="Node hash identifiers"
    )
    snr_values: Optional[list[float]] = Field(
        default=None, description="SNR values per hop"
    )
    hop_count: Optional[int] = Field(default=None, description="Total hops")
    received_at: datetime = Field(..., description="When received")
    created_at: datetime = Field(..., description="Record creation timestamp")
|
||||
|
||||
|
||||
class TracePathList(BaseModel):
    """Paginated envelope for trace-path query results."""

    items: list[TracePathRead] = Field(..., description="List of trace paths")
    total: int = Field(..., description="Total number of trace paths")
    limit: int = Field(..., description="Page size limit")
    offset: int = Field(..., description="Page offset")
|
||||
|
||||
|
||||
class TelemetryRead(BaseModel):
    """Schema for reading a telemetry record (ORM-backed)."""

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Telemetry UUID")
    receiver_node_id: Optional[str] = Field(
        default=None, description="Receiving interface node UUID"
    )
    node_id: Optional[str] = Field(
        default=None, description="Reporting node UUID"
    )
    node_public_key: str = Field(..., description="Reporting node public key")
    # Free-form decoded sensor payload; schema varies by sensor type.
    parsed_data: Optional[dict] = Field(
        default=None, description="Decoded sensor readings"
    )
    received_at: datetime = Field(..., description="When received")
    created_at: datetime = Field(..., description="Record creation timestamp")
|
||||
|
||||
|
||||
class TelemetryList(BaseModel):
    """Paginated envelope for telemetry query results."""

    items: list[TelemetryRead] = Field(..., description="List of telemetry records")
    total: int = Field(..., description="Total number of records")
    limit: int = Field(..., description="Page size limit")
    offset: int = Field(..., description="Page offset")
|
||||
|
||||
|
||||
class DashboardStats(BaseModel):
    """Aggregate counters shown on the dashboard landing page."""

    total_nodes: int = Field(..., description="Total number of nodes")
    active_nodes: int = Field(..., description="Nodes active in last 24h")
    total_messages: int = Field(..., description="Total number of messages")
    messages_today: int = Field(..., description="Messages received today")
    total_advertisements: int = Field(..., description="Total advertisements")
    # Keyed by channel index; empty dict when no channel traffic exists.
    channel_message_counts: dict[int, int] = Field(
        default_factory=dict, description="Message count per channel"
    )
|
||||
101
src/meshcore_hub/common/schemas/nodes.py
Normal file
101
src/meshcore_hub/common/schemas/nodes.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Pydantic schemas for node API endpoints."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class NodeTagCreate(BaseModel):
    """Request body for creating a node tag."""

    key: str = Field(..., min_length=1, max_length=100, description="Tag name/key")
    value: Optional[str] = Field(default=None, description="Tag value")
    # Hints how clients should interpret ``value`` (stored as text).
    value_type: Literal["string", "number", "boolean", "coordinate"] = Field(
        default="string", description="Value type hint"
    )
|
||||
|
||||
|
||||
class NodeTagUpdate(BaseModel):
    """Request body for updating a node tag; all fields optional (partial update)."""

    value: Optional[str] = Field(default=None, description="Tag value")
    value_type: Optional[Literal["string", "number", "boolean", "coordinate"]] = Field(
        default=None, description="Value type hint"
    )
|
||||
|
||||
|
||||
class NodeTagRead(BaseModel):
    """Schema for reading a node tag (ORM-backed)."""

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Tag UUID")
    node_id: str = Field(..., description="Parent node UUID")
    key: str = Field(..., description="Tag name/key")
    value: Optional[str] = Field(default=None, description="Tag value")
    value_type: str = Field(..., description="Value type hint")
    created_at: datetime = Field(..., description="Creation timestamp")
    updated_at: datetime = Field(..., description="Last update timestamp")
|
||||
|
||||
|
||||
class NodeRead(BaseModel):
    """Schema for reading a node, including its tags (ORM-backed)."""

    # Pydantic v2 configuration; replaces the deprecated inner
    # ``class Config`` style (which emits PydanticDeprecatedSince20).
    model_config = {"from_attributes": True}

    id: str = Field(..., description="Node UUID")
    public_key: str = Field(..., description="Node's 64-character hex public key")
    name: Optional[str] = Field(default=None, description="Node display name")
    adv_type: Optional[str] = Field(default=None, description="Advertisement type")
    flags: Optional[int] = Field(default=None, description="Capability flags")
    first_seen: datetime = Field(..., description="First advertisement timestamp")
    last_seen: datetime = Field(..., description="Last activity timestamp")
    created_at: datetime = Field(..., description="Record creation timestamp")
    updated_at: datetime = Field(..., description="Record update timestamp")
    tags: list[NodeTagRead] = Field(default_factory=list, description="Node tags")
|
||||
|
||||
|
||||
class NodeList(BaseModel):
    """Paginated envelope for node query results."""

    items: list[NodeRead] = Field(..., description="List of nodes")
    total: int = Field(..., description="Total number of nodes")
    limit: int = Field(..., description="Page size limit")
    offset: int = Field(..., description="Page offset")
|
||||
|
||||
|
||||
class NodeFilters(BaseModel):
    """Query-string filters accepted by the node list endpoint."""

    search: Optional[str] = Field(
        default=None, description="Search in name or public key"
    )
    adv_type: Optional[str] = Field(
        default=None, description="Filter by advertisement type"
    )
    has_tag: Optional[str] = Field(default=None, description="Filter by tag key")
    limit: int = Field(default=50, ge=1, le=100, description="Page size limit")
    offset: int = Field(default=0, ge=0, description="Page offset")
|
||||
1
src/meshcore_hub/interface/__init__.py
Normal file
1
src/meshcore_hub/interface/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Interface component for MeshCore device communication."""
|
||||
0
src/meshcore_hub/py.typed
Normal file
0
src/meshcore_hub/py.typed
Normal file
1
src/meshcore_hub/web/__init__.py
Normal file
1
src/meshcore_hub/web/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Web dashboard component for visualizing MeshCore network."""
|
||||
1
src/meshcore_hub/web/routes/__init__.py
Normal file
1
src/meshcore_hub/web/routes/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Web dashboard route handlers."""
|
||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""MeshCore Hub test suite."""
|
||||
29
tests/conftest.py
Normal file
29
tests/conftest.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Shared pytest fixtures for all tests."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from meshcore_hub.common.models import Base
|
||||
|
||||
|
||||
@pytest.fixture
def db_engine():
    """Yield an in-memory SQLite engine with all tables created.

    Tables are dropped and the engine disposed during fixture teardown.
    """
    # check_same_thread=False lets the same in-memory DB be used across
    # threads, which some test clients require.
    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    Base.metadata.create_all(engine)
    yield engine
    # Teardown: remove schema, then release the connection pool.
    Base.metadata.drop_all(engine)
    engine.dispose()
|
||||
|
||||
|
||||
@pytest.fixture
def db_session(db_engine):
    """Yield an ORM session bound to the in-memory test engine."""
    factory = sessionmaker(bind=db_engine)
    sess = factory()
    yield sess
    sess.close()
|
||||
1
tests/test_api/__init__.py
Normal file
1
tests/test_api/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""API component tests."""
|
||||
1
tests/test_collector/__init__.py
Normal file
1
tests/test_collector/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Collector component tests."""
|
||||
1
tests/test_collector/test_handlers/__init__.py
Normal file
1
tests/test_collector/test_handlers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Event handler tests."""
|
||||
1
tests/test_common/__init__.py
Normal file
1
tests/test_common/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Common package tests."""
|
||||
84
tests/test_common/test_config.py
Normal file
84
tests/test_common/test_config.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Tests for configuration settings."""
|
||||
|
||||
import pytest
|
||||
|
||||
from meshcore_hub.common.config import (
|
||||
CommonSettings,
|
||||
InterfaceSettings,
|
||||
CollectorSettings,
|
||||
APISettings,
|
||||
WebSettings,
|
||||
LogLevel,
|
||||
InterfaceMode,
|
||||
)
|
||||
|
||||
|
||||
class TestCommonSettings:
    """Tests for CommonSettings."""

    def test_default_values(self) -> None:
        """Defaults should match documented out-of-the-box configuration."""
        s = CommonSettings()
        assert s.log_level == LogLevel.INFO
        # MQTT broker defaults.
        assert s.mqtt_host == "localhost"
        assert s.mqtt_port == 1883
        assert s.mqtt_username is None
        assert s.mqtt_password is None
        assert s.mqtt_prefix == "meshcore"
|
||||
|
||||
|
||||
class TestInterfaceSettings:
    """Tests for InterfaceSettings."""

    def test_default_values(self) -> None:
        """Defaults should match documented out-of-the-box configuration."""
        s = InterfaceSettings()
        assert s.interface_mode == InterfaceMode.RECEIVER
        assert s.serial_port == "/dev/ttyUSB0"
        assert s.serial_baud == 115200
        assert s.mock_device is False
|
||||
|
||||
|
||||
class TestCollectorSettings:
    """Tests for CollectorSettings."""

    def test_default_values(self) -> None:
        """Default database URL is a local SQLite file."""
        s = CollectorSettings()
        assert s.database_url == "sqlite:///./meshcore.db"

    def test_database_url_validation(self) -> None:
        """An empty database URL must be rejected by validation."""
        with pytest.raises(ValueError):
            CollectorSettings(database_url="")
|
||||
|
||||
|
||||
class TestAPISettings:
    """Tests for APISettings."""

    def test_default_values(self) -> None:
        """Defaults should match documented out-of-the-box configuration."""
        s = APISettings()
        assert s.api_host == "0.0.0.0"
        assert s.api_port == 8000
        assert s.database_url == "sqlite:///./meshcore.db"
        # No API keys configured by default (auth disabled).
        assert s.api_read_key is None
        assert s.api_admin_key is None
|
||||
|
||||
|
||||
class TestWebSettings:
    """Tests for WebSettings."""

    def test_default_values(self) -> None:
        """Defaults should match documented out-of-the-box configuration."""
        s = WebSettings()
        assert s.web_host == "0.0.0.0"
        assert s.web_port == 8080
        assert s.api_base_url == "http://localhost:8000"
        assert s.network_name == "MeshCore Network"
        assert s.members_file == "members.json"
|
||||
178
tests/test_common/test_models.py
Normal file
178
tests/test_common/test_models.py
Normal file
@@ -0,0 +1,178 @@
|
||||
"""Tests for database models."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from meshcore_hub.common.models import (
|
||||
Base,
|
||||
Node,
|
||||
NodeTag,
|
||||
Message,
|
||||
Advertisement,
|
||||
TracePath,
|
||||
Telemetry,
|
||||
EventLog,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def db_session():
    """Yield an ORM session backed by a fresh in-memory SQLite schema."""
    # check_same_thread=False allows cross-thread use of the in-memory DB.
    engine = create_engine(
        "sqlite:///:memory:", connect_args={"check_same_thread": False}
    )
    Base.metadata.create_all(engine)
    factory = sessionmaker(bind=engine)
    sess = factory()
    yield sess
    # Teardown in reverse order of setup.
    sess.close()
    Base.metadata.drop_all(engine)
    engine.dispose()
|
||||
|
||||
|
||||
class TestNodeModel:
    """Tests for Node model."""

    def test_create_node(self, db_session) -> None:
        """A node persists and round-trips all basic columns."""
        pk = "a" * 64
        node = Node(public_key=pk, name="Test Node", adv_type="chat", flags=218)
        db_session.add(node)
        db_session.commit()

        # id is generated on insert.
        assert node.id is not None
        assert node.public_key == pk
        assert node.name == "Test Node"
        assert node.adv_type == "chat"
        assert node.flags == 218

    def test_node_tags_relationship(self, db_session) -> None:
        """Tags appended to a node cascade-persist with it."""
        node = Node(public_key="b" * 64, name="Tagged Node")
        node.tags.append(
            NodeTag(key="location", value="51.5,-0.1", value_type="coordinate")
        )
        db_session.add(node)
        db_session.commit()

        assert len(node.tags) == 1
        assert node.tags[0].key == "location"
|
||||
|
||||
|
||||
class TestMessageModel:
    """Tests for Message model."""

    def test_create_contact_message(self, db_session) -> None:
        """A direct (contact) message persists with sender prefix and SNR."""
        msg = Message(
            message_type="contact",
            pubkey_prefix="01ab2186c4d5",
            text="Hello World!",
            path_len=3,
            snr=15.5,
        )
        db_session.add(msg)
        db_session.commit()

        assert msg.id is not None
        assert msg.message_type == "contact"
        assert msg.text == "Hello World!"

    def test_create_channel_message(self, db_session) -> None:
        """A broadcast (channel) message persists with its channel index."""
        msg = Message(
            message_type="channel",
            channel_idx=4,
            text="Channel broadcast",
            path_len=10,
        )
        db_session.add(msg)
        db_session.commit()

        assert msg.channel_idx == 4
        assert msg.message_type == "channel"
|
||||
|
||||
|
||||
class TestAdvertisementModel:
    """Tests for Advertisement model."""

    def test_create_advertisement(self, db_session) -> None:
        """An advertisement persists and round-trips its columns."""
        pk = "c" * 64
        ad = Advertisement(
            public_key=pk, name="Repeater-01", adv_type="repeater", flags=128
        )
        db_session.add(ad)
        db_session.commit()

        assert ad.id is not None
        assert ad.public_key == pk
        assert ad.adv_type == "repeater"
|
||||
|
||||
|
||||
class TestTracePathModel:
    """Tests for TracePath model."""

    def test_create_trace_path(self, db_session) -> None:
        """A trace path persists, including its JSON list columns."""
        trace = TracePath(
            initiator_tag=123456789,
            path_len=3,
            path_hashes=["4a", "b3", "fa"],
            snr_values=[25.3, 18.7, 12.4],
            hop_count=3,
        )
        db_session.add(trace)
        db_session.commit()

        assert trace.id is not None
        assert trace.initiator_tag == 123456789
        # List column round-trips intact through the JSON type.
        assert trace.path_hashes == ["4a", "b3", "fa"]
|
||||
|
||||
|
||||
class TestTelemetryModel:
    """Tests for Telemetry model."""

    def test_create_telemetry(self, db_session) -> None:
        """A telemetry record persists its JSON sensor payload."""
        readings = {"temperature": 22.5, "humidity": 65, "battery": 3.8}
        rec = Telemetry(node_public_key="d" * 64, parsed_data=readings)
        db_session.add(rec)
        db_session.commit()

        assert rec.id is not None
        assert rec.parsed_data["temperature"] == 22.5
|
||||
|
||||
|
||||
class TestEventLogModel:
    """Tests for EventLog model."""

    def test_create_event_log(self, db_session) -> None:
        """An event log row persists its type and JSON payload."""
        entry = EventLog(
            event_type="BATTERY",
            payload={"battery_voltage": 3.8, "battery_percentage": 75},
        )
        db_session.add(entry)
        db_session.commit()

        assert entry.id is not None
        assert entry.event_type == "BATTERY"
        assert entry.payload["battery_percentage"] == 75
|
||||
1
tests/test_interface/__init__.py
Normal file
1
tests/test_interface/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Interface component tests."""
|
||||
1
tests/test_web/__init__.py
Normal file
1
tests/test_web/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Web dashboard component tests."""
|
||||
Reference in New Issue
Block a user