forked from iarv/meshcore-hub
Compare commits
26 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 89ceee8741 | |||
| 64ec1a7135 | |||
| 3d632a94b1 | |||
| fbd29ff78e | |||
| 86bff07f7d | |||
| 3abd5ce3ea | |||
| 0bf2086f16 | |||
| 40dc6647e9 | |||
| f4e95a254e | |||
| ba43be9e62 | |||
| 5b22ab29cf | |||
| 278d102064 | |||
| f0cee14bd8 | |||
| 5ff8d16bcb | |||
| e8a60d4869 | |||
| 84b8614e29 | |||
| 3bc47a33bc | |||
| 3ae8ecbd70 | |||
| 38164380af | |||
| dc3c771c76 | |||
| deb307c6ae | |||
| b8c8284643 | |||
| d310a119ed | |||
| 2b307679c9 | |||
| 6f7521951f | |||
| ab498292b2 |
@@ -52,6 +52,11 @@ MQTT_USERNAME=
|
||||
MQTT_PASSWORD=
|
||||
MQTT_PREFIX=meshcore
|
||||
|
||||
# Enable TLS/SSL for MQTT connection (default: false)
|
||||
# When enabled, uses TLS with system CA certificates (e.g., for Let's Encrypt)
|
||||
# Set to true for secure MQTT connections (port 8883)
|
||||
MQTT_TLS=false
|
||||
|
||||
# External port mappings for local MQTT broker (--profile mqtt only)
|
||||
MQTT_EXTERNAL_PORT=1883
|
||||
MQTT_WS_PORT=9001
|
||||
@@ -69,6 +74,10 @@ SERIAL_PORT_SENDER=/dev/ttyUSB1
|
||||
# Baud rate for serial communication
|
||||
SERIAL_BAUD=115200
|
||||
|
||||
# Optional device/node name to set on startup
|
||||
# This name is broadcast to the mesh network in advertisements
|
||||
MESHCORE_DEVICE_NAME=
|
||||
|
||||
# Optional node address override (64-char hex string)
|
||||
# Only set if you need to override the device's public key
|
||||
NODE_ADDRESS=
|
||||
@@ -137,3 +146,35 @@ WEBHOOK_MESSAGE_SECRET=
|
||||
WEBHOOK_TIMEOUT=10.0
|
||||
WEBHOOK_MAX_RETRIES=3
|
||||
WEBHOOK_RETRY_BACKOFF=2.0
|
||||
|
||||
# ===================
|
||||
# Data Retention Settings
|
||||
# ===================
|
||||
|
||||
# Enable automatic cleanup of old event data
|
||||
# When enabled, the collector runs periodic cleanup to delete old events
|
||||
# Default: true
|
||||
DATA_RETENTION_ENABLED=true
|
||||
|
||||
# Number of days to retain event data (advertisements, messages, telemetry, etc.)
|
||||
# Events older than this are deleted during cleanup
|
||||
# Default: 30 days
|
||||
DATA_RETENTION_DAYS=30
|
||||
|
||||
# Hours between automatic cleanup runs (applies to both events and nodes)
|
||||
# Default: 24 hours (once per day)
|
||||
DATA_RETENTION_INTERVAL_HOURS=24
|
||||
|
||||
# ===================
|
||||
# Node Cleanup Settings
|
||||
# ===================
|
||||
|
||||
# Enable automatic cleanup of inactive nodes
|
||||
# Nodes that haven't been seen (last_seen) for the specified period are removed
|
||||
# Nodes with last_seen=NULL (never seen on network) are NOT removed
|
||||
# Default: true
|
||||
NODE_CLEANUP_ENABLED=true
|
||||
|
||||
# Remove nodes not seen for this many days (based on last_seen field)
|
||||
# Default: 7 days
|
||||
NODE_CLEANUP_DAYS=7
|
||||
|
||||
@@ -2,9 +2,9 @@ name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, master]
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main, master]
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
|
||||
@@ -47,4 +47,3 @@ jobs:
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
# claude_args: '--allowed-tools Bash(gh pr:*)'
|
||||
|
||||
|
||||
@@ -2,11 +2,9 @@ name: Docker
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, master]
|
||||
branches: [main]
|
||||
tags:
|
||||
- "v*"
|
||||
pull_request:
|
||||
branches: [main, master]
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
@@ -59,13 +57,13 @@ jobs:
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
SETUPTOOLS_SCM_PRETEND_VERSION=${{ startsWith(github.ref, 'refs/tags/v') && github.ref_name || format('0.0.0.dev0+g{0}', github.sha) }}
|
||||
BUILD_VERSION=${{ github.ref_name }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Test Docker image
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
docker build -t meshcore-hub-test --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=0.0.0.dev0+g${{ github.sha }} -f Dockerfile .
|
||||
docker build -t meshcore-hub-test --build-arg BUILD_VERSION=${{ github.ref_name }} -f Dockerfile .
|
||||
docker run --rm meshcore-hub-test --version
|
||||
docker run --rm meshcore-hub-test --help
|
||||
|
||||
@@ -218,4 +218,3 @@ __marimo__/
|
||||
# MeshCore Hub specific
|
||||
*.db
|
||||
meshcore.db
|
||||
src/meshcore_hub/_version.py
|
||||
|
||||
@@ -264,6 +264,7 @@ meshcore-hub/
|
||||
│ ├── collector/
|
||||
│ │ ├── cli.py # Collector CLI with seed commands
|
||||
│ │ ├── subscriber.py # MQTT subscriber
|
||||
│ │ ├── cleanup.py # Data retention/cleanup service
|
||||
│ │ ├── tag_import.py # Tag import from YAML
|
||||
│ │ ├── member_import.py # Member import from YAML
|
||||
│ │ ├── handlers/ # Event handlers
|
||||
@@ -502,6 +503,48 @@ The collector supports forwarding events to external HTTP endpoints:
|
||||
| `WEBHOOK_MAX_RETRIES` | Max retries on failure (default: 3) |
|
||||
| `WEBHOOK_RETRY_BACKOFF` | Exponential backoff multiplier (default: 2.0) |
|
||||
|
||||
### Data Retention / Cleanup Configuration
|
||||
|
||||
The collector supports automatic cleanup of old event data and inactive nodes:
|
||||
|
||||
**Event Data Cleanup:**
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `DATA_RETENTION_ENABLED` | Enable automatic event data cleanup (default: true) |
|
||||
| `DATA_RETENTION_DAYS` | Days to retain event data (default: 30) |
|
||||
| `DATA_RETENTION_INTERVAL_HOURS` | Hours between cleanup runs (default: 24) |
|
||||
|
||||
When enabled, the collector automatically deletes event data older than the retention period:
|
||||
- Advertisements
|
||||
- Messages (channel and direct)
|
||||
- Telemetry
|
||||
- Trace paths
|
||||
- Event logs
|
||||
|
||||
**Node Cleanup:**
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `NODE_CLEANUP_ENABLED` | Enable automatic cleanup of inactive nodes (default: true) |
|
||||
| `NODE_CLEANUP_DAYS` | Remove nodes not seen for this many days (default: 7) |
|
||||
|
||||
When enabled, the collector automatically removes nodes where:
|
||||
- `last_seen` is older than the configured number of days
|
||||
- Nodes with `last_seen=NULL` (never seen on network) are **NOT** removed
|
||||
- Nodes created via tag import that have never been seen on the mesh are preserved
|
||||
|
||||
**Note:** Both event data and node cleanup run on the same schedule (DATA_RETENTION_INTERVAL_HOURS).
|
||||
|
||||
Manual cleanup can be triggered at any time with:
|
||||
```bash
|
||||
# Dry run to see what would be deleted
|
||||
meshcore-hub collector cleanup --retention-days 30 --dry-run
|
||||
|
||||
# Live cleanup
|
||||
meshcore-hub collector cleanup --retention-days 30
|
||||
```
|
||||
|
||||
Webhook payload structure:
|
||||
```json
|
||||
{
|
||||
@@ -581,6 +624,20 @@ On startup, the receiver performs these initialization steps:
|
||||
1. Set device clock to current Unix timestamp
|
||||
2. Send a local (non-flood) advertisement
|
||||
3. Start automatic message fetching
|
||||
4. Sync the device's contact database
|
||||
|
||||
### Contact Sync Behavior
|
||||
|
||||
The receiver syncs the device's contact database in two scenarios:
|
||||
|
||||
1. **Startup**: Initial sync when receiver starts
|
||||
2. **Advertisement Events**: Automatic sync triggered whenever an advertisement is received from the mesh
|
||||
|
||||
Since advertisements are typically received every ~20 minutes, contact sync happens automatically without manual intervention. Each contact from the device is published individually to MQTT:
|
||||
- Topic: `{prefix}/{device_public_key}/event/contact`
|
||||
- Payload: `{public_key, adv_name, type}`
|
||||
|
||||
This ensures the collector's database stays current with all nodes discovered on the mesh network.
|
||||
|
||||
## References
|
||||
|
||||
|
||||
+7
-6
@@ -21,9 +21,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
RUN python -m venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
# Build argument for version (set via CI or manually)
|
||||
ARG SETUPTOOLS_SCM_PRETEND_VERSION=0.0.0+docker
|
||||
|
||||
# Copy project files
|
||||
WORKDIR /app
|
||||
COPY pyproject.toml README.md ./
|
||||
@@ -31,9 +28,13 @@ COPY src/ ./src/
|
||||
COPY alembic/ ./alembic/
|
||||
COPY alembic.ini ./
|
||||
|
||||
# Install the package with version from build arg
|
||||
RUN pip install --upgrade pip && \
|
||||
SETUPTOOLS_SCM_PRETEND_VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION} pip install .
|
||||
# Build argument for version (set via CI or manually)
|
||||
ARG BUILD_VERSION=dev
|
||||
|
||||
# Set version in _version.py and install the package
|
||||
RUN sed -i "s|__version__ = \"dev\"|__version__ = \"${BUILD_VERSION}\"|" src/meshcore_hub/_version.py && \
|
||||
pip install --upgrade pip && \
|
||||
pip install .
|
||||
|
||||
# =============================================================================
|
||||
# Stage 2: Runtime - Final production image
|
||||
|
||||
@@ -481,6 +481,7 @@ ${DATA_HOME}/
|
||||
| INTERFACE_MODE | RECEIVER | RECEIVER or SENDER |
|
||||
| SERIAL_PORT | /dev/ttyUSB0 | Serial port path |
|
||||
| SERIAL_BAUD | 115200 | Baud rate |
|
||||
| MESHCORE_DEVICE_NAME | *(none)* | Device/node name set on startup |
|
||||
| MOCK_DEVICE | false | Use mock device |
|
||||
|
||||
### Collector
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
Python 3.11+ platform for managing and orchestrating MeshCore mesh networks.
|
||||
|
||||

|
||||
|
||||
## Overview
|
||||
|
||||
MeshCore Hub provides a complete solution for monitoring, collecting, and interacting with MeshCore mesh networks. It consists of multiple components that work together:
|
||||
@@ -235,6 +237,57 @@ meshcore-hub api
|
||||
meshcore-hub web
|
||||
```
|
||||
|
||||
## Updating an Existing Installation
|
||||
|
||||
To update MeshCore Hub to the latest version:
|
||||
|
||||
```bash
|
||||
# Navigate to your installation directory
|
||||
cd meshcore-hub
|
||||
|
||||
# Pull the latest code
|
||||
git pull
|
||||
|
||||
# Pull latest Docker images
|
||||
docker compose --profile all pull
|
||||
|
||||
# Recreate and restart services
|
||||
# For receiver/sender only installs:
|
||||
docker compose --profile receiver up -d --force-recreate
|
||||
|
||||
# For core services with MQTT:
|
||||
docker compose --profile mqtt --profile core up -d --force-recreate
|
||||
|
||||
# For core services without local MQTT:
|
||||
docker compose --profile core up -d --force-recreate
|
||||
|
||||
# For complete stack (all services):
|
||||
docker compose --profile mqtt --profile core --profile receiver up -d --force-recreate
|
||||
|
||||
# View logs to verify update
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
**Note:** Database migrations run automatically on collector startup, so no manual migration step is needed when using Docker.
|
||||
|
||||
For manual installations:
|
||||
|
||||
```bash
|
||||
# Pull latest code
|
||||
git pull
|
||||
|
||||
# Activate virtual environment
|
||||
source .venv/bin/activate
|
||||
|
||||
# Update dependencies
|
||||
pip install -e ".[dev]"
|
||||
|
||||
# Run database migrations
|
||||
meshcore-hub db upgrade
|
||||
|
||||
# Restart your services
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
All components are configured via environment variables. Create a `.env` file or export variables:
|
||||
@@ -255,6 +308,7 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
| `INTERFACE_MODE` | `RECEIVER` | Operating mode (RECEIVER or SENDER) |
|
||||
| `SERIAL_PORT` | `/dev/ttyUSB0` | Serial port for MeshCore device |
|
||||
| `SERIAL_BAUD` | `115200` | Serial baud rate |
|
||||
| `MESHCORE_DEVICE_NAME` | *(none)* | Device/node name set on startup (broadcast in advertisements) |
|
||||
| `MOCK_DEVICE` | `false` | Use mock device for testing |
|
||||
|
||||
### Collector Settings
|
||||
@@ -266,7 +320,7 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
|
||||
#### Webhook Configuration
|
||||
|
||||
The collector can forward events to external HTTP endpoints:
|
||||
The collector can forward certain events to external HTTP endpoints:
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
@@ -317,6 +371,7 @@ meshcore-hub --help
|
||||
|
||||
# Interface component
|
||||
meshcore-hub interface --mode receiver --port /dev/ttyUSB0
|
||||
meshcore-hub interface --mode receiver --device-name "Gateway Node" # Set device name
|
||||
meshcore-hub interface --mode sender --mock # Use mock device
|
||||
|
||||
# Collector component
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
# IPNet Network Members
|
||||
members:
|
||||
- name: Louis
|
||||
callsign: Louis
|
||||
role: admin
|
||||
description: IPNet Founder
|
||||
nodes:
|
||||
# ip2-rep01
|
||||
- public_key: 2337484665ced7e210007e9fd9db98ced0a24a6eab8b4cbe3a06b3a1cea33ca1
|
||||
node_role: repeater
|
||||
# ip2-rep02
|
||||
- public_key: 8cb01fff1afc099055af418ce5fc5e60384df9ff763c25dd7e6a5e0922e8df90
|
||||
node_role: repeater
|
||||
# ip2-rep03
|
||||
- public_key: 5b565df747913358e24d890b2227de9c35d09763746b6ec326c15ebbf9b8be3b
|
||||
node_role: repeater
|
||||
# ip2-sol01
|
||||
- public_key: 87eb9487a1a4351e986e55627b2d09c4da61f94d080eaf4d7129caef89886e25
|
||||
node_role: repeater
|
||||
# personal chat node
|
||||
- public_key: c6e0d85528b4b5d7f53aa7dded2b7e0b9c8f8a5c00acfaad47476ef5f3c7dc47
|
||||
node_role: chat
|
||||
- name: Mark
|
||||
callsign: Mark
|
||||
role: member
|
||||
description: IPNet Member
|
||||
nodes:
|
||||
- public_key: 22309435fbd9dd1f14870a1895dc854779f6b2af72b08542f6105d264a493ebe
|
||||
node_role: repeater
|
||||
- public_key: 9135986b83815ada92883358435cc6528c7db60cb647f9b6547739a1ce5eb1c8
|
||||
node_role: repeater
|
||||
- public_key: 2a4f89e766dfa1758e35a69962c1f6d352b206a5e3562a589155a3ebfe7fc2bb
|
||||
node_role: repeater
|
||||
- public_key: e790b73b2d6e377dd0f575c847f3ef42232f610eb9a19af57083fc4f647309ac
|
||||
node_role: repeater
|
||||
- public_key: d3c20d962f7384c111fbafad6fbc1c1dc0e5c3ce802fb3ee11020e8d8207ed3a
|
||||
node_role: repeater
|
||||
- public_key: b00ce9d218203e96d8557a4d59e06f5de59bbc4dcc4df9c870079d2cb8b5bd80
|
||||
node_role: repeater
|
||||
- name: CCZ
|
||||
callsign: CCZ
|
||||
role: member
|
||||
nodes:
|
||||
- public_key: e334ec5475789d542ed9e692fbeef7444a371fcc05adcbda1f47ba6a3191b459
|
||||
node_role: repeater
|
||||
- public_key: cc15fb33e98f2e098a543f516f770dc3061a1a6b30f79b84780663bf68ae6b53
|
||||
node_role: repeater
|
||||
- public_key: 20ed75ffc0f9777951716bb3d308d7f041fd2ad32fe2e998e600d0361e1fe2ac
|
||||
node_role: repeater
|
||||
description: IPNet Member
|
||||
- name: Walshie
|
||||
callsign: Walshie86
|
||||
role: member
|
||||
description: IPNet Member
|
||||
nodes:
|
||||
- public_key: bd7b5ac75f660675b39f368e1dbb6d1dbcefd8bd7a170e21a942954f67c8bf52
|
||||
node_role: repeater
|
||||
- public_key: 9cf300c40112ea34d0a59858270948b27ab6cd87e840de338f3ca782c17537b2
|
||||
node_role: repeater
|
||||
- name: Craig
|
||||
callsign: M7XCN
|
||||
role: member
|
||||
description: IPNet Member
|
||||
nodes:
|
||||
- public_key: 8accb6d0189ccaffb745ba54793e7fe3edd515edb45554325d957e48c1b9f3b3
|
||||
node_role: repeater
|
||||
@@ -1,239 +0,0 @@
|
||||
# IPNet Network Node Tags
|
||||
# Uses YAML primitives: numbers, booleans, and strings are auto-detected
|
||||
|
||||
# IP2 Area Nodes
|
||||
2337484665ced7e210007e9fd9db98ced0a24a6eab8b4cbe3a06b3a1cea33ca1:
|
||||
friendly_name: IP2 Repeater 1
|
||||
node_id: ip2-rep01.ipnt.uk
|
||||
member_id: louis
|
||||
area: IP2
|
||||
lat: 52.0357627
|
||||
lon: 1.132079
|
||||
location_description: Fountains Road
|
||||
hardware: Heltec V3
|
||||
antenna: Paradar 8.5dBi Omni
|
||||
elevation: 31
|
||||
role: infra
|
||||
|
||||
8cb01fff1afc099055af418ce5fc5e60384df9ff763c25dd7e6a5e0922e8df90:
|
||||
friendly_name: IP2 Repeater 2
|
||||
node_id: ip2-rep02.ipnt.uk
|
||||
member_id: louis
|
||||
area: IP2
|
||||
lat: 52.0390682
|
||||
lon: 1.1304141
|
||||
location_description: Belstead Road
|
||||
hardware: Heltec V3
|
||||
antenna: McGill 6dBi Omni
|
||||
elevation: 44
|
||||
role: infra
|
||||
|
||||
5b565df747913358e24d890b2227de9c35d09763746b6ec326c15ebbf9b8be3b:
|
||||
friendly_name: IP2 Repeater 3
|
||||
node_id: ip2-rep03.ipnt.uk
|
||||
member_id: louis
|
||||
area: IP2
|
||||
lat: 52.046356
|
||||
lon: 1.134661
|
||||
location_description: Birkfield Drive
|
||||
hardware: Heltec V3
|
||||
antenna: Paradar 8.5dBi Omni
|
||||
elevation: 52
|
||||
role: infra
|
||||
|
||||
780d0939f90b22d3bd7cbedcaf4e8d468a12c01886ab24b8cfa11eab2f5516c5:
|
||||
friendly_name: IP2 Integration 1
|
||||
node_id: ip2-int01.ipnt.uk
|
||||
member_id: louis
|
||||
area: IP2
|
||||
lat: 52.0354539
|
||||
lon: 1.1295338
|
||||
location_description: Fountains Road
|
||||
hardware: Heltec V3
|
||||
antenna: Generic 5dBi Whip
|
||||
elevation: 25
|
||||
role: infra
|
||||
|
||||
30121dc60362c633c457ffa18f49b3e1d6823402c33709f32d7df70612250b96:
|
||||
friendly_name: MeshBot
|
||||
node_id: bot.ipnt.uk
|
||||
member_id: louis
|
||||
area: IP2
|
||||
lat: 52.0354539
|
||||
lon: 1.1295338
|
||||
location_description: Fountains Road
|
||||
hardware: Heltec V3
|
||||
antenna: Generic 5dBi Whip
|
||||
elevation: 25
|
||||
role: infra
|
||||
|
||||
# IP3 Area Nodes
|
||||
9135986b83815ada92883358435cc6528c7db60cb647f9b6547739a1ce5eb1c8:
|
||||
friendly_name: IP3 Repeater 1
|
||||
node_id: ip3-rep01.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP3
|
||||
lat: 52.045803
|
||||
lon: 1.204416
|
||||
location_description: Brokehall
|
||||
hardware: Heltec V3
|
||||
antenna: Paradar 8.5dBi Omni
|
||||
elevation: 42
|
||||
role: infra
|
||||
|
||||
e334ec5475789d542ed9e692fbeef7444a371fcc05adcbda1f47ba6a3191b459:
|
||||
friendly_name: IP3 Repeater 2
|
||||
node_id: ip3-rep02.ipnt.uk
|
||||
member_id: ccz
|
||||
area: IP3
|
||||
lat: 52.03297
|
||||
lon: 1.17543
|
||||
location_description: Morland Road Allotments
|
||||
hardware: Heltec T114
|
||||
antenna: Unknown
|
||||
elevation: 39
|
||||
role: infra
|
||||
|
||||
cc15fb33e98f2e098a543f516f770dc3061a1a6b30f79b84780663bf68ae6b53:
|
||||
friendly_name: IP3 Repeater 3
|
||||
node_id: ip3-rep03.ipnt.uk
|
||||
member_id: ccz
|
||||
area: IP3
|
||||
lat: 52.04499
|
||||
lon: 1.18149
|
||||
location_description: Hatfield Road
|
||||
hardware: Heltec V3
|
||||
antenna: Unknown
|
||||
elevation: 39
|
||||
role: infra
|
||||
|
||||
22309435fbd9dd1f14870a1895dc854779f6b2af72b08542f6105d264a493ebe:
|
||||
friendly_name: IP3 Integration 1
|
||||
node_id: ip3-int01.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP3
|
||||
lat: 52.045773
|
||||
lon: 1.212808
|
||||
location_description: Brokehall
|
||||
hardware: Heltec V3
|
||||
antenna: Generic 3dBi Whip
|
||||
elevation: 37
|
||||
role: infra
|
||||
|
||||
2a4f89e766dfa1758e35a69962c1f6d352b206a5e3562a589155a3ebfe7fc2bb:
|
||||
friendly_name: IP3 Repeater 4
|
||||
node_id: ip3-rep04.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP3
|
||||
lat: 52.046383
|
||||
lon: 1.174542
|
||||
location_description: Holywells
|
||||
hardware: Sensecap Solar
|
||||
antenna: Paradar 6.5dbi Omni
|
||||
elevation: 21
|
||||
role: infra
|
||||
|
||||
e790b73b2d6e377dd0f575c847f3ef42232f610eb9a19af57083fc4f647309ac:
|
||||
friendly_name: IP3 Repeater 5
|
||||
node_id: ip3-rep05.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP3
|
||||
lat: 52.05252
|
||||
lon: 1.17034
|
||||
location_description: Back Hamlet
|
||||
hardware: Heltec T114
|
||||
antenna: Paradar 6.5dBi Omni
|
||||
elevation: 38
|
||||
role: infra
|
||||
|
||||
20ed75ffc0f9777951716bb3d308d7f041fd2ad32fe2e998e600d0361e1fe2ac:
|
||||
friendly_name: IP3 Repeater 6
|
||||
node_id: ip3-rep06.ipnt.uk
|
||||
member_id: ccz
|
||||
area: IP3
|
||||
lat: 52.04893
|
||||
lon: 1.18965
|
||||
location_description: Dover Road
|
||||
hardware: Unknown
|
||||
antenna: Generic 5dBi Whip
|
||||
elevation: 38
|
||||
role: infra
|
||||
|
||||
69fb8431e7ab307513797544fab99ce53ce24c46ec2d3a11767fe70f2ca37b23:
|
||||
friendly_name: IP3 Test Repeater 1
|
||||
node_id: ip3-tst01.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP3
|
||||
lat: 52.041869
|
||||
lon: 1.204789
|
||||
location_description: Brokehall
|
||||
hardware: Station G2
|
||||
antenna: McGill 10dBi Panel
|
||||
elevation: 37
|
||||
role: infra
|
||||
|
||||
# IP4 Area Nodes
|
||||
d3c20d962f7384c111fbafad6fbc1c1dc0e5c3ce802fb3ee11020e8d8207ed3a:
|
||||
friendly_name: IP4 Repeater 1
|
||||
node_id: ip4-rep01.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP4
|
||||
lat: 52.052445
|
||||
lon: 1.156882
|
||||
location_description: Wine Rack
|
||||
hardware: Heltec T114
|
||||
antenna: Generic 5dbi Whip
|
||||
elevation: 50
|
||||
role: infra
|
||||
|
||||
b00ce9d218203e96d8557a4d59e06f5de59bbc4dcc4df9c870079d2cb8b5bd80:
|
||||
friendly_name: IP4 Repeater 2
|
||||
node_id: ip4-rep02.ipnt.uk
|
||||
member_id: markab
|
||||
area: IP4
|
||||
lat: 52.06217
|
||||
lon: 1.18332
|
||||
location_description: Rushmere Road
|
||||
hardware: Heltec V3
|
||||
antenna: Paradar 5dbi Whip
|
||||
elevation: 35
|
||||
role: infra
|
||||
|
||||
8accb6d0189ccaffb745ba54793e7fe3edd515edb45554325d957e48c1b9f3b3:
|
||||
friendly_name: IP4 Repeater 3
|
||||
node_id: ip4-rep03.ipnt.uk
|
||||
member_id: craig
|
||||
area: IP4
|
||||
lat: 52.058
|
||||
lon: 1.165
|
||||
location_description: IP4 Area
|
||||
hardware: Heltec v3
|
||||
antenna: Generic Whip
|
||||
elevation: 30
|
||||
role: infra
|
||||
|
||||
# IP8 Area Nodes
|
||||
bd7b5ac75f660675b39f368e1dbb6d1dbcefd8bd7a170e21a942954f67c8bf52:
|
||||
friendly_name: IP8 Repeater 1
|
||||
node_id: rep01.ip8.ipnt.uk
|
||||
member_id: walshie86
|
||||
area: IP8
|
||||
lat: 52.033684
|
||||
lon: 1.118384
|
||||
location_description: Grove Hill
|
||||
hardware: Heltec V3
|
||||
antenna: McGill 3dBi Omni
|
||||
elevation: 13
|
||||
role: infra
|
||||
|
||||
9cf300c40112ea34d0a59858270948b27ab6cd87e840de338f3ca782c17537b2:
|
||||
friendly_name: IP8 Repeater 2
|
||||
node_id: rep02.ip8.ipnt.uk
|
||||
member_id: walshie86
|
||||
area: IP8
|
||||
lat: 52.035648
|
||||
lon: 1.073271
|
||||
location_description: Washbrook
|
||||
hardware: Sensecap Solar
|
||||
elevation: 13
|
||||
role: infra
|
||||
@@ -47,6 +47,7 @@ services:
|
||||
- MQTT_USERNAME=${MQTT_USERNAME:-}
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- SERIAL_PORT=${SERIAL_PORT:-/dev/ttyUSB0}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-}
|
||||
@@ -81,6 +82,7 @@ services:
|
||||
- MQTT_USERNAME=${MQTT_USERNAME:-}
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- SERIAL_PORT=${SERIAL_PORT_SENDER:-/dev/ttyUSB1}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS_SENDER:-}
|
||||
@@ -112,6 +114,7 @@ services:
|
||||
- MQTT_USERNAME=${MQTT_USERNAME:-}
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MOCK_DEVICE=true
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef}
|
||||
command: ["interface", "receiver", "--mock"]
|
||||
@@ -145,6 +148,7 @@ services:
|
||||
- MQTT_USERNAME=${MQTT_USERNAME:-}
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- DATA_HOME=/data
|
||||
- SEED_HOME=/seed
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
@@ -161,6 +165,12 @@ services:
|
||||
- WEBHOOK_TIMEOUT=${WEBHOOK_TIMEOUT:-10.0}
|
||||
- WEBHOOK_MAX_RETRIES=${WEBHOOK_MAX_RETRIES:-3}
|
||||
- WEBHOOK_RETRY_BACKOFF=${WEBHOOK_RETRY_BACKOFF:-2.0}
|
||||
# Data retention and cleanup configuration
|
||||
- DATA_RETENTION_ENABLED=${DATA_RETENTION_ENABLED:-true}
|
||||
- DATA_RETENTION_DAYS=${DATA_RETENTION_DAYS:-30}
|
||||
- DATA_RETENTION_INTERVAL_HOURS=${DATA_RETENTION_INTERVAL_HOURS:-24}
|
||||
- NODE_CLEANUP_ENABLED=${NODE_CLEANUP_ENABLED:-true}
|
||||
- NODE_CLEANUP_DAYS=${NODE_CLEANUP_DAYS:-7}
|
||||
command: ["collector"]
|
||||
healthcheck:
|
||||
test: ["CMD", "meshcore-hub", "health", "collector"]
|
||||
@@ -197,6 +207,7 @@ services:
|
||||
- MQTT_USERNAME=${MQTT_USERNAME:-}
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- DATA_HOME=/data
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
- DATABASE_URL=
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 133 KiB |
+2
-6
@@ -1,10 +1,10 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=68.0", "wheel", "setuptools-scm>=8.0"]
|
||||
requires = ["setuptools>=68.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "meshcore-hub"
|
||||
dynamic = ["version"]
|
||||
version = "0.0.0"
|
||||
description = "Python monorepo for managing and orchestrating MeshCore mesh networks"
|
||||
readme = "README.md"
|
||||
license = {text = "GPL-3.0-or-later"}
|
||||
@@ -68,10 +68,6 @@ Documentation = "https://github.com/ipnet-mesh/meshcore-hub#readme"
|
||||
Repository = "https://github.com/ipnet-mesh/meshcore-hub"
|
||||
Issues = "https://github.com/ipnet-mesh/meshcore-hub/issues"
|
||||
|
||||
[tool.setuptools_scm]
|
||||
version_file = "src/meshcore_hub/_version.py"
|
||||
fallback_version = "0.0.0+unknown"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""MeshCore Hub - Python monorepo for managing MeshCore mesh networks."""
|
||||
|
||||
from meshcore_hub._version import __version__, __version_tuple__
|
||||
from meshcore_hub._version import __version__
|
||||
|
||||
__all__ = ["__version__", "__version_tuple__"]
|
||||
__all__ = ["__version__"]
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
"""MeshCore Hub version information.
|
||||
|
||||
This file contains the version string for the package.
|
||||
It can be overridden at build time by setting BUILD_VERSION environment variable.
|
||||
"""
|
||||
|
||||
__version__ = "dev"
|
||||
__all__ = ["__version__"]
|
||||
@@ -52,6 +52,7 @@ def create_app(
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
cors_origins: list[str] | None = None,
|
||||
) -> FastAPI:
|
||||
"""Create and configure the FastAPI application.
|
||||
@@ -63,6 +64,7 @@ def create_app(
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
cors_origins: Allowed CORS origins
|
||||
|
||||
Returns:
|
||||
@@ -85,6 +87,7 @@ def create_app(
|
||||
app.state.mqtt_host = mqtt_host
|
||||
app.state.mqtt_port = mqtt_port
|
||||
app.state.mqtt_prefix = mqtt_prefix
|
||||
app.state.mqtt_tls = mqtt_tls
|
||||
|
||||
# Configure CORS
|
||||
if cors_origins is None:
|
||||
|
||||
@@ -67,6 +67,13 @@ import click
|
||||
envvar="MQTT_TOPIC_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-tls",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--cors-origins",
|
||||
type=str,
|
||||
@@ -92,6 +99,7 @@ def api(
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_prefix: str,
|
||||
mqtt_tls: bool,
|
||||
cors_origins: str | None,
|
||||
reload: bool,
|
||||
) -> None:
|
||||
@@ -171,6 +179,7 @@ def api(
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
cors_origins=origins_list,
|
||||
)
|
||||
|
||||
|
||||
@@ -57,6 +57,7 @@ def get_mqtt_client(request: Request) -> MQTTClient:
|
||||
mqtt_host = getattr(request.app.state, "mqtt_host", "localhost")
|
||||
mqtt_port = getattr(request.app.state, "mqtt_port", 1883)
|
||||
mqtt_prefix = getattr(request.app.state, "mqtt_prefix", "meshcore")
|
||||
mqtt_tls = getattr(request.app.state, "mqtt_tls", False)
|
||||
|
||||
# Use unique client ID to allow multiple API instances
|
||||
unique_id = uuid.uuid4().hex[:8]
|
||||
@@ -65,6 +66,7 @@ def get_mqtt_client(request: Request) -> MQTTClient:
|
||||
port=mqtt_port,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-api-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
)
|
||||
|
||||
client = MQTTClient(config)
|
||||
|
||||
@@ -19,12 +19,12 @@ from meshcore_hub.common.schemas.messages import (
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _get_friendly_name(node: Optional[Node]) -> Optional[str]:
|
||||
"""Extract friendly_name tag from a node's tags."""
|
||||
def _get_tag_name(node: Optional[Node]) -> Optional[str]:
|
||||
"""Extract name tag from a node's tags."""
|
||||
if not node or not node.tags:
|
||||
return None
|
||||
for tag in node.tags:
|
||||
if tag.key == "friendly_name":
|
||||
if tag.key == "name":
|
||||
return tag.value
|
||||
return None
|
||||
|
||||
@@ -57,15 +57,15 @@ def _fetch_receivers_for_events(
|
||||
receivers_by_hash: dict[str, list[ReceiverInfo]] = {}
|
||||
|
||||
node_ids = [r.node_id for r in results]
|
||||
friendly_names: dict[str, str] = {}
|
||||
tag_names: dict[str, str] = {}
|
||||
if node_ids:
|
||||
fn_query = (
|
||||
tag_query = (
|
||||
select(NodeTag.node_id, NodeTag.value)
|
||||
.where(NodeTag.node_id.in_(node_ids))
|
||||
.where(NodeTag.key == "friendly_name")
|
||||
.where(NodeTag.key == "name")
|
||||
)
|
||||
for node_id, value in session.execute(fn_query).all():
|
||||
friendly_names[node_id] = value
|
||||
for node_id, value in session.execute(tag_query).all():
|
||||
tag_names[node_id] = value
|
||||
|
||||
for row in results:
|
||||
if row.event_hash not in receivers_by_hash:
|
||||
@@ -76,7 +76,7 @@ def _fetch_receivers_for_events(
|
||||
node_id=row.node_id,
|
||||
public_key=row.public_key,
|
||||
name=row.name,
|
||||
friendly_name=friendly_names.get(row.node_id),
|
||||
tag_name=tag_names.get(row.node_id),
|
||||
snr=row.snr,
|
||||
received_at=row.received_at,
|
||||
)
|
||||
@@ -173,11 +173,11 @@ async def list_advertisements(
|
||||
data = {
|
||||
"received_by": row.receiver_pk,
|
||||
"receiver_name": row.receiver_name,
|
||||
"receiver_friendly_name": _get_friendly_name(receiver_node),
|
||||
"receiver_tag_name": _get_tag_name(receiver_node),
|
||||
"public_key": adv.public_key,
|
||||
"name": adv.name,
|
||||
"node_name": row.source_name,
|
||||
"node_friendly_name": _get_friendly_name(source_node),
|
||||
"node_tag_name": _get_tag_name(source_node),
|
||||
"adv_type": adv.adv_type or row.source_adv_type,
|
||||
"flags": adv.flags,
|
||||
"received_at": adv.received_at,
|
||||
@@ -255,11 +255,11 @@ async def get_advertisement(
|
||||
data = {
|
||||
"received_by": result.receiver_pk,
|
||||
"receiver_name": result.receiver_name,
|
||||
"receiver_friendly_name": _get_friendly_name(receiver_node),
|
||||
"receiver_tag_name": _get_tag_name(receiver_node),
|
||||
"public_key": adv.public_key,
|
||||
"name": adv.name,
|
||||
"node_name": result.source_name,
|
||||
"node_friendly_name": _get_friendly_name(source_node),
|
||||
"node_tag_name": _get_tag_name(source_node),
|
||||
"adv_type": adv.adv_type or result.source_adv_type,
|
||||
"flags": adv.flags,
|
||||
"received_at": adv.received_at,
|
||||
|
||||
@@ -82,11 +82,11 @@ async def get_stats(
|
||||
.all()
|
||||
)
|
||||
|
||||
# Get node names, adv_types, and friendly_name tags for the advertised nodes
|
||||
# Get node names, adv_types, and name tags for the advertised nodes
|
||||
ad_public_keys = [ad.public_key for ad in recent_ads]
|
||||
node_names: dict[str, str] = {}
|
||||
node_adv_types: dict[str, str] = {}
|
||||
friendly_names: dict[str, str] = {}
|
||||
tag_names: dict[str, str] = {}
|
||||
if ad_public_keys:
|
||||
# Get node names and adv_types from Node table
|
||||
node_query = select(Node.public_key, Node.name, Node.adv_type).where(
|
||||
@@ -98,21 +98,21 @@ async def get_stats(
|
||||
if adv_type:
|
||||
node_adv_types[public_key] = adv_type
|
||||
|
||||
# Get friendly_name tags
|
||||
friendly_name_query = (
|
||||
# Get name tags
|
||||
tag_name_query = (
|
||||
select(Node.public_key, NodeTag.value)
|
||||
.join(NodeTag, Node.id == NodeTag.node_id)
|
||||
.where(Node.public_key.in_(ad_public_keys))
|
||||
.where(NodeTag.key == "friendly_name")
|
||||
.where(NodeTag.key == "name")
|
||||
)
|
||||
for public_key, value in session.execute(friendly_name_query).all():
|
||||
friendly_names[public_key] = value
|
||||
for public_key, value in session.execute(tag_name_query).all():
|
||||
tag_names[public_key] = value
|
||||
|
||||
recent_advertisements = [
|
||||
RecentAdvertisement(
|
||||
public_key=ad.public_key,
|
||||
name=ad.name or node_names.get(ad.public_key),
|
||||
friendly_name=friendly_names.get(ad.public_key),
|
||||
tag_name=tag_names.get(ad.public_key),
|
||||
adv_type=ad.adv_type or node_adv_types.get(ad.public_key),
|
||||
received_at=ad.received_at,
|
||||
)
|
||||
@@ -146,7 +146,7 @@ async def get_stats(
|
||||
# Look up sender names for these messages
|
||||
msg_prefixes = [m.pubkey_prefix for m in channel_msgs if m.pubkey_prefix]
|
||||
msg_sender_names: dict[str, str] = {}
|
||||
msg_friendly_names: dict[str, str] = {}
|
||||
msg_tag_names: dict[str, str] = {}
|
||||
if msg_prefixes:
|
||||
for prefix in set(msg_prefixes):
|
||||
sender_node_query = select(Node.public_key, Node.name).where(
|
||||
@@ -156,14 +156,14 @@ async def get_stats(
|
||||
if name:
|
||||
msg_sender_names[public_key[:12]] = name
|
||||
|
||||
sender_friendly_query = (
|
||||
sender_tag_query = (
|
||||
select(Node.public_key, NodeTag.value)
|
||||
.join(NodeTag, Node.id == NodeTag.node_id)
|
||||
.where(Node.public_key.startswith(prefix))
|
||||
.where(NodeTag.key == "friendly_name")
|
||||
.where(NodeTag.key == "name")
|
||||
)
|
||||
for public_key, value in session.execute(sender_friendly_query).all():
|
||||
msg_friendly_names[public_key[:12]] = value
|
||||
for public_key, value in session.execute(sender_tag_query).all():
|
||||
msg_tag_names[public_key[:12]] = value
|
||||
|
||||
channel_messages[int(channel_idx)] = [
|
||||
ChannelMessage(
|
||||
@@ -171,8 +171,8 @@ async def get_stats(
|
||||
sender_name=(
|
||||
msg_sender_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
),
|
||||
sender_friendly_name=(
|
||||
msg_friendly_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
sender_tag_name=(
|
||||
msg_tag_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
),
|
||||
pubkey_prefix=m.pubkey_prefix,
|
||||
received_at=m.received_at,
|
||||
|
||||
@@ -41,7 +41,7 @@ def _enrich_member_nodes(
|
||||
updated_at=mn.updated_at,
|
||||
node_name=info.get("name"),
|
||||
node_adv_type=info.get("adv_type"),
|
||||
friendly_name=info.get("friendly_name"),
|
||||
tag_name=info.get("tag_name"),
|
||||
)
|
||||
)
|
||||
return enriched_nodes
|
||||
@@ -100,15 +100,15 @@ async def list_members(
|
||||
)
|
||||
nodes = session.execute(node_query).scalars().all()
|
||||
for node in nodes:
|
||||
friendly_name = None
|
||||
tag_name = None
|
||||
for tag in node.tags:
|
||||
if tag.key == "friendly_name":
|
||||
friendly_name = tag.value
|
||||
if tag.key == "name":
|
||||
tag_name = tag.value
|
||||
break
|
||||
node_info[node.public_key] = {
|
||||
"name": node.name,
|
||||
"adv_type": node.adv_type,
|
||||
"friendly_name": friendly_name,
|
||||
"tag_name": tag_name,
|
||||
}
|
||||
|
||||
return MemberList(
|
||||
@@ -145,15 +145,15 @@ async def get_member(
|
||||
)
|
||||
nodes = session.execute(node_query).scalars().all()
|
||||
for node in nodes:
|
||||
friendly_name = None
|
||||
tag_name = None
|
||||
for tag in node.tags:
|
||||
if tag.key == "friendly_name":
|
||||
friendly_name = tag.value
|
||||
if tag.key == "name":
|
||||
tag_name = tag.value
|
||||
break
|
||||
node_info[node.public_key] = {
|
||||
"name": node.name,
|
||||
"adv_type": node.adv_type,
|
||||
"friendly_name": friendly_name,
|
||||
"tag_name": tag_name,
|
||||
}
|
||||
|
||||
return _member_to_read(member, node_info)
|
||||
|
||||
@@ -15,12 +15,12 @@ from meshcore_hub.common.schemas.messages import MessageList, MessageRead, Recei
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _get_tag_name(node: Optional[Node]) -> Optional[str]:
    """Return the value of a node's "name" tag, or None when absent.

    Args:
        node: Node whose tags to inspect; may be None.

    Returns:
        The value of the first tag keyed "name", or None when the node is
        missing, has no tags, or carries no such tag.
    """
    if not node or not node.tags:
        return None
    # First matching tag wins, mirroring a linear scan of node.tags.
    return next((tag.value for tag in node.tags if tag.key == "name"), None)
|
||||
|
||||
@@ -64,17 +64,17 @@ def _fetch_receivers_for_events(
|
||||
# Group by event_hash
|
||||
receivers_by_hash: dict[str, list[ReceiverInfo]] = {}
|
||||
|
||||
# Get friendly names for receiver nodes
|
||||
# Get tag names for receiver nodes
|
||||
node_ids = [r.node_id for r in results]
|
||||
friendly_names: dict[str, str] = {}
|
||||
tag_names: dict[str, str] = {}
|
||||
if node_ids:
|
||||
fn_query = (
|
||||
tag_query = (
|
||||
select(NodeTag.node_id, NodeTag.value)
|
||||
.where(NodeTag.node_id.in_(node_ids))
|
||||
.where(NodeTag.key == "friendly_name")
|
||||
.where(NodeTag.key == "name")
|
||||
)
|
||||
for node_id, value in session.execute(fn_query).all():
|
||||
friendly_names[node_id] = value
|
||||
for node_id, value in session.execute(tag_query).all():
|
||||
tag_names[node_id] = value
|
||||
|
||||
for row in results:
|
||||
if row.event_hash not in receivers_by_hash:
|
||||
@@ -85,7 +85,7 @@ def _fetch_receivers_for_events(
|
||||
node_id=row.node_id,
|
||||
public_key=row.public_key,
|
||||
name=row.name,
|
||||
friendly_name=friendly_names.get(row.node_id),
|
||||
tag_name=tag_names.get(row.node_id),
|
||||
snr=row.snr,
|
||||
received_at=row.received_at,
|
||||
)
|
||||
@@ -153,10 +153,10 @@ async def list_messages(
|
||||
# Execute
|
||||
results = session.execute(query).all()
|
||||
|
||||
# Look up sender names and friendly_names for senders with pubkey_prefix
|
||||
# Look up sender names and tag names for senders with pubkey_prefix
|
||||
pubkey_prefixes = [r[0].pubkey_prefix for r in results if r[0].pubkey_prefix]
|
||||
sender_names: dict[str, str] = {}
|
||||
friendly_names: dict[str, str] = {}
|
||||
sender_tag_names: dict[str, str] = {}
|
||||
if pubkey_prefixes:
|
||||
# Find nodes whose public_key starts with any of these prefixes
|
||||
for prefix in set(pubkey_prefixes):
|
||||
@@ -168,15 +168,15 @@ async def list_messages(
|
||||
if name:
|
||||
sender_names[public_key[:12]] = name
|
||||
|
||||
# Get friendly_name tag
|
||||
friendly_name_query = (
|
||||
# Get name tag
|
||||
tag_name_query = (
|
||||
select(Node.public_key, NodeTag.value)
|
||||
.join(NodeTag, Node.id == NodeTag.node_id)
|
||||
.where(Node.public_key.startswith(prefix))
|
||||
.where(NodeTag.key == "friendly_name")
|
||||
.where(NodeTag.key == "name")
|
||||
)
|
||||
for public_key, value in session.execute(friendly_name_query).all():
|
||||
friendly_names[public_key[:12]] = value
|
||||
for public_key, value in session.execute(tag_name_query).all():
|
||||
sender_tag_names[public_key[:12]] = value
|
||||
|
||||
# Collect receiver node IDs to fetch tags
|
||||
receiver_ids = set()
|
||||
@@ -214,14 +214,14 @@ async def list_messages(
|
||||
"receiver_node_id": m.receiver_node_id,
|
||||
"received_by": receiver_pk,
|
||||
"receiver_name": receiver_name,
|
||||
"receiver_friendly_name": _get_friendly_name(receiver_node),
|
||||
"receiver_tag_name": _get_tag_name(receiver_node),
|
||||
"message_type": m.message_type,
|
||||
"pubkey_prefix": m.pubkey_prefix,
|
||||
"sender_name": (
|
||||
sender_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
),
|
||||
"sender_friendly_name": (
|
||||
friendly_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
"sender_tag_name": (
|
||||
sender_tag_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
|
||||
),
|
||||
"channel_idx": m.channel_idx,
|
||||
"text": m.text,
|
||||
|
||||
@@ -0,0 +1,225 @@
|
||||
"""Data retention and cleanup service for MeshCore Hub.
|
||||
|
||||
This module provides functionality to delete old event data and inactive nodes
|
||||
based on configured retention policies.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from sqlalchemy import delete, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from meshcore_hub.common.models import (
|
||||
Advertisement,
|
||||
EventLog,
|
||||
Message,
|
||||
Node,
|
||||
Telemetry,
|
||||
TracePath,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CleanupStats:
    """Aggregated record counts from a single cleanup operation."""

    def __init__(self) -> None:
        # Per-table deletion counters; total_deleted is the sum of the
        # event-table counters (nodes are tracked separately).
        self.advertisements_deleted: int = 0
        self.messages_deleted: int = 0
        self.telemetry_deleted: int = 0
        self.trace_paths_deleted: int = 0
        self.event_logs_deleted: int = 0
        self.nodes_deleted: int = 0
        self.total_deleted: int = 0

    def __repr__(self) -> str:
        # Keep the exact field order used by the log messages.
        parts = (
            f"total={self.total_deleted}",
            f"advertisements={self.advertisements_deleted}",
            f"messages={self.messages_deleted}",
            f"telemetry={self.telemetry_deleted}",
            f"trace_paths={self.trace_paths_deleted}",
            f"event_logs={self.event_logs_deleted}",
            f"nodes={self.nodes_deleted}",
        )
        return f"CleanupStats({', '.join(parts)})"
|
||||
|
||||
|
||||
async def cleanup_old_data(
    db: AsyncSession,
    retention_days: int,
    dry_run: bool = False,
) -> CleanupStats:
    """Delete event data older than the retention period.

    Purges advertisements, messages, telemetry, trace paths, and event
    logs whose creation time predates the cutoff. Node records are not
    touched here (see ``cleanup_inactive_nodes``).

    Args:
        db: Database session
        retention_days: Number of days to retain data
        dry_run: If True, only count records without deleting

    Returns:
        CleanupStats object with deletion counts
    """
    stats = CleanupStats()
    cutoff_date = datetime.now(timezone.utc) - timedelta(days=retention_days)

    logger.info(
        "Starting data cleanup (dry_run=%s, retention_days=%d, cutoff=%s)",
        dry_run,
        retention_days,
        cutoff_date.isoformat(),
    )

    # (model, table label for logging, CleanupStats attribute) — one entry
    # per event table covered by the retention policy. Driving the cleanup
    # from this list avoids five copy-pasted call sites and a manual sum.
    targets = [
        (Advertisement, "advertisements", "advertisements_deleted"),
        (Message, "messages", "messages_deleted"),
        (Telemetry, "telemetry", "telemetry_deleted"),
        (TracePath, "trace_paths", "trace_paths_deleted"),
        (EventLog, "event_logs", "event_logs_deleted"),
    ]
    for model, table_name, stat_attr in targets:
        deleted = await _cleanup_table(db, model, cutoff_date, table_name, dry_run)
        setattr(stats, stat_attr, deleted)
        stats.total_deleted += deleted

    if not dry_run:
        # _cleanup_table does not commit; flush all deletions as one unit.
        await db.commit()
        logger.info("Cleanup completed: %s", stats)
    else:
        logger.info("Cleanup dry run completed: %s", stats)

    return stats
|
||||
|
||||
|
||||
async def _cleanup_table(
    db: AsyncSession,
    model: type,
    cutoff_date: datetime,
    table_name: str,
    dry_run: bool,
) -> int:
    """Delete old records from a specific table.

    Args:
        db: Database session
        model: SQLAlchemy model class; must expose a ``created_at`` column
        cutoff_date: Delete records older than this date
        table_name: Name of table for logging
        dry_run: If True, only count without deleting

    Returns:
        Number of records deleted (or would be deleted in dry_run)
    """
    # NOTE: `select` is already imported at module level; the previous
    # function-local `from sqlalchemy import select` was redundant.
    # The caller (cleanup_old_data) is responsible for committing.
    if dry_run:
        # Count records that would be deleted without touching them.
        stmt = (
            select(func.count())
            .select_from(model)
            .where(model.created_at < cutoff_date)  # type: ignore[attr-defined]
        )
        result = await db.execute(stmt)
        count = result.scalar() or 0
        logger.debug(
            "[DRY RUN] Would delete %d records from %s older than %s",
            count,
            table_name,
            cutoff_date.isoformat(),
        )
    else:
        result = await db.execute(
            delete(model).where(model.created_at < cutoff_date)  # type: ignore[attr-defined]
        )
        count = result.rowcount or 0  # type: ignore[attr-defined]
        logger.debug(
            "Deleted %d records from %s older than %s",
            count,
            table_name,
            cutoff_date.isoformat(),
        )
    return count
|
||||
|
||||
|
||||
async def cleanup_inactive_nodes(
    db: AsyncSession,
    inactivity_days: int,
    dry_run: bool = False,
) -> int:
    """Delete nodes that haven't been seen for the specified number of days.

    Only nodes whose last_seen timestamp is older than the cutoff are
    removed. Nodes with last_seen=NULL are NOT deleted (never seen on
    network).

    Args:
        db: Database session
        inactivity_days: Delete nodes not seen for this many days
        dry_run: If True, only count without deleting

    Returns:
        Number of nodes deleted (or would be deleted in dry_run)
    """
    cutoff_date = datetime.now(timezone.utc) - timedelta(days=inactivity_days)

    logger.info(
        "Starting node cleanup (dry_run=%s, inactivity_days=%d, cutoff=%s)",
        dry_run,
        inactivity_days,
        cutoff_date.isoformat(),
    )

    if dry_run:
        # Count only; the isnot(None) filter documents the NULL exclusion
        # explicitly even though the < comparison already excludes NULLs.
        count_stmt = (
            select(func.count())
            .select_from(Node)
            .where(Node.last_seen < cutoff_date)
            .where(Node.last_seen.isnot(None))
        )
        count = (await db.execute(count_stmt)).scalar() or 0
        logger.info(
            "[DRY RUN] Would delete %d nodes not seen since %s",
            count,
            cutoff_date.isoformat(),
        )
        return count

    delete_stmt = (
        delete(Node)
        .where(Node.last_seen < cutoff_date)
        .where(Node.last_seen.isnot(None))
    )
    result = await db.execute(delete_stmt)
    # Unlike the event-table cleanup, this function commits its own work.
    await db.commit()
    count = result.rowcount or 0  # type: ignore[attr-defined]
    logger.info(
        "Deleted %d nodes not seen since %s",
        count,
        cutoff_date.isoformat(),
    )
    return count
|
||||
@@ -47,6 +47,13 @@ if TYPE_CHECKING:
|
||||
envvar="MQTT_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-tls",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--data-home",
|
||||
type=str,
|
||||
@@ -82,6 +89,7 @@ def collector(
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
data_home: str | None,
|
||||
seed_home: str | None,
|
||||
database_url: str | None,
|
||||
@@ -125,6 +133,7 @@ def collector(
|
||||
ctx.obj["mqtt_username"] = mqtt_username
|
||||
ctx.obj["mqtt_password"] = mqtt_password
|
||||
ctx.obj["prefix"] = prefix
|
||||
ctx.obj["mqtt_tls"] = mqtt_tls
|
||||
ctx.obj["data_home"] = data_home or settings.data_home
|
||||
ctx.obj["seed_home"] = settings.effective_seed_home
|
||||
ctx.obj["database_url"] = effective_db_url
|
||||
@@ -139,6 +148,7 @@ def collector(
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
database_url=effective_db_url,
|
||||
log_level=log_level,
|
||||
data_home=data_home or settings.data_home,
|
||||
@@ -152,6 +162,7 @@ def _run_collector_service(
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
database_url: str,
|
||||
log_level: str,
|
||||
data_home: str,
|
||||
@@ -228,6 +239,26 @@ def _run_collector_service(
|
||||
|
||||
from meshcore_hub.collector.subscriber import run_collector
|
||||
|
||||
# Show cleanup configuration
|
||||
click.echo("")
|
||||
click.echo("Cleanup configuration:")
|
||||
if settings.data_retention_enabled:
|
||||
click.echo(
|
||||
f" Event data: Enabled (retention: {settings.data_retention_days} days)"
|
||||
)
|
||||
else:
|
||||
click.echo(" Event data: Disabled")
|
||||
|
||||
if settings.node_cleanup_enabled:
|
||||
click.echo(
|
||||
f" Inactive nodes: Enabled (inactivity: {settings.node_cleanup_days} days)"
|
||||
)
|
||||
else:
|
||||
click.echo(" Inactive nodes: Disabled")
|
||||
|
||||
if settings.data_retention_enabled or settings.node_cleanup_enabled:
|
||||
click.echo(f" Interval: {settings.data_retention_interval_hours} hours")
|
||||
|
||||
click.echo("")
|
||||
click.echo("Starting MQTT subscriber...")
|
||||
run_collector(
|
||||
@@ -236,8 +267,14 @@ def _run_collector_service(
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=settings.data_retention_enabled,
|
||||
cleanup_retention_days=settings.data_retention_days,
|
||||
cleanup_interval_hours=settings.data_retention_interval_hours,
|
||||
node_cleanup_enabled=settings.node_cleanup_enabled,
|
||||
node_cleanup_days=settings.node_cleanup_days,
|
||||
)
|
||||
|
||||
|
||||
@@ -254,6 +291,7 @@ def run_cmd(ctx: click.Context) -> None:
|
||||
mqtt_username=ctx.obj["mqtt_username"],
|
||||
mqtt_password=ctx.obj["mqtt_password"],
|
||||
prefix=ctx.obj["prefix"],
|
||||
mqtt_tls=ctx.obj["mqtt_tls"],
|
||||
database_url=ctx.obj["database_url"],
|
||||
log_level=ctx.obj["log_level"],
|
||||
data_home=ctx.obj["data_home"],
|
||||
@@ -345,8 +383,11 @@ def _run_seed_import(
|
||||
file_path=str(node_tags_file),
|
||||
db=db,
|
||||
create_nodes=create_nodes,
|
||||
clear_existing=True,
|
||||
)
|
||||
if verbose:
|
||||
if stats["deleted"]:
|
||||
click.echo(f" Deleted {stats['deleted']} existing tags")
|
||||
click.echo(
|
||||
f" Tags: {stats['created']} created, {stats['updated']} updated"
|
||||
)
|
||||
@@ -390,16 +431,24 @@ def _run_seed_import(
|
||||
default=False,
|
||||
help="Skip tags for nodes that don't exist (default: create nodes)",
|
||||
)
|
||||
@click.option(
|
||||
"--clear-existing",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Delete all existing tags before importing",
|
||||
)
|
||||
@click.pass_context
|
||||
def import_tags_cmd(
|
||||
ctx: click.Context,
|
||||
file: str | None,
|
||||
no_create_nodes: bool,
|
||||
clear_existing: bool,
|
||||
) -> None:
|
||||
"""Import node tags from a YAML file.
|
||||
|
||||
Reads a YAML file containing tag definitions and upserts them
|
||||
into the database. Existing tags are updated, new tags are created.
|
||||
into the database. By default, existing tags are updated and new tags are created.
|
||||
Use --clear-existing to delete all tags before importing.
|
||||
|
||||
FILE is the path to the YAML file containing tags.
|
||||
If not provided, defaults to {SEED_HOME}/node_tags.yaml.
|
||||
@@ -454,11 +503,14 @@ def import_tags_cmd(
|
||||
file_path=tags_file,
|
||||
db=db,
|
||||
create_nodes=not no_create_nodes,
|
||||
clear_existing=clear_existing,
|
||||
)
|
||||
|
||||
# Report results
|
||||
click.echo("")
|
||||
click.echo("Import complete:")
|
||||
if stats["deleted"]:
|
||||
click.echo(f" Tags deleted: {stats['deleted']}")
|
||||
click.echo(f" Total tags in file: {stats['total']}")
|
||||
click.echo(f" Tags created: {stats['created']}")
|
||||
click.echo(f" Tags updated: {stats['updated']}")
|
||||
@@ -549,3 +601,90 @@ def import_members_cmd(
|
||||
click.echo(f" - {error}", err=True)
|
||||
|
||||
db.dispose()
|
||||
|
||||
|
||||
@collector.command("cleanup")
@click.option(
    "--retention-days",
    type=int,
    default=30,
    envvar="DATA_RETENTION_DAYS",
    help="Number of days to retain data (default: 30)",
)
@click.option(
    "--dry-run",
    is_flag=True,
    default=False,
    help="Show what would be deleted without deleting",
)
@click.pass_context
def cleanup_cmd(
    ctx: click.Context,
    retention_days: int,
    dry_run: bool,
) -> None:
    """Manually run data cleanup to delete old events.

    Deletes event data older than the retention period:
    - Advertisements
    - Messages (channel and direct)
    - Telemetry
    - Trace paths
    - Event logs

    Node records are never deleted - only event data.

    Use --dry-run to preview what would be deleted without
    actually deleting anything.
    """
    import asyncio

    configure_logging(level=ctx.obj["log_level"])

    mode = "DRY RUN" if dry_run else "LIVE"
    click.echo(f"Database: {ctx.obj['database_url']}")
    click.echo(f"Retention: {retention_days} days")
    click.echo(f"Mode: {mode}")
    click.echo("")

    if dry_run:
        click.echo("Running in dry-run mode - no data will be deleted.")
    else:
        # Destructive path: require explicit confirmation before deleting.
        click.echo("WARNING: This will permanently delete old event data!")
        if not click.confirm("Continue?"):
            click.echo("Aborted.")
            return

    click.echo("")

    from meshcore_hub.collector.cleanup import cleanup_old_data
    from meshcore_hub.common.database import DatabaseManager

    db = DatabaseManager(ctx.obj["database_url"])

    async def _execute() -> None:
        # Run the cleanup inside a single async session, then report.
        async with db.async_session() as session:
            stats = await cleanup_old_data(
                session,
                retention_days,
                dry_run=dry_run,
            )

        click.echo("")
        click.echo("Cleanup results:")
        click.echo(f" Advertisements: {stats.advertisements_deleted}")
        click.echo(f" Messages: {stats.messages_deleted}")
        click.echo(f" Telemetry: {stats.telemetry_deleted}")
        click.echo(f" Trace paths: {stats.trace_paths_deleted}")
        click.echo(f" Event logs: {stats.event_logs_deleted}")
        click.echo(f" Total: {stats.total_deleted}")

        if dry_run:
            click.echo("")
            click.echo("(Dry run - no data was actually deleted)")

    asyncio.run(_execute())
    db.dispose()
    click.echo("")
    click.echo("Cleanup complete." if not dry_run else "Dry run complete.")
|
||||
|
||||
@@ -6,6 +6,7 @@ The subscriber:
|
||||
3. Routes events to appropriate handlers
|
||||
4. Persists data to database
|
||||
5. Dispatches events to configured webhooks
|
||||
6. Performs scheduled data cleanup if enabled
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
@@ -14,6 +15,7 @@ import signal
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Callable, Optional, TYPE_CHECKING
|
||||
|
||||
from meshcore_hub.common.database import DatabaseManager
|
||||
@@ -38,6 +40,11 @@ class Subscriber:
|
||||
mqtt_client: MQTTClient,
|
||||
db_manager: DatabaseManager,
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
cleanup_retention_days: int = 30,
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
):
|
||||
"""Initialize subscriber.
|
||||
|
||||
@@ -45,6 +52,11 @@ class Subscriber:
|
||||
mqtt_client: MQTT client instance
|
||||
db_manager: Database manager instance
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
cleanup_retention_days: Number of days to retain event data
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
"""
|
||||
self.mqtt = mqtt_client
|
||||
self.db = db_manager
|
||||
@@ -59,6 +71,14 @@ class Subscriber:
|
||||
self._webhook_queue: list[tuple[str, dict[str, Any], str]] = []
|
||||
self._webhook_lock = threading.Lock()
|
||||
self._webhook_thread: Optional[threading.Thread] = None
|
||||
# Data cleanup
|
||||
self._cleanup_enabled = cleanup_enabled
|
||||
self._cleanup_retention_days = cleanup_retention_days
|
||||
self._cleanup_interval_hours = cleanup_interval_hours
|
||||
self._node_cleanup_enabled = node_cleanup_enabled
|
||||
self._node_cleanup_days = node_cleanup_days
|
||||
self._cleanup_thread: Optional[threading.Thread] = None
|
||||
self._last_cleanup: Optional[datetime] = None
|
||||
|
||||
@property
|
||||
def is_healthy(self) -> bool:
|
||||
@@ -202,6 +222,115 @@ class Subscriber:
|
||||
if self._webhook_thread.is_alive():
|
||||
logger.warning("Webhook processor thread did not stop cleanly")
|
||||
|
||||
def _start_cleanup_scheduler(self) -> None:
    """Start background thread for periodic data cleanup."""
    if not self._cleanup_enabled and not self._node_cleanup_enabled:
        logger.info("Data cleanup and node cleanup are both disabled")
        return

    logger.info(
        "Starting cleanup scheduler (interval_hours=%d)",
        self._cleanup_interval_hours,
    )
    if self._cleanup_enabled:
        logger.info(
            " Event data cleanup: ENABLED (retention_days=%d)",
            self._cleanup_retention_days,
        )
    else:
        logger.info(" Event data cleanup: DISABLED")

    if self._node_cleanup_enabled:
        logger.info(
            " Node cleanup: ENABLED (inactivity_days=%d)", self._node_cleanup_days
        )
    else:
        logger.info(" Node cleanup: DISABLED")

    def _scheduler_loop() -> None:
        """Run async cleanup tasks in a dedicated background thread."""
        # The thread owns a private event loop for the async DB work;
        # it is closed when the thread exits.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        try:
            while self._running:
                now = datetime.now(timezone.utc)
                if self._last_cleanup is None:
                    due = True  # first pass always runs
                else:
                    elapsed_hours = (
                        now - self._last_cleanup
                    ).total_seconds() / 3600
                    due = elapsed_hours >= self._cleanup_interval_hours

                if due:
                    try:
                        logger.info("Starting scheduled cleanup")
                        from meshcore_hub.collector.cleanup import (
                            cleanup_inactive_nodes,
                            cleanup_old_data,
                        )

                        async def _do_cleanup() -> None:
                            async with self.db.async_session() as session:
                                if self._cleanup_enabled:
                                    stats = await cleanup_old_data(
                                        session,
                                        self._cleanup_retention_days,
                                        dry_run=False,
                                    )
                                    logger.info(
                                        "Event cleanup completed: %s", stats
                                    )
                                if self._node_cleanup_enabled:
                                    nodes_deleted = await cleanup_inactive_nodes(
                                        session,
                                        self._node_cleanup_days,
                                        dry_run=False,
                                    )
                                    logger.info(
                                        "Node cleanup completed: %d nodes deleted",
                                        nodes_deleted,
                                    )

                        loop.run_until_complete(_do_cleanup())
                        # Mark the start time of this run as the last
                        # successful cleanup; failures retry next hour.
                        self._last_cleanup = now
                    except Exception as e:
                        logger.error(f"Cleanup error: {e}", exc_info=True)

                # Re-check the interval every hour, waking each second so
                # a shutdown (self._running -> False) is noticed promptly.
                for _ in range(3600):
                    if not self._running:
                        break
                    time.sleep(1)
        finally:
            loop.close()
            logger.info("Cleanup scheduler stopped")

    self._cleanup_thread = threading.Thread(
        target=_scheduler_loop, daemon=True, name="cleanup-scheduler"
    )
    self._cleanup_thread.start()
|
||||
|
||||
def _stop_cleanup_scheduler(self) -> None:
    """Stop the cleanup scheduler thread."""
    thread = self._cleanup_thread
    if thread is None or not thread.is_alive():
        return
    # The loop exits on its own once self._running is False; just wait.
    thread.join(timeout=5.0)
    if thread.is_alive():
        logger.warning("Cleanup scheduler thread did not stop cleanly")
|
||||
|
||||
def start(self) -> None:
|
||||
"""Start the subscriber."""
|
||||
logger.info("Starting collector subscriber")
|
||||
@@ -239,6 +368,9 @@ class Subscriber:
|
||||
# Start webhook processor if configured
|
||||
self._start_webhook_processor()
|
||||
|
||||
# Start cleanup scheduler if configured
|
||||
self._start_cleanup_scheduler()
|
||||
|
||||
# Start health reporter for Docker health checks
|
||||
self._health_reporter = HealthReporter(
|
||||
component="collector",
|
||||
@@ -271,6 +403,9 @@ class Subscriber:
|
||||
self._running = False
|
||||
self._shutdown_event.set()
|
||||
|
||||
# Stop cleanup scheduler
|
||||
self._stop_cleanup_scheduler()
|
||||
|
||||
# Stop webhook processor
|
||||
self._stop_webhook_processor()
|
||||
|
||||
@@ -293,8 +428,14 @@ def create_subscriber(
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
cleanup_retention_days: int = 30,
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
) -> Subscriber:
|
||||
"""Create a configured subscriber instance.
|
||||
|
||||
@@ -304,8 +445,14 @@ def create_subscriber(
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
cleanup_retention_days: Number of days to retain event data
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
|
||||
Returns:
|
||||
Configured Subscriber instance
|
||||
@@ -319,6 +466,7 @@ def create_subscriber(
|
||||
password=mqtt_password,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-collector-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
)
|
||||
mqtt_client = MQTTClient(mqtt_config)
|
||||
|
||||
@@ -326,7 +474,16 @@ def create_subscriber(
|
||||
db_manager = DatabaseManager(database_url)
|
||||
|
||||
# Create subscriber
|
||||
subscriber = Subscriber(mqtt_client, db_manager, webhook_dispatcher)
|
||||
subscriber = Subscriber(
|
||||
mqtt_client,
|
||||
db_manager,
|
||||
webhook_dispatcher,
|
||||
cleanup_enabled=cleanup_enabled,
|
||||
cleanup_retention_days=cleanup_retention_days,
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
)
|
||||
|
||||
# Register handlers
|
||||
from meshcore_hub.collector.handlers import register_all_handlers
|
||||
@@ -342,8 +499,14 @@ def run_collector(
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
cleanup_retention_days: int = 30,
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
) -> None:
|
||||
"""Run the collector (blocking).
|
||||
|
||||
@@ -353,8 +516,14 @@ def run_collector(
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
cleanup_retention_days: Number of days to retain event data
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
"""
|
||||
subscriber = create_subscriber(
|
||||
mqtt_host=mqtt_host,
|
||||
@@ -362,8 +531,14 @@ def run_collector(
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=cleanup_enabled,
|
||||
cleanup_retention_days=cleanup_retention_days,
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
)
|
||||
|
||||
# Set up signal handlers
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import Any
|
||||
|
||||
import yaml
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import delete, func, select
|
||||
|
||||
from meshcore_hub.common.database import DatabaseManager
|
||||
from meshcore_hub.common.models import Node, NodeTag
|
||||
@@ -151,16 +151,19 @@ def import_tags(
|
||||
file_path: str | Path,
|
||||
db: DatabaseManager,
|
||||
create_nodes: bool = True,
|
||||
clear_existing: bool = False,
|
||||
) -> dict[str, Any]:
|
||||
"""Import tags from a YAML file into the database.
|
||||
|
||||
Performs upsert operations - existing tags are updated, new tags are created.
|
||||
Optionally clears all existing tags before import.
|
||||
|
||||
Args:
|
||||
file_path: Path to the tags YAML file
|
||||
db: Database manager instance
|
||||
create_nodes: If True, create nodes that don't exist. If False, skip tags
|
||||
for non-existent nodes.
|
||||
clear_existing: If True, delete all existing tags before importing.
|
||||
|
||||
Returns:
|
||||
Dictionary with import statistics:
|
||||
@@ -169,6 +172,7 @@ def import_tags(
|
||||
- updated: Number of existing tags updated
|
||||
- skipped: Number of tags skipped (node not found and create_nodes=False)
|
||||
- nodes_created: Number of new nodes created
|
||||
- deleted: Number of existing tags deleted (if clear_existing=True)
|
||||
- errors: List of error messages
|
||||
"""
|
||||
stats: dict[str, Any] = {
|
||||
@@ -177,6 +181,7 @@ def import_tags(
|
||||
"updated": 0,
|
||||
"skipped": 0,
|
||||
"nodes_created": 0,
|
||||
"deleted": 0,
|
||||
"errors": [],
|
||||
}
|
||||
|
||||
@@ -194,6 +199,15 @@ def import_tags(
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
with db.session_scope() as session:
|
||||
# Clear all existing tags if requested
|
||||
if clear_existing:
|
||||
delete_count = (
|
||||
session.execute(select(func.count()).select_from(NodeTag)).scalar() or 0
|
||||
)
|
||||
session.execute(delete(NodeTag))
|
||||
stats["deleted"] = delete_count
|
||||
logger.info(f"Deleted {delete_count} existing tags")
|
||||
|
||||
# Cache nodes by public_key to reduce queries
|
||||
node_cache: dict[str, Node] = {}
|
||||
|
||||
@@ -210,7 +224,8 @@ def import_tags(
|
||||
node = Node(
|
||||
public_key=public_key,
|
||||
first_seen=now,
|
||||
last_seen=now,
|
||||
# last_seen is intentionally left unset (None)
|
||||
# It will be set when the node is actually seen via events
|
||||
)
|
||||
session.add(node)
|
||||
session.flush()
|
||||
@@ -231,24 +246,8 @@ def import_tags(
|
||||
tag_value = tag_data.get("value")
|
||||
tag_type = tag_data.get("type", "string")
|
||||
|
||||
# Find or create tag
|
||||
tag_query = select(NodeTag).where(
|
||||
NodeTag.node_id == node.id,
|
||||
NodeTag.key == tag_key,
|
||||
)
|
||||
existing_tag = session.execute(tag_query).scalar_one_or_none()
|
||||
|
||||
if existing_tag:
|
||||
# Update existing tag
|
||||
existing_tag.value = tag_value
|
||||
existing_tag.value_type = tag_type
|
||||
stats["updated"] += 1
|
||||
logger.debug(
|
||||
f"Updated tag {tag_key}={tag_value} "
|
||||
f"for {public_key[:12]}..."
|
||||
)
|
||||
else:
|
||||
# Create new tag
|
||||
if clear_existing:
|
||||
# When clearing, always create new tags
|
||||
new_tag = NodeTag(
|
||||
node_id=node.id,
|
||||
key=tag_key,
|
||||
@@ -261,6 +260,39 @@ def import_tags(
|
||||
f"Created tag {tag_key}={tag_value} "
|
||||
f"for {public_key[:12]}..."
|
||||
)
|
||||
else:
|
||||
# Find or create tag
|
||||
tag_query = select(NodeTag).where(
|
||||
NodeTag.node_id == node.id,
|
||||
NodeTag.key == tag_key,
|
||||
)
|
||||
existing_tag = session.execute(
|
||||
tag_query
|
||||
).scalar_one_or_none()
|
||||
|
||||
if existing_tag:
|
||||
# Update existing tag
|
||||
existing_tag.value = tag_value
|
||||
existing_tag.value_type = tag_type
|
||||
stats["updated"] += 1
|
||||
logger.debug(
|
||||
f"Updated tag {tag_key}={tag_value} "
|
||||
f"for {public_key[:12]}..."
|
||||
)
|
||||
else:
|
||||
# Create new tag
|
||||
new_tag = NodeTag(
|
||||
node_id=node.id,
|
||||
key=tag_key,
|
||||
value=tag_value,
|
||||
value_type=tag_type,
|
||||
)
|
||||
session.add(new_tag)
|
||||
stats["created"] += 1
|
||||
logger.debug(
|
||||
f"Created tag {tag_key}={tag_value} "
|
||||
f"for {public_key[:12]}..."
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error processing tag {tag_key} for {public_key[:12]}...: {e}"
|
||||
|
||||
@@ -52,6 +52,9 @@ class CommonSettings(BaseSettings):
|
||||
default=None, description="MQTT password (optional)"
|
||||
)
|
||||
mqtt_prefix: str = Field(default="meshcore", description="MQTT topic prefix")
|
||||
mqtt_tls: bool = Field(
|
||||
default=False, description="Enable TLS/SSL for MQTT connection"
|
||||
)
|
||||
|
||||
|
||||
class InterfaceSettings(CommonSettings):
|
||||
@@ -70,6 +73,11 @@ class InterfaceSettings(CommonSettings):
|
||||
# Mock device
|
||||
mock_device: bool = Field(default=False, description="Use mock device for testing")
|
||||
|
||||
# Device name
|
||||
meshcore_device_name: Optional[str] = Field(
|
||||
default=None, description="Device/node name (optional)"
|
||||
)
|
||||
|
||||
|
||||
class CollectorSettings(CommonSettings):
|
||||
"""Settings for the Collector component."""
|
||||
@@ -121,6 +129,29 @@ class CollectorSettings(CommonSettings):
|
||||
default=2.0, description="Retry backoff multiplier"
|
||||
)
|
||||
|
||||
# Data retention / cleanup settings
|
||||
data_retention_enabled: bool = Field(
|
||||
default=True, description="Enable automatic event data cleanup"
|
||||
)
|
||||
data_retention_days: int = Field(
|
||||
default=30, description="Number of days to retain event data", ge=1
|
||||
)
|
||||
data_retention_interval_hours: int = Field(
|
||||
default=24,
|
||||
description="Hours between automatic cleanup runs (applies to both events and nodes)",
|
||||
ge=1,
|
||||
)
|
||||
|
||||
# Node cleanup settings
|
||||
node_cleanup_enabled: bool = Field(
|
||||
default=True, description="Enable automatic cleanup of inactive nodes"
|
||||
)
|
||||
node_cleanup_days: int = Field(
|
||||
default=7,
|
||||
description="Remove nodes not seen for this many days (last_seen)",
|
||||
ge=1,
|
||||
)
|
||||
|
||||
@property
|
||||
def collector_data_dir(self) -> str:
|
||||
"""Get the collector data directory path."""
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
"""Database connection and session management."""
|
||||
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator
|
||||
from contextlib import asynccontextmanager, contextmanager
|
||||
from typing import AsyncGenerator, Generator
|
||||
|
||||
from sqlalchemy import create_engine, event
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from meshcore_hub.common.models.base import Base
|
||||
@@ -100,6 +101,17 @@ class DatabaseManager:
|
||||
self.engine = create_database_engine(database_url, echo=echo)
|
||||
self.session_factory = create_session_factory(self.engine)
|
||||
|
||||
# Create async engine for async operations
|
||||
async_url = database_url.replace("sqlite://", "sqlite+aiosqlite://")
|
||||
self.async_engine = create_async_engine(async_url, echo=echo)
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker
|
||||
|
||||
self.async_session_factory = async_sessionmaker(
|
||||
self.async_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
def create_tables(self) -> None:
|
||||
"""Create all database tables."""
|
||||
create_tables(self.engine)
|
||||
@@ -138,6 +150,21 @@ class DatabaseManager:
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
@asynccontextmanager
|
||||
async def async_session(self) -> AsyncGenerator[AsyncSession, None]:
|
||||
"""Provide an async session context manager.
|
||||
|
||||
Yields:
|
||||
AsyncSession instance
|
||||
|
||||
Example:
|
||||
async with db.async_session() as session:
|
||||
result = await session.execute(select(Node))
|
||||
await session.commit()
|
||||
"""
|
||||
async with self.async_session_factory() as session:
|
||||
yield session
|
||||
|
||||
def dispose(self) -> None:
|
||||
"""Dispose of the database engine and connection pool."""
|
||||
self.engine.dispose()
|
||||
|
||||
@@ -23,6 +23,7 @@ class MQTTConfig:
|
||||
client_id: Optional[str] = None
|
||||
keepalive: int = 60
|
||||
clean_session: bool = True
|
||||
tls: bool = False
|
||||
|
||||
|
||||
class TopicBuilder:
|
||||
@@ -131,6 +132,11 @@ class MQTTClient:
|
||||
self._connected = False
|
||||
self._message_handlers: dict[str, list[MessageHandler]] = {}
|
||||
|
||||
# Set up TLS if enabled
|
||||
if config.tls:
|
||||
self._client.tls_set()
|
||||
logger.debug("TLS/SSL enabled for MQTT connection")
|
||||
|
||||
# Set up authentication if provided
|
||||
if config.username:
|
||||
self._client.username_pw_set(config.username, config.password)
|
||||
@@ -344,6 +350,7 @@ def create_mqtt_client(
|
||||
password: Optional[str] = None,
|
||||
prefix: str = "meshcore",
|
||||
client_id: Optional[str] = None,
|
||||
tls: bool = False,
|
||||
) -> MQTTClient:
|
||||
"""Create and configure an MQTT client.
|
||||
|
||||
@@ -354,6 +361,7 @@ def create_mqtt_client(
|
||||
password: MQTT password (optional)
|
||||
prefix: Topic prefix
|
||||
client_id: Client identifier (optional)
|
||||
tls: Enable TLS/SSL connection (optional)
|
||||
|
||||
Returns:
|
||||
Configured MQTTClient instance
|
||||
@@ -365,5 +373,6 @@ def create_mqtt_client(
|
||||
password=password,
|
||||
prefix=prefix,
|
||||
client_id=client_id,
|
||||
tls=tls,
|
||||
)
|
||||
return MQTTClient(config)
|
||||
|
||||
@@ -35,9 +35,7 @@ class MemberNodeRead(BaseModel):
|
||||
node_adv_type: Optional[str] = Field(
|
||||
default=None, description="Node's advertisement type"
|
||||
)
|
||||
friendly_name: Optional[str] = Field(
|
||||
default=None, description="Node's friendly name tag"
|
||||
)
|
||||
tag_name: Optional[str] = Field(default=None, description="Node's name tag")
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
@@ -12,9 +12,7 @@ class ReceiverInfo(BaseModel):
|
||||
node_id: str = Field(..., description="Receiver node UUID")
|
||||
public_key: str = Field(..., description="Receiver node public key")
|
||||
name: Optional[str] = Field(default=None, description="Receiver node name")
|
||||
friendly_name: Optional[str] = Field(
|
||||
default=None, description="Receiver friendly name from tags"
|
||||
)
|
||||
tag_name: Optional[str] = Field(default=None, description="Receiver name from tags")
|
||||
snr: Optional[float] = Field(
|
||||
default=None, description="Signal-to-noise ratio at this receiver"
|
||||
)
|
||||
@@ -31,8 +29,8 @@ class MessageRead(BaseModel):
|
||||
default=None, description="Receiving interface node public key"
|
||||
)
|
||||
receiver_name: Optional[str] = Field(default=None, description="Receiver node name")
|
||||
receiver_friendly_name: Optional[str] = Field(
|
||||
default=None, description="Receiver friendly name from tags"
|
||||
receiver_tag_name: Optional[str] = Field(
|
||||
default=None, description="Receiver name from tags"
|
||||
)
|
||||
message_type: str = Field(..., description="Message type (contact, channel)")
|
||||
pubkey_prefix: Optional[str] = Field(
|
||||
@@ -41,8 +39,8 @@ class MessageRead(BaseModel):
|
||||
sender_name: Optional[str] = Field(
|
||||
default=None, description="Sender's advertised node name"
|
||||
)
|
||||
sender_friendly_name: Optional[str] = Field(
|
||||
default=None, description="Sender's friendly name from node tags"
|
||||
sender_tag_name: Optional[str] = Field(
|
||||
default=None, description="Sender's name from node tags"
|
||||
)
|
||||
channel_idx: Optional[int] = Field(default=None, description="Channel index")
|
||||
text: str = Field(..., description="Message content")
|
||||
@@ -110,16 +108,16 @@ class AdvertisementRead(BaseModel):
|
||||
default=None, description="Receiving interface node public key"
|
||||
)
|
||||
receiver_name: Optional[str] = Field(default=None, description="Receiver node name")
|
||||
receiver_friendly_name: Optional[str] = Field(
|
||||
default=None, description="Receiver friendly name from tags"
|
||||
receiver_tag_name: Optional[str] = Field(
|
||||
default=None, description="Receiver name from tags"
|
||||
)
|
||||
public_key: str = Field(..., description="Advertised public key")
|
||||
name: Optional[str] = Field(default=None, description="Advertised name")
|
||||
node_name: Optional[str] = Field(
|
||||
default=None, description="Node name from nodes table"
|
||||
)
|
||||
node_friendly_name: Optional[str] = Field(
|
||||
default=None, description="Node friendly name from tags"
|
||||
node_tag_name: Optional[str] = Field(
|
||||
default=None, description="Node name from tags"
|
||||
)
|
||||
adv_type: Optional[str] = Field(default=None, description="Node type")
|
||||
flags: Optional[int] = Field(default=None, description="Capability flags")
|
||||
@@ -215,7 +213,7 @@ class RecentAdvertisement(BaseModel):
|
||||
|
||||
public_key: str = Field(..., description="Node public key")
|
||||
name: Optional[str] = Field(default=None, description="Node name")
|
||||
friendly_name: Optional[str] = Field(default=None, description="Friendly name tag")
|
||||
tag_name: Optional[str] = Field(default=None, description="Name tag")
|
||||
adv_type: Optional[str] = Field(default=None, description="Node type")
|
||||
received_at: datetime = Field(..., description="When received")
|
||||
|
||||
@@ -225,8 +223,8 @@ class ChannelMessage(BaseModel):
|
||||
|
||||
text: str = Field(..., description="Message text")
|
||||
sender_name: Optional[str] = Field(default=None, description="Sender name")
|
||||
sender_friendly_name: Optional[str] = Field(
|
||||
default=None, description="Sender friendly name"
|
||||
sender_tag_name: Optional[str] = Field(
|
||||
default=None, description="Sender name from tags"
|
||||
)
|
||||
pubkey_prefix: Optional[str] = Field(
|
||||
default=None, description="Sender public key prefix"
|
||||
|
||||
@@ -51,6 +51,13 @@ def interface() -> None:
|
||||
envvar="NODE_ADDRESS",
|
||||
help="Override for device public key/address (hex string)",
|
||||
)
|
||||
@click.option(
|
||||
"--device-name",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MESHCORE_DEVICE_NAME",
|
||||
help="Device/node name (optional)",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-host",
|
||||
type=str,
|
||||
@@ -86,6 +93,13 @@ def interface() -> None:
|
||||
envvar="MQTT_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-tls",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--log-level",
|
||||
type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
|
||||
@@ -99,11 +113,13 @@ def run(
|
||||
baud: int,
|
||||
mock: bool,
|
||||
node_address: str | None,
|
||||
device_name: str | None,
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
log_level: str,
|
||||
) -> None:
|
||||
"""Run the interface component.
|
||||
@@ -139,11 +155,13 @@ def run(
|
||||
baud=baud,
|
||||
mock=mock,
|
||||
node_address=node_address,
|
||||
device_name=device_name,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
elif mode_upper == "SENDER":
|
||||
from meshcore_hub.interface.sender import run_sender
|
||||
@@ -153,11 +171,13 @@ def run(
|
||||
baud=baud,
|
||||
mock=mock,
|
||||
node_address=node_address,
|
||||
device_name=device_name,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
else:
|
||||
click.echo(f"Unknown mode: {mode}", err=True)
|
||||
@@ -193,6 +213,13 @@ def run(
|
||||
envvar="NODE_ADDRESS",
|
||||
help="Override for device public key/address (hex string)",
|
||||
)
|
||||
@click.option(
|
||||
"--device-name",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MESHCORE_DEVICE_NAME",
|
||||
help="Device/node name (optional)",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-host",
|
||||
type=str,
|
||||
@@ -228,16 +255,25 @@ def run(
|
||||
envvar="MQTT_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-tls",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
def receiver(
|
||||
port: str,
|
||||
baud: int,
|
||||
mock: bool,
|
||||
node_address: str | None,
|
||||
device_name: str | None,
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
) -> None:
|
||||
"""Run interface in RECEIVER mode.
|
||||
|
||||
@@ -262,6 +298,7 @@ def receiver(
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
|
||||
|
||||
@@ -294,6 +331,13 @@ def receiver(
|
||||
envvar="NODE_ADDRESS",
|
||||
help="Override for device public key/address (hex string)",
|
||||
)
|
||||
@click.option(
|
||||
"--device-name",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MESHCORE_DEVICE_NAME",
|
||||
help="Device/node name (optional)",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-host",
|
||||
type=str,
|
||||
@@ -329,16 +373,25 @@ def receiver(
|
||||
envvar="MQTT_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-tls",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
def sender(
|
||||
port: str,
|
||||
baud: int,
|
||||
mock: bool,
|
||||
node_address: str | None,
|
||||
device_name: str | None,
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
) -> None:
|
||||
"""Run interface in SENDER mode.
|
||||
|
||||
@@ -363,4 +416,5 @@ def sender(
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
|
||||
@@ -164,6 +164,18 @@ class BaseMeshCoreDevice(ABC):
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def set_name(self, name: str) -> bool:
|
||||
"""Set the device's node name.
|
||||
|
||||
Args:
|
||||
name: Node name to set
|
||||
|
||||
Returns:
|
||||
True if name was set successfully
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def start_message_fetching(self) -> bool:
|
||||
"""Start automatic message fetching.
|
||||
@@ -181,11 +193,24 @@ class BaseMeshCoreDevice(ABC):
|
||||
|
||||
Triggers a CONTACTS event with all stored contacts from the device.
|
||||
|
||||
Note: This should only be called before the event loop is running.
|
||||
|
||||
Returns:
|
||||
True if request was sent successfully
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def schedule_get_contacts(self) -> bool:
|
||||
"""Schedule a get_contacts request on the event loop.
|
||||
|
||||
This is safe to call from event handlers while the event loop is running.
|
||||
|
||||
Returns:
|
||||
True if request was scheduled successfully
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def run(self) -> None:
|
||||
"""Run the device event loop (blocking)."""
|
||||
@@ -518,6 +543,24 @@ class MeshCoreDevice(BaseMeshCoreDevice):
|
||||
logger.error(f"Failed to set device time: {e}")
|
||||
return False
|
||||
|
||||
def set_name(self, name: str) -> bool:
|
||||
"""Set the device's node name."""
|
||||
if not self._connected or not self._mc:
|
||||
logger.error("Cannot set name: not connected")
|
||||
return False
|
||||
|
||||
try:
|
||||
|
||||
async def _set_name() -> None:
|
||||
await self._mc.commands.set_name(name)
|
||||
|
||||
self._loop.run_until_complete(_set_name())
|
||||
logger.info(f"Set device name to '{name}'")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set device name: {e}")
|
||||
return False
|
||||
|
||||
def start_message_fetching(self) -> bool:
|
||||
"""Start automatic message fetching."""
|
||||
if not self._connected or not self._mc:
|
||||
@@ -537,7 +580,12 @@ class MeshCoreDevice(BaseMeshCoreDevice):
|
||||
return False
|
||||
|
||||
def get_contacts(self) -> bool:
|
||||
"""Fetch contacts from device contact database."""
|
||||
"""Fetch contacts from device contact database.
|
||||
|
||||
Note: This method should only be called before the event loop is running
|
||||
(e.g., during initialization). For calling during event processing,
|
||||
use schedule_get_contacts() instead.
|
||||
"""
|
||||
if not self._connected or not self._mc:
|
||||
logger.error("Cannot get contacts: not connected")
|
||||
return False
|
||||
@@ -554,6 +602,31 @@ class MeshCoreDevice(BaseMeshCoreDevice):
|
||||
logger.error(f"Failed to get contacts: {e}")
|
||||
return False
|
||||
|
||||
def schedule_get_contacts(self) -> bool:
|
||||
"""Schedule a get_contacts request on the event loop.
|
||||
|
||||
This is safe to call from event handlers while the event loop is running.
|
||||
The request is scheduled as a task on the event loop.
|
||||
|
||||
Returns:
|
||||
True if request was scheduled, False if device not connected
|
||||
"""
|
||||
if not self._connected or not self._mc:
|
||||
logger.error("Cannot get contacts: not connected")
|
||||
return False
|
||||
|
||||
try:
|
||||
|
||||
async def _get_contacts() -> None:
|
||||
await self._mc.commands.get_contacts()
|
||||
|
||||
asyncio.run_coroutine_threadsafe(_get_contacts(), self._loop)
|
||||
logger.info("Scheduled contact sync request")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to schedule get contacts: {e}")
|
||||
return False
|
||||
|
||||
def run(self) -> None:
|
||||
"""Run the device event loop."""
|
||||
self._running = True
|
||||
|
||||
@@ -271,6 +271,17 @@ class MockMeshCoreDevice(BaseMeshCoreDevice):
|
||||
logger.info(f"Mock: Set device time to {timestamp}")
|
||||
return True
|
||||
|
||||
def set_name(self, name: str) -> bool:
|
||||
"""Set the mock device's node name."""
|
||||
if not self._connected:
|
||||
logger.error("Cannot set name: not connected")
|
||||
return False
|
||||
|
||||
logger.info(f"Mock: Set device name to '{name}'")
|
||||
# Update the mock config name
|
||||
self.mock_config.name = name
|
||||
return True
|
||||
|
||||
def start_message_fetching(self) -> bool:
|
||||
"""Start automatic message fetching (mock)."""
|
||||
if not self._connected:
|
||||
@@ -281,7 +292,10 @@ class MockMeshCoreDevice(BaseMeshCoreDevice):
|
||||
return True
|
||||
|
||||
def get_contacts(self) -> bool:
|
||||
"""Fetch contacts from mock device contact database."""
|
||||
"""Fetch contacts from mock device contact database.
|
||||
|
||||
Note: This should only be called before the event loop is running.
|
||||
"""
|
||||
if not self._connected:
|
||||
logger.error("Cannot get contacts: not connected")
|
||||
return False
|
||||
@@ -307,6 +321,14 @@ class MockMeshCoreDevice(BaseMeshCoreDevice):
|
||||
threading.Thread(target=send_contacts, daemon=True).start()
|
||||
return True
|
||||
|
||||
def schedule_get_contacts(self) -> bool:
|
||||
"""Schedule a get_contacts request.
|
||||
|
||||
For the mock device, this is the same as get_contacts() since we
|
||||
don't have a real async event loop. The contacts are sent via a thread.
|
||||
"""
|
||||
return self.get_contacts()
|
||||
|
||||
def run(self) -> None:
|
||||
"""Run the mock device event loop."""
|
||||
self._running = True
|
||||
|
||||
@@ -33,15 +33,18 @@ class Receiver:
|
||||
self,
|
||||
device: BaseMeshCoreDevice,
|
||||
mqtt_client: MQTTClient,
|
||||
device_name: Optional[str] = None,
|
||||
):
|
||||
"""Initialize receiver.
|
||||
|
||||
Args:
|
||||
device: MeshCore device instance
|
||||
mqtt_client: MQTT client instance
|
||||
device_name: Optional device/node name to set on startup
|
||||
"""
|
||||
self.device = device
|
||||
self.mqtt = mqtt_client
|
||||
self.device_name = device_name
|
||||
self._running = False
|
||||
self._shutdown_event = threading.Event()
|
||||
self._device_connected = False
|
||||
@@ -71,11 +74,14 @@ class Receiver:
|
||||
"device_public_key": self.device.public_key,
|
||||
}
|
||||
|
||||
def _initialize_device(self) -> None:
|
||||
def _initialize_device(self, device_name: Optional[str] = None) -> None:
|
||||
"""Initialize device after connection.
|
||||
|
||||
Sets the hardware clock, sends a local advertisement, starts message fetching,
|
||||
and syncs the contact database.
|
||||
Sets the hardware clock, optionally sets device name, sends a local advertisement,
|
||||
starts message fetching, and syncs the contact database.
|
||||
|
||||
Args:
|
||||
device_name: Optional device/node name to set
|
||||
"""
|
||||
# Set device time to current Unix timestamp
|
||||
current_time = int(time.time())
|
||||
@@ -84,11 +90,18 @@ class Receiver:
|
||||
else:
|
||||
logger.warning("Failed to synchronize device clock")
|
||||
|
||||
# Send a local (non-flood) advertisement to announce presence
|
||||
if self.device.send_advertisement(flood=False):
|
||||
logger.info("Sent local advertisement")
|
||||
# Set device name if provided
|
||||
if device_name:
|
||||
if self.device.set_name(device_name):
|
||||
logger.info(f"Set device name to '{device_name}'")
|
||||
else:
|
||||
logger.warning(f"Failed to set device name to '{device_name}'")
|
||||
|
||||
# Send a flood advertisement to broadcast device name
|
||||
if self.device.send_advertisement(flood=True):
|
||||
logger.info("Sent flood advertisement")
|
||||
else:
|
||||
logger.warning("Failed to send local advertisement")
|
||||
logger.warning("Failed to send flood advertisement")
|
||||
|
||||
# Start automatic message fetching
|
||||
if self.device.start_message_fetching():
|
||||
@@ -131,9 +144,24 @@ class Receiver:
|
||||
|
||||
logger.debug(f"Published {event_name} event to MQTT")
|
||||
|
||||
# Trigger contact sync on advertisements
|
||||
if event_type == EventType.ADVERTISEMENT:
|
||||
self._sync_contacts()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to publish event to MQTT: {e}")
|
||||
|
||||
def _sync_contacts(self) -> None:
|
||||
"""Request contact sync from device.
|
||||
|
||||
Called when advertisements are received to ensure contact database
|
||||
stays current with all nodes on the mesh.
|
||||
"""
|
||||
logger.info("Advertisement received, triggering contact sync")
|
||||
success = self.device.schedule_get_contacts()
|
||||
if not success:
|
||||
logger.warning("Contact sync request failed")
|
||||
|
||||
def _publish_contacts(self, payload: dict[str, Any]) -> None:
|
||||
"""Publish each contact as a separate MQTT message.
|
||||
|
||||
@@ -211,8 +239,8 @@ class Receiver:
|
||||
|
||||
self._device_connected = True
|
||||
|
||||
# Initialize device: set time and send local advertisement
|
||||
self._initialize_device()
|
||||
# Initialize device: set time, optionally set name, and send local advertisement
|
||||
self._initialize_device(device_name=self.device_name)
|
||||
|
||||
self._running = True
|
||||
|
||||
@@ -271,11 +299,13 @@ def create_receiver(
|
||||
baud: int = 115200,
|
||||
mock: bool = False,
|
||||
node_address: Optional[str] = None,
|
||||
device_name: Optional[str] = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
) -> Receiver:
|
||||
"""Create a configured receiver instance.
|
||||
|
||||
@@ -284,11 +314,13 @@ def create_receiver(
|
||||
baud: Baud rate
|
||||
mock: Use mock device
|
||||
node_address: Optional override for device public key/address
|
||||
device_name: Optional device/node name to set on startup
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
|
||||
Returns:
|
||||
Configured Receiver instance
|
||||
@@ -309,10 +341,11 @@ def create_receiver(
|
||||
password=mqtt_password,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-receiver-{device.public_key[:12] if device.public_key else 'unknown'}",
|
||||
tls=mqtt_tls,
|
||||
)
|
||||
mqtt_client = MQTTClient(mqtt_config)
|
||||
|
||||
return Receiver(device, mqtt_client)
|
||||
return Receiver(device, mqtt_client, device_name=device_name)
|
||||
|
||||
|
||||
def run_receiver(
|
||||
@@ -320,11 +353,13 @@ def run_receiver(
|
||||
baud: int = 115200,
|
||||
mock: bool = False,
|
||||
node_address: Optional[str] = None,
|
||||
device_name: Optional[str] = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
) -> None:
|
||||
"""Run the receiver (blocking).
|
||||
|
||||
@@ -335,22 +370,26 @@ def run_receiver(
|
||||
baud: Baud rate
|
||||
mock: Use mock device
|
||||
node_address: Optional override for device public key/address
|
||||
device_name: Optional device/node name to set on startup
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
"""
|
||||
receiver = create_receiver(
|
||||
port=port,
|
||||
baud=baud,
|
||||
mock=mock,
|
||||
node_address=node_address,
|
||||
device_name=device_name,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
|
||||
# Set up signal handlers
|
||||
|
||||
@@ -287,11 +287,13 @@ def create_sender(
|
||||
baud: int = 115200,
|
||||
mock: bool = False,
|
||||
node_address: Optional[str] = None,
|
||||
device_name: Optional[str] = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
) -> Sender:
|
||||
"""Create a configured sender instance.
|
||||
|
||||
@@ -300,11 +302,13 @@ def create_sender(
|
||||
baud: Baud rate
|
||||
mock: Use mock device
|
||||
node_address: Optional override for device public key/address
|
||||
device_name: Optional device/node name (not used in SENDER mode)
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
|
||||
Returns:
|
||||
Configured Sender instance
|
||||
@@ -325,6 +329,7 @@ def create_sender(
|
||||
password=mqtt_password,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-sender-{device.public_key[:12] if device.public_key else 'unknown'}",
|
||||
tls=mqtt_tls,
|
||||
)
|
||||
mqtt_client = MQTTClient(mqtt_config)
|
||||
|
||||
@@ -336,11 +341,13 @@ def run_sender(
|
||||
baud: int = 115200,
|
||||
mock: bool = False,
|
||||
node_address: Optional[str] = None,
|
||||
device_name: Optional[str] = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: Optional[str] = None,
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
) -> None:
|
||||
"""Run the sender (blocking).
|
||||
|
||||
@@ -351,22 +358,26 @@ def run_sender(
|
||||
baud: Baud rate
|
||||
mock: Use mock device
|
||||
node_address: Optional override for device public key/address
|
||||
device_name: Optional device/node name (not used in SENDER mode)
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
"""
|
||||
sender = create_sender(
|
||||
port=port,
|
||||
baud=baud,
|
||||
mock=mock,
|
||||
node_address=node_address,
|
||||
device_name=device_name,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
)
|
||||
|
||||
# Set up signal handlers
|
||||
|
||||
@@ -49,8 +49,8 @@
|
||||
<tr class="hover">
|
||||
<td>
|
||||
<a href="/nodes/{{ ad.public_key }}" class="link link-hover">
|
||||
{% if ad.node_friendly_name or ad.node_name or ad.name %}
|
||||
<div class="font-medium">{{ ad.node_friendly_name or ad.node_name or ad.name }}</div>
|
||||
{% if ad.node_tag_name or ad.node_name or ad.name %}
|
||||
<div class="font-medium">{{ ad.node_tag_name or ad.node_name or ad.name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ ad.public_key[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ ad.public_key[:16] }}...</span>
|
||||
@@ -80,7 +80,7 @@
|
||||
{% for recv in ad.receivers %}
|
||||
<li>
|
||||
<a href="/nodes/{{ recv.public_key }}" class="text-sm">
|
||||
{{ recv.friendly_name or recv.name or recv.public_key[:12] + '...' }}
|
||||
{{ recv.tag_name or recv.name or recv.public_key[:12] + '...' }}
|
||||
</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
@@ -88,8 +88,8 @@
|
||||
</div>
|
||||
{% elif ad.receivers and ad.receivers|length == 1 %}
|
||||
<a href="/nodes/{{ ad.receivers[0].public_key }}" class="link link-hover">
|
||||
{% if ad.receivers[0].friendly_name or ad.receivers[0].name %}
|
||||
<div class="font-medium">{{ ad.receivers[0].friendly_name or ad.receivers[0].name }}</div>
|
||||
{% if ad.receivers[0].tag_name or ad.receivers[0].name %}
|
||||
<div class="font-medium">{{ ad.receivers[0].tag_name or ad.receivers[0].name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ ad.receivers[0].public_key[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ ad.receivers[0].public_key[:16] }}...</span>
|
||||
@@ -97,8 +97,8 @@
|
||||
</a>
|
||||
{% elif ad.received_by %}
|
||||
<a href="/nodes/{{ ad.received_by }}" class="link link-hover">
|
||||
{% if ad.receiver_friendly_name or ad.receiver_name %}
|
||||
<div class="font-medium">{{ ad.receiver_friendly_name or ad.receiver_name }}</div>
|
||||
{% if ad.receiver_tag_name or ad.receiver_name %}
|
||||
<div class="font-medium">{{ ad.receiver_tag_name or ad.receiver_name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ ad.received_by[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ ad.received_by[:16] }}...</span>
|
||||
|
||||
@@ -83,7 +83,7 @@
|
||||
</ul>
|
||||
</div>
|
||||
<div class="navbar-end">
|
||||
<div class="badge badge-outline badge-sm">v{{ version }}</div>
|
||||
<div class="badge badge-outline badge-sm">{{ version }}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -114,7 +114,7 @@
|
||||
<a href="{{ network_contact_github }}" target="_blank" rel="noopener noreferrer" class="link link-hover">GitHub</a>
|
||||
{% endif %}
|
||||
</p>
|
||||
<p class="text-xs opacity-50 mt-2">Powered by <a href="https://github.com/ipnet-mesh/meshcore-hub" target="_blank" rel="noopener noreferrer" class="link link-hover">MeshCore Hub</a> v{{ version }}</p>
|
||||
<p class="text-xs opacity-50 mt-2">Powered by <a href="https://github.com/ipnet-mesh/meshcore-hub" target="_blank" rel="noopener noreferrer" class="link link-hover">MeshCore Hub</a> {{ version }}</p>
|
||||
</aside>
|
||||
</footer>
|
||||
|
||||
|
||||
@@ -78,8 +78,8 @@
|
||||
{% if msg.message_type == 'channel' %}
|
||||
<span class="font-mono">CH{{ msg.channel_idx }}</span>
|
||||
{% else %}
|
||||
{% if msg.sender_friendly_name or msg.sender_name %}
|
||||
<span class="font-medium">{{ msg.sender_friendly_name or msg.sender_name }}</span>
|
||||
{% if msg.sender_tag_name or msg.sender_name %}
|
||||
<span class="font-medium">{{ msg.sender_tag_name or msg.sender_name }}</span>
|
||||
{% else %}
|
||||
<span class="font-mono text-xs">{{ (msg.pubkey_prefix or '-')[:12] }}</span>
|
||||
{% endif %}
|
||||
@@ -96,7 +96,7 @@
|
||||
{% for recv in msg.receivers %}
|
||||
<li>
|
||||
<a href="/nodes/{{ recv.public_key }}" class="text-sm">
|
||||
<span class="flex-1">{{ recv.friendly_name or recv.name or recv.public_key[:12] + '...' }}</span>
|
||||
<span class="flex-1">{{ recv.tag_name or recv.name or recv.public_key[:12] + '...' }}</span>
|
||||
{% if recv.snr is not none %}
|
||||
<span class="badge badge-ghost badge-xs">{{ "%.1f"|format(recv.snr) }}</span>
|
||||
{% endif %}
|
||||
@@ -107,8 +107,8 @@
|
||||
</div>
|
||||
{% elif msg.receivers and msg.receivers|length == 1 %}
|
||||
<a href="/nodes/{{ msg.receivers[0].public_key }}" class="link link-hover">
|
||||
{% if msg.receivers[0].friendly_name or msg.receivers[0].name %}
|
||||
<div class="font-medium">{{ msg.receivers[0].friendly_name or msg.receivers[0].name }}</div>
|
||||
{% if msg.receivers[0].tag_name or msg.receivers[0].name %}
|
||||
<div class="font-medium">{{ msg.receivers[0].tag_name or msg.receivers[0].name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ msg.receivers[0].public_key[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ msg.receivers[0].public_key[:16] }}...</span>
|
||||
@@ -116,8 +116,8 @@
|
||||
</a>
|
||||
{% elif msg.received_by %}
|
||||
<a href="/nodes/{{ msg.received_by }}" class="link link-hover">
|
||||
{% if msg.receiver_friendly_name or msg.receiver_name %}
|
||||
<div class="font-medium">{{ msg.receiver_friendly_name or msg.receiver_name }}</div>
|
||||
{% if msg.receiver_tag_name or msg.receiver_name %}
|
||||
<div class="font-medium">{{ msg.receiver_tag_name or msg.receiver_name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ msg.received_by[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ msg.received_by[:16] }}...</span>
|
||||
|
||||
@@ -8,13 +8,13 @@
|
||||
<li><a href="/">Home</a></li>
|
||||
<li><a href="/nodes">Nodes</a></li>
|
||||
{% if node %}
|
||||
{% set ns = namespace(friendly_name=none) %}
|
||||
{% set ns = namespace(tag_name=none) %}
|
||||
{% for tag in node.tags or [] %}
|
||||
{% if tag.key == 'friendly_name' %}
|
||||
{% set ns.friendly_name = tag.value %}
|
||||
{% if tag.key == 'name' %}
|
||||
{% set ns.tag_name = tag.value %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<li>{{ ns.friendly_name or node.name or public_key[:12] + '...' }}</li>
|
||||
<li>{{ ns.tag_name or node.name or public_key[:12] + '...' }}</li>
|
||||
{% else %}
|
||||
<li>Not Found</li>
|
||||
{% endif %}
|
||||
@@ -31,17 +31,17 @@
|
||||
{% endif %}
|
||||
|
||||
{% if node %}
|
||||
{% set ns = namespace(friendly_name=none) %}
|
||||
{% set ns = namespace(tag_name=none) %}
|
||||
{% for tag in node.tags or [] %}
|
||||
{% if tag.key == 'friendly_name' %}
|
||||
{% set ns.friendly_name = tag.value %}
|
||||
{% if tag.key == 'name' %}
|
||||
{% set ns.tag_name = tag.value %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<!-- Node Info Card -->
|
||||
<div class="card bg-base-100 shadow-xl mb-6">
|
||||
<div class="card-body">
|
||||
<h1 class="card-title text-2xl">
|
||||
{{ ns.friendly_name or node.name or 'Unnamed Node' }}
|
||||
{{ ns.tag_name or node.name or 'Unnamed Node' }}
|
||||
{% if node.adv_type %}
|
||||
<span class="badge badge-secondary">{{ node.adv_type }}</span>
|
||||
{% endif %}
|
||||
@@ -125,8 +125,8 @@
|
||||
<td>
|
||||
{% if adv.received_by %}
|
||||
<a href="/nodes/{{ adv.received_by }}" class="link link-hover">
|
||||
{% if adv.receiver_friendly_name or adv.receiver_name %}
|
||||
<div class="font-medium text-sm">{{ adv.receiver_friendly_name or adv.receiver_name }}</div>
|
||||
{% if adv.receiver_tag_name or adv.receiver_name %}
|
||||
<div class="font-medium text-sm">{{ adv.receiver_tag_name or adv.receiver_name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ adv.received_by[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-xs">{{ adv.received_by[:16] }}...</span>
|
||||
@@ -175,8 +175,8 @@
|
||||
<td>
|
||||
{% if tel.received_by %}
|
||||
<a href="/nodes/{{ tel.received_by }}" class="link link-hover">
|
||||
{% if tel.receiver_friendly_name or tel.receiver_name %}
|
||||
<div class="font-medium text-sm">{{ tel.receiver_friendly_name or tel.receiver_name }}</div>
|
||||
{% if tel.receiver_tag_name or tel.receiver_name %}
|
||||
<div class="font-medium text-sm">{{ tel.receiver_tag_name or tel.receiver_name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ tel.received_by[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-xs">{{ tel.received_by[:16] }}...</span>
|
||||
|
||||
@@ -57,17 +57,17 @@
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for node in nodes %}
|
||||
{% set ns = namespace(friendly_name=none) %}
|
||||
{% set ns = namespace(tag_name=none) %}
|
||||
{% for tag in node.tags or [] %}
|
||||
{% if tag.key == 'friendly_name' %}
|
||||
{% set ns.friendly_name = tag.value %}
|
||||
{% if tag.key == 'name' %}
|
||||
{% set ns.tag_name = tag.value %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
<tr class="hover">
|
||||
<td>
|
||||
<a href="/nodes/{{ node.public_key }}" class="link link-hover">
|
||||
{% if ns.friendly_name or node.name %}
|
||||
<div class="font-medium">{{ ns.friendly_name or node.name }}</div>
|
||||
{% if ns.tag_name or node.name %}
|
||||
<div class="font-medium">{{ ns.tag_name or node.name }}</div>
|
||||
<div class="text-xs font-mono opacity-70">{{ node.public_key[:16] }}...</div>
|
||||
{% else %}
|
||||
<span class="font-mono text-sm">{{ node.public_key[:16] }}...</span>
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
"""Fixtures for collector component tests."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker
|
||||
|
||||
from meshcore_hub.common.database import DatabaseManager
|
||||
from meshcore_hub.common.models.base import Base
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -20,3 +23,29 @@ def db_session(db_manager):
|
||||
session = db_manager.get_session()
|
||||
yield session
|
||||
session.close()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def async_db_session():
|
||||
"""Create an async database session for testing.
|
||||
|
||||
Uses a separate in-memory database with tables created inline.
|
||||
"""
|
||||
# Create async engine with in-memory database
|
||||
engine = create_async_engine("sqlite+aiosqlite:///:memory:")
|
||||
|
||||
# Create tables
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
|
||||
# Create session factory
|
||||
async_session_maker = async_sessionmaker(
|
||||
engine, class_=AsyncSession, expire_on_commit=False
|
||||
)
|
||||
|
||||
# Provide session
|
||||
async with async_session_maker() as session:
|
||||
yield session
|
||||
|
||||
# Cleanup
|
||||
await engine.dispose()
|
||||
|
||||
@@ -0,0 +1,250 @@
|
||||
"""Tests for data cleanup functionality."""
|
||||
|
||||
import pytest
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from meshcore_hub.collector.cleanup import cleanup_old_data, CleanupStats
|
||||
from meshcore_hub.common.models import (
|
||||
Advertisement,
|
||||
EventLog,
|
||||
Message,
|
||||
Node,
|
||||
Telemetry,
|
||||
TracePath,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_old_data_dry_run(async_db_session: AsyncSession) -> None:
|
||||
"""Test cleanup in dry-run mode."""
|
||||
# Create test node
|
||||
node = Node(
|
||||
public_key="a" * 64,
|
||||
name="Test Node",
|
||||
)
|
||||
async_db_session.add(node)
|
||||
await async_db_session.flush()
|
||||
|
||||
# Create old advertisement (60 days ago)
|
||||
old_date = datetime.now(timezone.utc) - timedelta(days=60)
|
||||
old_adv = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_adv)
|
||||
|
||||
# Create recent advertisement (10 days ago)
|
||||
recent_date = datetime.now(timezone.utc) - timedelta(days=10)
|
||||
recent_adv = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=recent_date,
|
||||
updated_at=recent_date,
|
||||
)
|
||||
async_db_session.add(recent_adv)
|
||||
|
||||
await async_db_session.commit()
|
||||
|
||||
# Run cleanup in dry-run mode with 30-day retention
|
||||
stats = await cleanup_old_data(async_db_session, retention_days=30, dry_run=True)
|
||||
|
||||
# Should report 1 advertisement would be deleted
|
||||
assert stats.advertisements_deleted == 1
|
||||
assert stats.total_deleted == 1
|
||||
|
||||
# Verify no data was actually deleted
|
||||
await async_db_session.rollback() # Refresh from DB
|
||||
from sqlalchemy import select, func
|
||||
|
||||
count = await async_db_session.scalar(
|
||||
select(func.count()).select_from(Advertisement)
|
||||
)
|
||||
assert count == 2 # Both still exist
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_old_data_live(async_db_session: AsyncSession) -> None:
|
||||
"""Test cleanup in live mode."""
|
||||
# Create test node
|
||||
node = Node(
|
||||
public_key="b" * 64,
|
||||
name="Test Node",
|
||||
)
|
||||
async_db_session.add(node)
|
||||
await async_db_session.flush()
|
||||
|
||||
# Create old records (60 days ago)
|
||||
old_date = datetime.now(timezone.utc) - timedelta(days=60)
|
||||
|
||||
old_adv = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_adv)
|
||||
|
||||
old_msg = Message(
|
||||
receiver_node_id=node.id,
|
||||
message_type="channel",
|
||||
text="old message",
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_msg)
|
||||
|
||||
old_telemetry = Telemetry(
|
||||
receiver_node_id=node.id,
|
||||
node_id=node.id,
|
||||
node_public_key=node.public_key,
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_telemetry)
|
||||
|
||||
old_trace = TracePath(
|
||||
receiver_node_id=node.id,
|
||||
initiator_tag="test",
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_trace)
|
||||
|
||||
old_event = EventLog(
|
||||
receiver_node_id=node.id,
|
||||
event_type="test_event",
|
||||
created_at=old_date,
|
||||
updated_at=old_date,
|
||||
)
|
||||
async_db_session.add(old_event)
|
||||
|
||||
# Create recent records (10 days ago)
|
||||
recent_date = datetime.now(timezone.utc) - timedelta(days=10)
|
||||
|
||||
recent_adv = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=recent_date,
|
||||
updated_at=recent_date,
|
||||
)
|
||||
async_db_session.add(recent_adv)
|
||||
|
||||
await async_db_session.commit()
|
||||
|
||||
# Run cleanup with 30-day retention
|
||||
stats = await cleanup_old_data(async_db_session, retention_days=30, dry_run=False)
|
||||
|
||||
# Verify statistics
|
||||
assert stats.advertisements_deleted == 1
|
||||
assert stats.messages_deleted == 1
|
||||
assert stats.telemetry_deleted == 1
|
||||
assert stats.trace_paths_deleted == 1
|
||||
assert stats.event_logs_deleted == 1
|
||||
assert stats.total_deleted == 5
|
||||
|
||||
# Verify old data was deleted
|
||||
from sqlalchemy import select, func
|
||||
|
||||
adv_count = await async_db_session.scalar(
|
||||
select(func.count()).select_from(Advertisement)
|
||||
)
|
||||
assert adv_count == 1 # Only recent one remains
|
||||
|
||||
msg_count = await async_db_session.scalar(select(func.count()).select_from(Message))
|
||||
assert msg_count == 0 # Old one deleted
|
||||
|
||||
# Verify node still exists
|
||||
from sqlalchemy import select
|
||||
|
||||
node_result = await async_db_session.scalar(select(Node).where(Node.id == node.id))
|
||||
assert node_result is not None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_respects_retention_period(
|
||||
async_db_session: AsyncSession,
|
||||
) -> None:
|
||||
"""Test that cleanup respects the retention period."""
|
||||
# Create test node
|
||||
node = Node(
|
||||
public_key="d" * 64,
|
||||
name="Test Node",
|
||||
)
|
||||
async_db_session.add(node)
|
||||
await async_db_session.flush()
|
||||
|
||||
# Create advertisements at different ages
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# 90 days old - should be deleted with 30-day retention
|
||||
very_old = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=now - timedelta(days=90),
|
||||
updated_at=now - timedelta(days=90),
|
||||
)
|
||||
async_db_session.add(very_old)
|
||||
|
||||
# 40 days old - should be deleted with 30-day retention
|
||||
old = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=now - timedelta(days=40),
|
||||
updated_at=now - timedelta(days=40),
|
||||
)
|
||||
async_db_session.add(old)
|
||||
|
||||
# 20 days old - should be kept
|
||||
recent = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=now - timedelta(days=20),
|
||||
updated_at=now - timedelta(days=20),
|
||||
)
|
||||
async_db_session.add(recent)
|
||||
|
||||
# 5 days old - should be kept
|
||||
very_recent = Advertisement(
|
||||
node_id=node.id,
|
||||
public_key=node.public_key,
|
||||
created_at=now - timedelta(days=5),
|
||||
updated_at=now - timedelta(days=5),
|
||||
)
|
||||
async_db_session.add(very_recent)
|
||||
|
||||
await async_db_session.commit()
|
||||
|
||||
# Run cleanup with 30-day retention
|
||||
stats = await cleanup_old_data(async_db_session, retention_days=30, dry_run=False)
|
||||
|
||||
# Should delete the 2 old ones, keep the 2 recent ones
|
||||
assert stats.advertisements_deleted == 2
|
||||
assert stats.total_deleted == 2
|
||||
|
||||
# Verify count
|
||||
from sqlalchemy import select, func
|
||||
|
||||
adv_count = await async_db_session.scalar(
|
||||
select(func.count()).select_from(Advertisement)
|
||||
)
|
||||
assert adv_count == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_stats_repr() -> None:
|
||||
"""Test CleanupStats string representation."""
|
||||
stats = CleanupStats()
|
||||
stats.advertisements_deleted = 10
|
||||
stats.messages_deleted = 5
|
||||
stats.telemetry_deleted = 3
|
||||
stats.trace_paths_deleted = 2
|
||||
stats.event_logs_deleted = 1
|
||||
stats.total_deleted = 21
|
||||
|
||||
repr_str = repr(stats)
|
||||
assert "total=21" in repr_str
|
||||
assert "advertisements=10" in repr_str
|
||||
assert "messages=5" in repr_str
|
||||
@@ -390,3 +390,64 @@ class TestImportTags:
|
||||
assert tag_dict["is_disabled"].value_type == "boolean"
|
||||
|
||||
Path(f.name).unlink()
|
||||
|
||||
def test_import_with_clear_existing(self, db_manager):
|
||||
"""Test that clear_existing deletes all tags before importing."""
|
||||
# Create initial tags
|
||||
initial_data = {
|
||||
"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef": {
|
||||
"old_tag": "old_value",
|
||||
"shared_tag": "old_value",
|
||||
},
|
||||
"1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef": {
|
||||
"another_old_tag": "value",
|
||||
},
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
|
||||
yaml.dump(initial_data, f)
|
||||
f.flush()
|
||||
initial_file = f.name
|
||||
|
||||
stats1 = import_tags(initial_file, db_manager, create_nodes=True)
|
||||
assert stats1["created"] == 3
|
||||
assert stats1["deleted"] == 0
|
||||
|
||||
# Verify initial tags exist
|
||||
with db_manager.session_scope() as session:
|
||||
tags = session.execute(select(NodeTag)).scalars().all()
|
||||
assert len(tags) == 3
|
||||
|
||||
# Import new tags with clear_existing=True
|
||||
new_data = {
|
||||
"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef": {
|
||||
"new_tag": "new_value",
|
||||
"shared_tag": "new_value",
|
||||
}
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
|
||||
yaml.dump(new_data, f)
|
||||
f.flush()
|
||||
new_file = f.name
|
||||
|
||||
stats2 = import_tags(
|
||||
new_file, db_manager, create_nodes=True, clear_existing=True
|
||||
)
|
||||
assert stats2["deleted"] == 3 # All 3 old tags deleted
|
||||
assert stats2["created"] == 2 # 2 new tags created
|
||||
assert stats2["updated"] == 0 # No updates when clearing
|
||||
|
||||
# Verify only new tags exist
|
||||
with db_manager.session_scope() as session:
|
||||
tags = session.execute(select(NodeTag)).scalars().all()
|
||||
tag_dict = {t.key: t for t in tags}
|
||||
assert len(tags) == 2
|
||||
assert "new_tag" in tag_dict
|
||||
assert "shared_tag" in tag_dict
|
||||
assert tag_dict["shared_tag"].value == "new_value"
|
||||
assert "old_tag" not in tag_dict
|
||||
assert "another_old_tag" not in tag_dict
|
||||
|
||||
Path(initial_file).unlink()
|
||||
Path(new_file).unlink()
|
||||
|
||||
@@ -62,6 +62,56 @@ class TestReceiver:
|
||||
# Verify MQTT publish was called
|
||||
mock_mqtt_client.publish_event.assert_called()
|
||||
|
||||
def test_receiver_syncs_contacts_on_advertisement(
|
||||
self, receiver, mock_device, mock_mqtt_client
|
||||
):
|
||||
"""Test that receiver syncs contacts when advertisement is received."""
|
||||
import time
|
||||
from unittest.mock import patch
|
||||
|
||||
receiver.start()
|
||||
|
||||
# Patch schedule_get_contacts to track calls
|
||||
with patch.object(
|
||||
mock_device, "schedule_get_contacts", return_value=True
|
||||
) as mock_get:
|
||||
# Inject an advertisement event
|
||||
mock_device.inject_event(
|
||||
EventType.ADVERTISEMENT,
|
||||
{"pubkey_prefix": "b" * 64, "adv_name": "TestNode", "type": 1},
|
||||
)
|
||||
|
||||
# Allow time for event processing
|
||||
time.sleep(0.1)
|
||||
|
||||
# Verify schedule_get_contacts was called
|
||||
mock_get.assert_called()
|
||||
|
||||
def test_receiver_handles_contact_sync_failure(
|
||||
self, receiver, mock_device, mock_mqtt_client
|
||||
):
|
||||
"""Test that receiver handles contact sync failures gracefully."""
|
||||
import time
|
||||
from unittest.mock import patch
|
||||
|
||||
receiver.start()
|
||||
|
||||
# Patch schedule_get_contacts to return False (failure)
|
||||
with patch.object(
|
||||
mock_device, "schedule_get_contacts", return_value=False
|
||||
) as mock_get:
|
||||
# Should not raise exception even if sync fails
|
||||
mock_device.inject_event(
|
||||
EventType.ADVERTISEMENT,
|
||||
{"pubkey_prefix": "c" * 64, "adv_name": "FailNode", "type": 1},
|
||||
)
|
||||
|
||||
# Allow time for event processing
|
||||
time.sleep(0.1)
|
||||
|
||||
# Verify it was attempted
|
||||
mock_get.assert_called()
|
||||
|
||||
|
||||
class TestCreateReceiver:
|
||||
"""Tests for create_receiver factory function."""
|
||||
|
||||
Reference in New Issue
Block a user