mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-11 20:06:13 +02:00
Compare commits
22 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 3b7e2737ee | |||
| 01158ac69f | |||
| 485df05372 | |||
| e5e9eab935 | |||
| 33b2d3c260 | |||
| eccbd0bac5 | |||
| 4f54ec2c93 | |||
| eed38337c8 | |||
| e1ee7fcd24 | |||
| 2756b1ae8d | |||
| ef1d6a5a1a | |||
| 14f42c59fe | |||
| b9414e84ee | |||
| 95a17ca8ee | |||
| e6cedfbd0b | |||
| c3d0af1473 | |||
| c24e291017 | |||
| d2d009ae79 | |||
| d09166df84 | |||
| f2762ab495 | |||
| a411562ca7 | |||
| cde4d1744e |
@@ -30,3 +30,6 @@ references/
|
|||||||
docker-compose.yml
|
docker-compose.yml
|
||||||
docker-compose.yaml
|
docker-compose.yaml
|
||||||
.docker-certs/
|
.docker-certs/
|
||||||
|
|
||||||
|
# HA test environment (created by scripts/setup/start_ha_test_env.sh)
|
||||||
|
ha_test_config/
|
||||||
|
|||||||
@@ -1,3 +1,21 @@
|
|||||||
|
## [3.11.3] - 2026-04-12
|
||||||
|
|
||||||
|
* Bugfix: Add icons and screenshots for webmanifest
|
||||||
|
* Bugfix: Use incoming DMs, not just outgoing, for recency ranking for preferential radio contact load
|
||||||
|
|
||||||
|
## [3.11.2] - 2026-04-12
|
||||||
|
|
||||||
|
* Feature: Unread DMs are always at the top of the DM list no matter what
|
||||||
|
* Bugfix: Webmanifest needs withCredentials
|
||||||
|
|
||||||
|
## [3.11.1] - 2026-04-12
|
||||||
|
|
||||||
|
* Feature: Home Assistant MQTT fanout
|
||||||
|
* Feature: Add dummy service worker to enable PWA
|
||||||
|
* Bugfix: DB connection plurality issues
|
||||||
|
* Misc: Migration improvements
|
||||||
|
* Misc: Search keys from beginning
|
||||||
|
|
||||||
## [3.11.0] - 2026-04-10
|
## [3.11.0] - 2026-04-10
|
||||||
|
|
||||||
* Feature: Radio health and contact data accessible on fanout bus
|
* Feature: Radio health and contact data accessible on fanout bus
|
||||||
|
|||||||
+305
@@ -0,0 +1,305 @@
|
|||||||
|
# Home Assistant Integration
|
||||||
|
|
||||||
|
RemoteTerm can publish mesh network data to Home Assistant via MQTT Discovery. Devices and entities appear automatically in HA -- no custom component or HACS install needed.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Home Assistant with the [MQTT integration](https://www.home-assistant.io/integrations/mqtt/) configured
|
||||||
|
- An MQTT broker (e.g. Mosquitto) accessible to both HA and RemoteTerm
|
||||||
|
- RemoteTerm running and connected to a radio
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. In RemoteTerm, go to **Settings > Integrations > Add > Home Assistant MQTT Discovery**
|
||||||
|
2. Enter your MQTT broker host and port (same broker HA is connected to)
|
||||||
|
3. Optionally enter broker username/password and TLS settings
|
||||||
|
4. Select contacts for GPS tracking and repeaters for telemetry (see below)
|
||||||
|
5. Configure which messages should fire events (scope selector at the bottom)
|
||||||
|
6. Save and enable
|
||||||
|
|
||||||
|
Devices will appear in HA under **Settings > Devices & Services > MQTT** within a few seconds.
|
||||||
|
|
||||||
|
## What Gets Created
|
||||||
|
|
||||||
|
### Local Radio Device
|
||||||
|
|
||||||
|
Always created. Updates every 60 seconds.
|
||||||
|
|
||||||
|
| Entity | Type | Description |
|
||||||
|
|--------|------|-------------|
|
||||||
|
| `binary_sensor.meshcore_*_connected` | Connectivity | Radio online/offline |
|
||||||
|
| `sensor.meshcore_*_noise_floor` | Signal strength | Radio noise floor (dBm) |
|
||||||
|
|
||||||
|
### Repeater Devices
|
||||||
|
|
||||||
|
One device per tracked repeater (the repeater must be opted in to telemetry tracking). Updates when telemetry is collected — either by the auto-collect cycle (~8 hours) or when you manually fetch from the repeater dashboard.
|
||||||
|
|
||||||
|
Repeaters must first be added to the auto-telemetry tracking list in RemoteTerm's Radio settings section. Only auto-tracked repeaters appear in the HA integration's repeater picker.
|
||||||
|
|
||||||
|
| Entity | Type | Unit | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `sensor.meshcore_*_battery_voltage` | Voltage | V | Battery level |
|
||||||
|
| `sensor.meshcore_*_noise_floor` | Signal strength | dBm | Local noise floor |
|
||||||
|
| `sensor.meshcore_*_last_rssi` | Signal strength | dBm | Last received signal strength |
|
||||||
|
| `sensor.meshcore_*_last_snr` | -- | dB | Last signal-to-noise ratio |
|
||||||
|
| `sensor.meshcore_*_packets_received` | -- | count | Total packets received |
|
||||||
|
| `sensor.meshcore_*_packets_sent` | -- | count | Total packets sent |
|
||||||
|
| `sensor.meshcore_*_uptime` | Duration | s | Uptime since last reboot |
|
||||||
|
|
||||||
|
### Contact Device Trackers
|
||||||
|
|
||||||
|
One `device_tracker` per tracked contact. Updates passively whenever RemoteTerm hears an advertisement with GPS coordinates from that contact. No radio commands are sent -- it piggybacks on normal mesh traffic.
|
||||||
|
|
||||||
|
| Entity | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `device_tracker.meshcore_*` | GPS position (latitude/longitude) |
|
||||||
|
|
||||||
|
### Message Event Entity
|
||||||
|
|
||||||
|
A single `event.meshcore_messages` entity that fires for each message matching your configured scope. Each event carries these attributes:
|
||||||
|
|
||||||
|
| Attribute | Example | Description |
|
||||||
|
|-----------|---------|-------------|
|
||||||
|
| `event_type` | `message_received` | Always `message_received` |
|
||||||
|
| `sender_name` | `Alice` | Display name of the sender |
|
||||||
|
| `sender_key` | `aabbccdd...` | Sender's public key |
|
||||||
|
| `text` | `hello` | Message body |
|
||||||
|
| `message_type` | `PRIV` or `CHAN` | Direct message or channel |
|
||||||
|
| `channel_name` | `#general` | Channel name (null for DMs) |
|
||||||
|
| `conversation_key` | `aabbccdd...` | Contact key (DM) or channel key |
|
||||||
|
| `outgoing` | `false` | Whether you sent this message |
|
||||||
|
|
||||||
|
## Entity Naming
|
||||||
|
|
||||||
|
Entity IDs use the first 12 characters of the node's public key as an identifier. For example, a contact with public key `ae92577bae6c...` gets entity ID `device_tracker.meshcore_ae92577bae6c`. You can rename entities in HA's UI without affecting the integration.
|
||||||
|
|
||||||
|
## Common Automations
|
||||||
|
|
||||||
|
### Low repeater battery alert
|
||||||
|
|
||||||
|
Notify when a tracked repeater's battery drops below a threshold.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > Numeric state trigger on `sensor.meshcore_*_battery_voltage`, below `3.8`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Repeater battery low"
|
||||||
|
trigger:
|
||||||
|
- platform: numeric_state
|
||||||
|
entity_id: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
below: 3.8
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Repeater Battery Low"
|
||||||
|
message: >-
|
||||||
|
{{ state_attr('sensor.meshcore_aabbccddeeff_battery_voltage', 'friendly_name') }}
|
||||||
|
is at {{ states('sensor.meshcore_aabbccddeeff_battery_voltage') }}V
|
||||||
|
```
|
||||||
|
|
||||||
|
### Radio offline alert
|
||||||
|
|
||||||
|
Notify if the radio has been disconnected for more than 5 minutes.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `binary_sensor.meshcore_*_connected`, to `off`, for `00:05:00`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Radio offline"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: binary_sensor.meshcore_aabbccddeeff_connected
|
||||||
|
to: "off"
|
||||||
|
for: "00:05:00"
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "MeshCore Radio Offline"
|
||||||
|
message: "Radio has been disconnected for 5 minutes"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on any message from a specific room
|
||||||
|
|
||||||
|
Trigger when a message arrives in a specific channel. Two approaches:
|
||||||
|
|
||||||
|
#### Option A: Scope filtering (fully GUI, no template)
|
||||||
|
|
||||||
|
If you only care about one room, configure the HA integration's message scope to "Only listed channels" and select that room. Then every event fired comes from that room.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `event.meshcore_messages`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Emergency channel alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_messages
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Message in #emergency"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Option B: Template condition (multiple rooms, one integration)
|
||||||
|
|
||||||
|
Keep scope as "All messages" and filter in the automation. The trigger is GUI, but the condition uses a one-line template.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `event.meshcore_messages` > Add condition > Template > enter the template below.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Emergency channel alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ trigger.to_state.attributes.channel_name == '#emergency' }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Message in #emergency"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on DM from a specific contact
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "DM from Alice"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ trigger.to_state.attributes.message_type == 'PRIV'
|
||||||
|
and trigger.to_state.attributes.sender_name == 'Alice' }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "DM from Alice"
|
||||||
|
message: "{{ trigger.to_state.attributes.text }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on messages containing a keyword
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Keyword alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ 'emergency' in trigger.to_state.attributes.text | lower }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Emergency keyword detected"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }} in
|
||||||
|
{{ trigger.to_state.attributes.channel_name or 'DM' }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Track a contact on the HA map
|
||||||
|
|
||||||
|
No automation needed. Once a contact is selected for GPS tracking, their `device_tracker` entity automatically appears on the HA map. Go to **Settings > Dashboards > Map** (or add a Map card to any dashboard) and the tracked contact will show up when they advertise their GPS position.
|
||||||
|
|
||||||
|
### Dashboard card showing repeater battery
|
||||||
|
|
||||||
|
Add a sensor card to any dashboard:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
type: sensor
|
||||||
|
entity: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
name: "Hilltop Repeater Battery"
|
||||||
|
```
|
||||||
|
|
||||||
|
Or an entities card for multiple repeaters:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
type: entities
|
||||||
|
title: "Repeater Status"
|
||||||
|
entities:
|
||||||
|
- entity: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
name: "Hilltop"
|
||||||
|
- entity: sensor.meshcore_ccdd11223344_battery_voltage
|
||||||
|
name: "Valley"
|
||||||
|
- entity: sensor.meshcore_eeff55667788_battery_voltage
|
||||||
|
name: "Ridge"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Devices don't appear in HA
|
||||||
|
|
||||||
|
- Verify the MQTT integration is configured in HA (**Settings > Devices & Services > MQTT**) and shows "Connected"
|
||||||
|
- Verify RemoteTerm's HA integration shows "Connected" (green dot)
|
||||||
|
- Check that both HA and RemoteTerm are using the same MQTT broker
|
||||||
|
- Subscribe to discovery topics to verify messages are flowing:
|
||||||
|
```
|
||||||
|
mosquitto_sub -h <broker> -t 'homeassistant/#' -v
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stale or duplicate devices
|
||||||
|
|
||||||
|
If you see unexpected devices (e.g. a generic "MeshCore Radio" alongside your named radio), clear the stale retained messages:
|
||||||
|
```
|
||||||
|
mosquitto_pub -h <broker> -t 'homeassistant/binary_sensor/meshcore_unknown/connected/config' -r -n
|
||||||
|
mosquitto_pub -h <broker> -t 'homeassistant/sensor/meshcore_unknown/noise_floor/config' -r -n
|
||||||
|
```
|
||||||
|
|
||||||
|
### Repeater sensors show "Unknown" or "Unavailable"
|
||||||
|
|
||||||
|
Repeater telemetry only updates when collected. Trigger a manual fetch by opening the repeater's dashboard in RemoteTerm and clicking "Status", or wait for the next auto-collect cycle (~8 hours). Sensors show "Unknown" until the first telemetry reading arrives.
|
||||||
|
|
||||||
|
### Contact device tracker shows "Unknown"
|
||||||
|
|
||||||
|
The contact's GPS position only updates when RemoteTerm hears an advertisement from that node that includes GPS coordinates. If the contact's device doesn't broadcast GPS or hasn't advertised recently, the tracker will show as unknown.
|
||||||
|
|
||||||
|
### Entity is "Unavailable"
|
||||||
|
|
||||||
|
Radio health entities have a 120-second expiry. If RemoteTerm stops sending health updates (e.g. it's shut down or loses connection to the broker), HA marks the entities as unavailable after 2 minutes. Restart RemoteTerm or check the broker connection.
|
||||||
|
|
||||||
|
## Removing the Integration
|
||||||
|
|
||||||
|
Disabling or deleting the HA integration in RemoteTerm's settings publishes empty retained messages to all discovery topics, which removes the devices and entities from HA automatically.
|
||||||
|
|
||||||
|
## MQTT Topics Reference
|
||||||
|
|
||||||
|
State topics (where data is published):
|
||||||
|
|
||||||
|
| Topic | Content | Update frequency |
|
||||||
|
|-------|---------|-----------------|
|
||||||
|
| `meshcore/{node_id}/health` | `{"connected": bool, "noise_floor_dbm": int}` | Every 60s |
|
||||||
|
| `meshcore/{node_id}/telemetry` | `{"battery_volts": float, ...}` | ~8h or manual |
|
||||||
|
| `meshcore/{node_id}/gps` | `{"latitude": float, "longitude": float, ...}` | On advert |
|
||||||
|
| `meshcore/events/message` | `{"event_type": "message_received", ...}` | On message |
|
||||||
|
|
||||||
|
Discovery topics (entity registration, under `homeassistant/`):
|
||||||
|
|
||||||
|
| Pattern | Entity type |
|
||||||
|
|---------|------------|
|
||||||
|
| `homeassistant/binary_sensor/meshcore_*/connected/config` | Radio connectivity |
|
||||||
|
| `homeassistant/sensor/meshcore_*/noise_floor/config` | Noise floor sensor |
|
||||||
|
| `homeassistant/sensor/meshcore_*/battery_voltage/config` | Repeater battery |
|
||||||
|
| `homeassistant/sensor/meshcore_*/*/config` | Other repeater sensors |
|
||||||
|
| `homeassistant/device_tracker/meshcore_*/config` | Contact GPS tracker |
|
||||||
|
| `homeassistant/event/meshcore_messages/config` | Message event entity |
|
||||||
|
|
||||||
|
The `{node_id}` is always the first 12 characters of the node's public key, lowercased.
|
||||||
@@ -178,6 +178,22 @@ class Database:
|
|||||||
# Persists in the DB file but we set it explicitly on every connection.
|
# Persists in the DB file but we set it explicitly on every connection.
|
||||||
await self._connection.execute("PRAGMA journal_mode = WAL")
|
await self._connection.execute("PRAGMA journal_mode = WAL")
|
||||||
|
|
||||||
|
# synchronous = NORMAL is safe with WAL — only the most recent
|
||||||
|
# transaction can be lost on an OS crash (no corruption risk).
|
||||||
|
# Reduces fsync overhead vs. the default FULL.
|
||||||
|
await self._connection.execute("PRAGMA synchronous = NORMAL")
|
||||||
|
|
||||||
|
# Retry for up to 5s on lock contention instead of failing instantly.
|
||||||
|
# Matters when a second connection (e.g. VACUUM) touches the DB.
|
||||||
|
await self._connection.execute("PRAGMA busy_timeout = 5000")
|
||||||
|
|
||||||
|
# Bump page cache to ~64 MB (negative value = KB). Keeps hot pages
|
||||||
|
# in memory for read-heavy queries (unreads, pagination, search).
|
||||||
|
await self._connection.execute("PRAGMA cache_size = -64000")
|
||||||
|
|
||||||
|
# Keep temp tables and sort spills in memory instead of on disk.
|
||||||
|
await self._connection.execute("PRAGMA temp_store = MEMORY")
|
||||||
|
|
||||||
# Incremental auto-vacuum: freed pages are reclaimable via
|
# Incremental auto-vacuum: freed pages are reclaimable via
|
||||||
# PRAGMA incremental_vacuum without a full VACUUM. Must be set before
|
# PRAGMA incremental_vacuum without a full VACUUM. Must be set before
|
||||||
# the first table is created (for new databases); for existing databases
|
# the first table is created (for new databases); for existing databases
|
||||||
|
|||||||
@@ -31,12 +31,14 @@ def _register_module_types() -> None:
|
|||||||
from app.fanout.bot import BotModule
|
from app.fanout.bot import BotModule
|
||||||
from app.fanout.map_upload import MapUploadModule
|
from app.fanout.map_upload import MapUploadModule
|
||||||
from app.fanout.mqtt_community import MqttCommunityModule
|
from app.fanout.mqtt_community import MqttCommunityModule
|
||||||
|
from app.fanout.mqtt_ha import MqttHaModule
|
||||||
from app.fanout.mqtt_private import MqttPrivateModule
|
from app.fanout.mqtt_private import MqttPrivateModule
|
||||||
from app.fanout.sqs import SqsModule
|
from app.fanout.sqs import SqsModule
|
||||||
from app.fanout.webhook import WebhookModule
|
from app.fanout.webhook import WebhookModule
|
||||||
|
|
||||||
_MODULE_TYPES["mqtt_private"] = MqttPrivateModule
|
_MODULE_TYPES["mqtt_private"] = MqttPrivateModule
|
||||||
_MODULE_TYPES["mqtt_community"] = MqttCommunityModule
|
_MODULE_TYPES["mqtt_community"] = MqttCommunityModule
|
||||||
|
_MODULE_TYPES["mqtt_ha"] = MqttHaModule
|
||||||
_MODULE_TYPES["bot"] = BotModule
|
_MODULE_TYPES["bot"] = BotModule
|
||||||
_MODULE_TYPES["webhook"] = WebhookModule
|
_MODULE_TYPES["webhook"] = WebhookModule
|
||||||
_MODULE_TYPES["apprise"] = AppriseModule
|
_MODULE_TYPES["apprise"] = AppriseModule
|
||||||
|
|||||||
@@ -0,0 +1,757 @@
|
|||||||
|
"""Home Assistant MQTT Discovery fanout module.
|
||||||
|
|
||||||
|
Publishes HA-compatible discovery configs and state updates so that mesh
|
||||||
|
network devices appear natively in Home Assistant via its built-in MQTT
|
||||||
|
integration. No custom HA component is needed.
|
||||||
|
|
||||||
|
Entity types created:
|
||||||
|
- Local radio: binary_sensor (connectivity) + sensors (noise floor, battery,
|
||||||
|
uptime, RSSI, SNR, airtime, packet counts)
|
||||||
|
- Per tracked repeater: sensor entities for telemetry fields
|
||||||
|
- Per tracked contact: device_tracker for GPS position
|
||||||
|
- Messages: event entity for scope-matched messages
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import ssl
|
||||||
|
from types import SimpleNamespace
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.fanout.base import FanoutModule, get_fanout_message_text
|
||||||
|
from app.fanout.mqtt_base import BaseMqttPublisher
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# ── Repeater telemetry sensor definitions ─────────────────────────────────
|
||||||
|
|
||||||
|
_REPEATER_SENSORS: list[dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"field": "battery_volts",
|
||||||
|
"name": "Battery Voltage",
|
||||||
|
"object_id": "battery_voltage",
|
||||||
|
"device_class": "voltage",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "V",
|
||||||
|
"precision": 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "noise_floor_dbm",
|
||||||
|
"name": "Noise Floor",
|
||||||
|
"object_id": "noise_floor",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_rssi_dbm",
|
||||||
|
"name": "Last RSSI",
|
||||||
|
"object_id": "last_rssi",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_snr_db",
|
||||||
|
"name": "Last SNR",
|
||||||
|
"object_id": "last_snr",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dB",
|
||||||
|
"precision": 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_received",
|
||||||
|
"name": "Packets Received",
|
||||||
|
"object_id": "packets_received",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_sent",
|
||||||
|
"name": "Packets Sent",
|
||||||
|
"object_id": "packets_sent",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "uptime_seconds",
|
||||||
|
"name": "Uptime",
|
||||||
|
"object_id": "uptime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": None,
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# ── LPP sensor metadata ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
_LPP_HA_META: dict[str, dict[str, Any]] = {
|
||||||
|
"temperature": {"device_class": "temperature", "unit": "°C", "precision": 1},
|
||||||
|
"humidity": {"device_class": "humidity", "unit": "%", "precision": 1},
|
||||||
|
"barometer": {"device_class": "atmospheric_pressure", "unit": "hPa", "precision": 1},
|
||||||
|
"voltage": {"device_class": "voltage", "unit": "V", "precision": 2},
|
||||||
|
"current": {"device_class": "current", "unit": "mA", "precision": 1},
|
||||||
|
"luminosity": {"device_class": "illuminance", "unit": "lux", "precision": 0},
|
||||||
|
"power": {"device_class": "power", "unit": "W", "precision": 1},
|
||||||
|
"energy": {"device_class": "energy", "unit": "kWh", "precision": 2},
|
||||||
|
"distance": {"device_class": "distance", "unit": "mm", "precision": 0},
|
||||||
|
"concentration": {"device_class": None, "unit": "ppm", "precision": 0},
|
||||||
|
"direction": {"device_class": None, "unit": "°", "precision": 0},
|
||||||
|
"altitude": {"device_class": None, "unit": "m", "precision": 1},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _lpp_sensor_key(type_name: str, channel: int) -> str:
|
||||||
|
"""Build the flat telemetry-payload key for an LPP sensor."""
|
||||||
|
return f"lpp_{type_name}_ch{channel}"
|
||||||
|
|
||||||
|
|
||||||
|
def _lpp_discovery_configs(
    prefix: str,
    pub_key: str,
    device: dict,
    lpp_sensors: list[dict],
    state_topic: str,
) -> list[tuple[str, dict]]:
    """Build HA discovery configs for a repeater's LPP sensors.

    Args:
        prefix: State-topic prefix. Unused here (the state topic is passed in
            ready-made and discovery topics always live under
            ``homeassistant/``); kept for signature parity with the other
            discovery builders.
        pub_key: The repeater's public key; its first 12 chars become the
            node id used in unique_ids and discovery topics.
        device: HA device-registry fragment shared by this node's entities.
        lpp_sensors: Decoded LPP sensor descriptors; each is expected to
            carry ``type_name`` and ``channel`` (missing keys fall back to
            ``"unknown"`` / ``0``).
        state_topic: MQTT topic where the flattened telemetry is published.

    Returns:
        A list of ``(discovery_topic, config_payload)`` tuples.
    """
    configs: list[tuple[str, dict]] = []
    # Node id depends only on pub_key — hoist out of the loop instead of
    # recomputing it for every sensor.
    nid = _node_id(pub_key)

    for sensor in lpp_sensors:
        type_name = sensor.get("type_name", "unknown")
        channel = sensor.get("channel", 0)
        field = _lpp_sensor_key(type_name, channel)
        meta = _LPP_HA_META.get(type_name, {})

        # The object_id doubles as the telemetry-payload field name.
        object_id = field
        display = type_name.replace("_", " ").title()
        name = f"{display} (Ch {channel})"

        cfg: dict[str, Any] = {
            "name": name,
            "unique_id": f"meshcore_{nid}_{object_id}",
            "device": device,
            "state_topic": state_topic,
            "value_template": "{{ value_json." + field + " }}",
            "state_class": "measurement",
            # 10 hours — margin over the ~8-hour telemetry auto-collect cycle.
            "expire_after": 36000,
        }
        # Only emit the optional HA attributes when the LPP type has known
        # metadata; omitting them lets HA apply its defaults.
        if meta.get("device_class"):
            cfg["device_class"] = meta["device_class"]
        if meta.get("unit"):
            cfg["unit_of_measurement"] = meta["unit"]
        if meta.get("precision") is not None:
            cfg["suggested_display_precision"] = meta["precision"]

        topic = f"homeassistant/sensor/meshcore_{nid}/{object_id}/config"
        configs.append((topic, cfg))

    return configs
||||||
|
|
||||||
|
|
||||||
|
# ── Local radio sensor definitions ────────────────────────────────────────
|
||||||
|
|
||||||
|
_RADIO_SENSORS: list[dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"field": "noise_floor_dbm",
|
||||||
|
"name": "Noise Floor",
|
||||||
|
"object_id": "noise_floor",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "battery_volts",
|
||||||
|
"name": "Battery",
|
||||||
|
"object_id": "battery",
|
||||||
|
"device_class": "voltage",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "V",
|
||||||
|
"precision": 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "uptime_secs",
|
||||||
|
"name": "Uptime",
|
||||||
|
"object_id": "uptime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": None,
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_rssi",
|
||||||
|
"name": "Last RSSI",
|
||||||
|
"object_id": "last_rssi",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_snr",
|
||||||
|
"name": "Last SNR",
|
||||||
|
"object_id": "last_snr",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dB",
|
||||||
|
"precision": 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "tx_air_secs",
|
||||||
|
"name": "TX Airtime",
|
||||||
|
"object_id": "tx_airtime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "rx_air_secs",
|
||||||
|
"name": "RX Airtime",
|
||||||
|
"object_id": "rx_airtime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_recv",
|
||||||
|
"name": "Packets Received",
|
||||||
|
"object_id": "packets_received",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_sent",
|
||||||
|
"name": "Packets Sent",
|
||||||
|
"object_id": "packets_sent",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _node_id(public_key: str) -> str:
|
||||||
|
"""Derive a stable, MQTT-safe node identifier from a public key."""
|
||||||
|
return public_key[:12].lower()
|
||||||
|
|
||||||
|
|
||||||
|
def _device_payload(
    public_key: str,
    name: str,
    model: str,
    *,
    via_device_key: str | None = None,
) -> dict[str, Any]:
    """Build an HA device registry fragment.

    Falls back to the truncated public key when *name* is empty. When
    *via_device_key* is given, the device is linked to its parent via HA's
    ``via_device`` field.
    """
    payload: dict[str, Any] = {
        "identifiers": [f"meshcore_{_node_id(public_key)}"],
        "name": name if name else public_key[:12],
        "manufacturer": "MeshCore",
        "model": model,
    }
    if via_device_key:
        payload["via_device"] = f"meshcore_{_node_id(via_device_key)}"
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
# ── MQTT publisher subclass ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
class _HaMqttPublisher(BaseMqttPublisher):
    """Thin MQTT lifecycle wrapper for the HA discovery module."""

    _backoff_max = 30
    _log_prefix = "HA-MQTT"

    def __init__(self) -> None:
        super().__init__()
        # Awaited after each (re)connect; the owning module sets this so it
        # can republish discovery configs on reconnection.
        self._on_connected_callback: Any = None

    def _is_configured(self) -> bool:
        # A broker host is the only hard requirement; credentials and TLS
        # are optional.
        settings = self._settings
        return bool(settings and settings.broker_host)

    def _build_client_kwargs(self, settings: object) -> dict[str, Any]:
        cfg: Any = settings
        kwargs: dict[str, Any] = {
            "hostname": cfg.broker_host,
            "port": cfg.broker_port,
            "username": cfg.username or None,
            "password": cfg.password or None,
        }
        if cfg.use_tls:
            context = ssl.create_default_context()
            if cfg.tls_insecure:
                # Self-signed / mismatched-hostname brokers: drop verification.
                context.check_hostname = False
                context.verify_mode = ssl.CERT_NONE
            kwargs["tls_context"] = context
        return kwargs

    def _on_connected(self, settings: object) -> tuple[str, str]:
        cfg: Any = settings
        return ("HA MQTT connected", f"{cfg.broker_host}:{cfg.broker_port}")

    def _on_error(self) -> tuple[str, str]:
        return ("HA MQTT connection failure", "Please correct the settings or disable.")

    async def _on_connected_async(self, settings: object) -> None:
        callback = self._on_connected_callback
        if callback:
            await callback()
|
||||||
|
|
||||||
|
|
||||||
|
# ── Discovery config builders ─────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
def _radio_discovery_configs(
    prefix: str,
    radio_key: str,
    radio_name: str,
) -> list[tuple[str, dict]]:
    """Build HA discovery config payloads for the local radio device.

    Produces one connectivity binary_sensor plus one sensor per entry in
    ``_RADIO_SENSORS``, all reading from the ``{prefix}/{nid}/health`` topic.
    """
    nid = _node_id(radio_key)
    device = _device_payload(radio_key, radio_name, "Radio")
    state_topic = f"{prefix}/{nid}/health"

    # binary_sensor: connected
    connected_cfg = {
        "name": "Connected",
        "unique_id": f"meshcore_{nid}_connected",
        "device": device,
        "state_topic": state_topic,
        "value_template": "{{ 'ON' if value_json.connected else 'OFF' }}",
        "device_class": "connectivity",
        "payload_on": "ON",
        "payload_off": "OFF",
        "expire_after": 120,
    }
    configs: list[tuple[str, dict]] = [
        (f"homeassistant/binary_sensor/meshcore_{nid}/connected/config", connected_cfg)
    ]

    # sensors from _RADIO_SENSORS (noise floor, battery, uptime, RSSI, etc.)
    for spec in _RADIO_SENSORS:
        entity_cfg: dict[str, Any] = {
            "name": spec["name"],
            "unique_id": f"meshcore_{nid}_{spec['object_id']}",
            "device": device,
            "state_topic": state_topic,
            "value_template": "{{ value_json." + spec["field"] + " }}",  # type: ignore[operator]
            "expire_after": 120,
        }
        if spec["device_class"]:
            entity_cfg["device_class"] = spec["device_class"]
        if spec["state_class"]:
            entity_cfg["state_class"] = spec["state_class"]
        if spec["unit"]:
            entity_cfg["unit_of_measurement"] = spec["unit"]
        if spec.get("precision") is not None:
            entity_cfg["suggested_display_precision"] = spec["precision"]

        configs.append(
            (f"homeassistant/sensor/meshcore_{nid}/{spec['object_id']}/config", entity_cfg)
        )

    return configs
|
||||||
|
|
||||||
|
|
||||||
|
def _repeater_discovery_configs(
|
||||||
|
prefix: str,
|
||||||
|
pub_key: str,
|
||||||
|
name: str,
|
||||||
|
radio_key: str | None,
|
||||||
|
) -> list[tuple[str, dict]]:
|
||||||
|
"""Build HA discovery config payloads for a tracked repeater."""
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
device = _device_payload(pub_key, name, "Repeater", via_device_key=radio_key)
|
||||||
|
state_topic = f"{prefix}/{nid}/telemetry"
|
||||||
|
configs: list[tuple[str, dict]] = []
|
||||||
|
|
||||||
|
for sensor in _REPEATER_SENSORS:
|
||||||
|
cfg: dict[str, Any] = {
|
||||||
|
"name": sensor["name"],
|
||||||
|
"unique_id": f"meshcore_{nid}_{sensor['object_id']}",
|
||||||
|
"device": device,
|
||||||
|
"state_topic": state_topic,
|
||||||
|
"value_template": "{{ value_json." + sensor["field"] + " }}", # type: ignore[operator]
|
||||||
|
}
|
||||||
|
if sensor["device_class"]:
|
||||||
|
cfg["device_class"] = sensor["device_class"]
|
||||||
|
if sensor["state_class"]:
|
||||||
|
cfg["state_class"] = sensor["state_class"]
|
||||||
|
if sensor["unit"]:
|
||||||
|
cfg["unit_of_measurement"] = sensor["unit"]
|
||||||
|
if sensor.get("precision") is not None:
|
||||||
|
cfg["suggested_display_precision"] = sensor["precision"]
|
||||||
|
# 10 hours — margin over the 8-hour auto-collect cycle
|
||||||
|
cfg["expire_after"] = 36000
|
||||||
|
|
||||||
|
topic = f"homeassistant/sensor/meshcore_{nid}/{sensor['object_id']}/config"
|
||||||
|
configs.append((topic, cfg))
|
||||||
|
|
||||||
|
return configs
|
||||||
|
|
||||||
|
|
||||||
|
def _contact_tracker_discovery_config(
|
||||||
|
prefix: str,
|
||||||
|
pub_key: str,
|
||||||
|
name: str,
|
||||||
|
radio_key: str | None,
|
||||||
|
) -> tuple[str, dict]:
|
||||||
|
"""Build HA discovery config for a tracked contact's device_tracker."""
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
device = _device_payload(pub_key, name, "Node", via_device_key=radio_key)
|
||||||
|
topic = f"homeassistant/device_tracker/meshcore_{nid}/config"
|
||||||
|
cfg: dict[str, Any] = {
|
||||||
|
"name": name or pub_key[:12],
|
||||||
|
"unique_id": f"meshcore_{nid}_tracker",
|
||||||
|
"device": device,
|
||||||
|
"json_attributes_topic": f"{prefix}/{nid}/gps",
|
||||||
|
"source_type": "gps",
|
||||||
|
}
|
||||||
|
return topic, cfg
|
||||||
|
|
||||||
|
|
||||||
|
def _message_event_discovery_config(
|
||||||
|
prefix: str, radio_key: str, radio_name: str
|
||||||
|
) -> tuple[str, dict]:
|
||||||
|
"""Build HA discovery config for the message event entity."""
|
||||||
|
nid = _node_id(radio_key)
|
||||||
|
device = _device_payload(radio_key, radio_name, "Radio")
|
||||||
|
topic = f"homeassistant/event/meshcore_{nid}/messages/config"
|
||||||
|
cfg: dict[str, Any] = {
|
||||||
|
"name": "MeshCore Messages",
|
||||||
|
"unique_id": f"meshcore_{nid}_messages",
|
||||||
|
"device": device,
|
||||||
|
"state_topic": f"{prefix}/{nid}/events/message",
|
||||||
|
"event_types": ["message_received"],
|
||||||
|
}
|
||||||
|
return topic, cfg
|
||||||
|
|
||||||
|
|
||||||
|
# ── Module class ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
def _config_to_settings(config: dict) -> SimpleNamespace:
|
||||||
|
return SimpleNamespace(
|
||||||
|
broker_host=config.get("broker_host", ""),
|
||||||
|
broker_port=config.get("broker_port", 1883),
|
||||||
|
username=config.get("username", ""),
|
||||||
|
password=config.get("password", ""),
|
||||||
|
use_tls=config.get("use_tls", False),
|
||||||
|
tls_insecure=config.get("tls_insecure", False),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MqttHaModule(FanoutModule):
|
||||||
|
"""Home Assistant MQTT Discovery fanout module."""
|
||||||
|
|
||||||
|
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
||||||
|
super().__init__(config_id, config, name=name)
|
||||||
|
self._publisher = _HaMqttPublisher()
|
||||||
|
self._publisher.set_integration_name(name or config_id)
|
||||||
|
self._publisher._on_connected_callback = self._publish_discovery
|
||||||
|
self._discovery_topics: list[str] = []
|
||||||
|
self._radio_key: str | None = None
|
||||||
|
self._radio_name: str | None = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _prefix(self) -> str:
|
||||||
|
return self.config.get("topic_prefix", "meshcore")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _tracked_contacts(self) -> list[str]:
|
||||||
|
return self.config.get("tracked_contacts") or []
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _tracked_repeaters(self) -> list[str]:
|
||||||
|
return self.config.get("tracked_repeaters") or []
|
||||||
|
|
||||||
|
# ── Lifecycle ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async def start(self) -> None:
|
||||||
|
self._seed_radio_identity_from_runtime()
|
||||||
|
settings = _config_to_settings(self.config)
|
||||||
|
await self._publisher.start(settings)
|
||||||
|
|
||||||
|
async def stop(self) -> None:
|
||||||
|
await self._remove_discovery()
|
||||||
|
await self._publisher.stop()
|
||||||
|
self._discovery_topics.clear()
|
||||||
|
|
||||||
|
# ── Discovery publishing ──────────────────────────────────────────
|
||||||
|
|
||||||
|
async def _publish_discovery(self) -> None:
|
||||||
|
"""Publish all HA discovery configs with retain=True."""
|
||||||
|
if not self._radio_key:
|
||||||
|
# Don't publish discovery until we know the radio identity —
|
||||||
|
# the first health heartbeat will provide it and trigger this.
|
||||||
|
return
|
||||||
|
|
||||||
|
configs: list[tuple[str, dict]] = []
|
||||||
|
|
||||||
|
radio_name = self._radio_name or "MeshCore Radio"
|
||||||
|
configs.extend(_radio_discovery_configs(self._prefix, self._radio_key, radio_name))
|
||||||
|
|
||||||
|
# Tracked repeaters — resolve names and LPP sensors from DB best-effort
|
||||||
|
for pub_key in self._tracked_repeaters:
|
||||||
|
rname = await self._resolve_contact_name(pub_key)
|
||||||
|
configs.extend(
|
||||||
|
_repeater_discovery_configs(self._prefix, pub_key, rname, self._radio_key)
|
||||||
|
)
|
||||||
|
# Dynamic LPP sensor entities from last known telemetry snapshot
|
||||||
|
lpp_sensors = await self._resolve_lpp_sensors(pub_key)
|
||||||
|
if lpp_sensors:
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
device = _device_payload(pub_key, rname, "Repeater", via_device_key=self._radio_key)
|
||||||
|
state_topic = f"{self._prefix}/{nid}/telemetry"
|
||||||
|
configs.extend(
|
||||||
|
_lpp_discovery_configs(self._prefix, pub_key, device, lpp_sensors, state_topic)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Tracked contacts — resolve names from DB best-effort
|
||||||
|
for pub_key in self._tracked_contacts:
|
||||||
|
cname = await self._resolve_contact_name(pub_key)
|
||||||
|
configs.append(
|
||||||
|
_contact_tracker_discovery_config(self._prefix, pub_key, cname, self._radio_key)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Message event entity (namespaced to this radio)
|
||||||
|
configs.append(_message_event_discovery_config(self._prefix, self._radio_key, radio_name))
|
||||||
|
|
||||||
|
self._discovery_topics = [topic for topic, _ in configs]
|
||||||
|
|
||||||
|
for topic, payload in configs:
|
||||||
|
await self._publisher.publish(topic, payload, retain=True)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"HA MQTT: published %d discovery configs (%d repeaters, %d contacts)",
|
||||||
|
len(configs),
|
||||||
|
len(self._tracked_repeaters),
|
||||||
|
len(self._tracked_contacts),
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _clear_retained_topics(self, topics: list[str]) -> None:
|
||||||
|
"""Publish empty retained payloads to remove entries from broker."""
|
||||||
|
for topic in topics:
|
||||||
|
try:
|
||||||
|
if self._publisher._client:
|
||||||
|
await self._publisher._client.publish(topic, b"", retain=True)
|
||||||
|
except Exception:
|
||||||
|
pass # best-effort cleanup
|
||||||
|
|
||||||
|
async def _remove_discovery(self) -> None:
|
||||||
|
"""Publish empty retained payloads to remove all HA entities."""
|
||||||
|
if not self._publisher.connected or not self._discovery_topics:
|
||||||
|
return
|
||||||
|
await self._clear_retained_topics(self._discovery_topics)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _resolve_contact_name(pub_key: str) -> str:
|
||||||
|
"""Look up a contact's display name, falling back to 12-char prefix."""
|
||||||
|
try:
|
||||||
|
from app.repository.contacts import ContactRepository
|
||||||
|
|
||||||
|
contact = await ContactRepository.get_by_key(pub_key)
|
||||||
|
if contact and contact.name:
|
||||||
|
return contact.name
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return pub_key[:12]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _resolve_lpp_sensors(pub_key: str) -> list[dict]:
|
||||||
|
"""Return the LPP sensor list from the most recent telemetry snapshot, or []."""
|
||||||
|
try:
|
||||||
|
from app.repository.repeater_telemetry import RepeaterTelemetryRepository
|
||||||
|
|
||||||
|
latest = await RepeaterTelemetryRepository.get_latest(pub_key)
|
||||||
|
if latest:
|
||||||
|
return latest.get("data", {}).get("lpp_sensors", [])
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _seed_radio_identity_from_runtime(self) -> None:
|
||||||
|
"""Best-effort bootstrap from the currently connected radio session."""
|
||||||
|
try:
|
||||||
|
from app.services.radio_runtime import radio_runtime
|
||||||
|
|
||||||
|
if not radio_runtime.is_connected:
|
||||||
|
return
|
||||||
|
|
||||||
|
mc = radio_runtime.meshcore
|
||||||
|
self_info = mc.self_info if mc is not None else None
|
||||||
|
if not isinstance(self_info, dict):
|
||||||
|
return
|
||||||
|
|
||||||
|
pub_key = self_info.get("public_key")
|
||||||
|
if isinstance(pub_key, str) and pub_key.strip():
|
||||||
|
self._radio_key = pub_key.strip().lower()
|
||||||
|
|
||||||
|
name = self_info.get("name")
|
||||||
|
if isinstance(name, str) and name.strip():
|
||||||
|
self._radio_name = name.strip()
|
||||||
|
except Exception:
|
||||||
|
logger.debug("HA MQTT: failed to seed radio identity from runtime", exc_info=True)
|
||||||
|
|
||||||
|
# ── Event handlers ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async def on_health(self, data: dict) -> None:
|
||||||
|
if not self._publisher.connected:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Cache radio identity for discovery config generation
|
||||||
|
pub_key = data.get("public_key")
|
||||||
|
if pub_key:
|
||||||
|
new_name = data.get("name")
|
||||||
|
key_changed = pub_key != self._radio_key
|
||||||
|
name_changed = new_name and new_name != self._radio_name
|
||||||
|
|
||||||
|
if key_changed:
|
||||||
|
old_key = self._radio_key
|
||||||
|
old_topics = list(self._discovery_topics)
|
||||||
|
if old_topics:
|
||||||
|
await self._clear_retained_topics(old_topics)
|
||||||
|
self._discovery_topics.clear()
|
||||||
|
self._radio_key = pub_key
|
||||||
|
self._radio_name = new_name
|
||||||
|
# Remove stale discovery entries from the old identity (e.g.
|
||||||
|
# "unknown" placeholder from before the radio key was known),
|
||||||
|
# then re-publish with the real identity.
|
||||||
|
if old_key is not None and not old_topics:
|
||||||
|
await self._clear_retained_topics(
|
||||||
|
[t for t, _ in _radio_discovery_configs(self._prefix, old_key, "")]
|
||||||
|
)
|
||||||
|
await self._publish_discovery()
|
||||||
|
elif name_changed:
|
||||||
|
self._radio_name = new_name
|
||||||
|
await self._publish_discovery()
|
||||||
|
|
||||||
|
# Don't publish health state until we know the radio identity —
|
||||||
|
# otherwise we create a stale "unknown" device in HA.
|
||||||
|
if not self._radio_key:
|
||||||
|
return
|
||||||
|
|
||||||
|
nid = _node_id(self._radio_key)
|
||||||
|
payload: dict[str, Any] = {"connected": data.get("connected", False)}
|
||||||
|
for sensor in _RADIO_SENSORS:
|
||||||
|
field = sensor["field"]
|
||||||
|
if field is not None:
|
||||||
|
payload[field] = data.get(field)
|
||||||
|
|
||||||
|
# Normalize battery from millivolts to volts for consistency with
|
||||||
|
# repeater battery and the discovery config (unit: V, precision: 2).
|
||||||
|
battery_mv = data.get("battery_mv")
|
||||||
|
if battery_mv is not None:
|
||||||
|
payload["battery_volts"] = battery_mv / 1000.0
|
||||||
|
|
||||||
|
await self._publisher.publish(f"{self._prefix}/{nid}/health", payload)
|
||||||
|
|
||||||
|
async def on_contact(self, data: dict) -> None:
|
||||||
|
if not self._publisher.connected:
|
||||||
|
return
|
||||||
|
|
||||||
|
pub_key = data.get("public_key", "")
|
||||||
|
if pub_key not in self._tracked_contacts:
|
||||||
|
return
|
||||||
|
|
||||||
|
lat = data.get("lat")
|
||||||
|
lon = data.get("lon")
|
||||||
|
if lat is None or lon is None or (lat == 0.0 and lon == 0.0):
|
||||||
|
return
|
||||||
|
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
await self._publisher.publish(
|
||||||
|
f"{self._prefix}/{nid}/gps",
|
||||||
|
{
|
||||||
|
"latitude": lat,
|
||||||
|
"longitude": lon,
|
||||||
|
"gps_accuracy": 0,
|
||||||
|
"source_type": "gps",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
async def on_telemetry(self, data: dict) -> None:
|
||||||
|
if not self._publisher.connected:
|
||||||
|
return
|
||||||
|
|
||||||
|
pub_key = data.get("public_key", "")
|
||||||
|
if pub_key not in self._tracked_repeaters:
|
||||||
|
return
|
||||||
|
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
# Publish the full telemetry dict — HA sensors use value_template
|
||||||
|
# to extract individual fields
|
||||||
|
payload: dict[str, Any] = {}
|
||||||
|
for s in _REPEATER_SENSORS:
|
||||||
|
field = s["field"]
|
||||||
|
if field is not None:
|
||||||
|
payload[field] = data.get(field)
|
||||||
|
|
||||||
|
# Flatten LPP sensors into the same payload so HA value_templates work
|
||||||
|
lpp_sensors: list[dict] = data.get("lpp_sensors", [])
|
||||||
|
rediscover = False
|
||||||
|
for sensor in lpp_sensors:
|
||||||
|
key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
|
||||||
|
payload[key] = sensor.get("value")
|
||||||
|
# Check if discovery for this sensor has been published yet
|
||||||
|
expected_topic = f"homeassistant/sensor/meshcore_{nid}/{key}/config"
|
||||||
|
if expected_topic not in self._discovery_topics:
|
||||||
|
rediscover = True
|
||||||
|
|
||||||
|
# If new LPP sensor types appeared, re-publish discovery *before*
|
||||||
|
# the state payload so HA already knows the entity when the value arrives.
|
||||||
|
if rediscover:
|
||||||
|
await self._publish_discovery()
|
||||||
|
|
||||||
|
await self._publisher.publish(f"{self._prefix}/{nid}/telemetry", payload)
|
||||||
|
|
||||||
|
async def on_message(self, data: dict) -> None:
|
||||||
|
if not self._publisher.connected or not self._radio_key:
|
||||||
|
return
|
||||||
|
|
||||||
|
text = get_fanout_message_text(data)
|
||||||
|
nid = _node_id(self._radio_key)
|
||||||
|
await self._publisher.publish(
|
||||||
|
f"{self._prefix}/{nid}/events/message",
|
||||||
|
{
|
||||||
|
"event_type": "message_received",
|
||||||
|
"sender_name": data.get("sender_name", ""),
|
||||||
|
"sender_key": data.get("sender_key", ""),
|
||||||
|
"text": text,
|
||||||
|
"conversation_key": data.get("conversation_key", ""),
|
||||||
|
"message_type": data.get("type", ""),
|
||||||
|
"channel_name": data.get("channel_name"),
|
||||||
|
"outgoing": data.get("outgoing", False),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# ── Status ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@property
|
||||||
|
def status(self) -> str:
|
||||||
|
if not self.config.get("broker_host"):
|
||||||
|
return "disconnected"
|
||||||
|
if self._publisher.last_error:
|
||||||
|
return "error"
|
||||||
|
return "connected" if self._publisher.connected else "disconnected"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_error(self) -> str | None:
|
||||||
|
return self._publisher.last_error
|
||||||
@@ -148,6 +148,39 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
|||||||
"type": "image/png",
|
"type": "image/png",
|
||||||
"purpose": "maskable",
|
"purpose": "maskable",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}favicon.svg",
|
||||||
|
"sizes": "any",
|
||||||
|
"type": "image/svg+xml",
|
||||||
|
"purpose": "any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}favicon-256x256.png",
|
||||||
|
"sizes": "256x256",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"screenshots": [
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-wide.png",
|
||||||
|
"sizes": "1367x909",
|
||||||
|
"type": "image/png",
|
||||||
|
"form_factor": "wide",
|
||||||
|
"label": "RemoteTerm desktop view",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-mobile.png",
|
||||||
|
"sizes": "1170x2532",
|
||||||
|
"type": "image/png",
|
||||||
|
"label": "RemoteTerm mobile view",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-mobile-2.png",
|
||||||
|
"sizes": "750x1334",
|
||||||
|
"type": "image/png",
|
||||||
|
"label": "RemoteTerm mobile conversation",
|
||||||
|
},
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
|
|||||||
+13
-16
@@ -9,6 +9,7 @@ The path_len wire byte is packed as [hash_mode:2][hop_count:6]:
|
|||||||
Mode 3 (hash_size=4) is reserved and rejected.
|
Mode 3 (hash_size=4) is reserved and rejected.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Iterable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
|
||||||
MAX_PATH_SIZE = 64
|
MAX_PATH_SIZE = 64
|
||||||
@@ -246,30 +247,26 @@ def parse_explicit_hop_route(route_text: str) -> tuple[str, int, int]:
|
|||||||
return "".join(hops), len(hops), hash_size - 1
|
return "".join(hops), len(hops), hash_size - 1
|
||||||
|
|
||||||
|
|
||||||
async def bucket_path_hash_widths(cursor, *, batch_size: int = 500) -> dict[str, int | float]:
|
def bucket_path_hash_widths(rows: Iterable) -> dict[str, int | float]:
|
||||||
"""Bucket raw packet rows by hop hash width and return counts + percentages.
|
"""Bucket raw packet rows by hop hash width and return counts + percentages.
|
||||||
|
|
||||||
*cursor* must be an already-executed async cursor whose rows have a ``data``
|
*rows* must be an already-fetched list whose elements have a ``data``
|
||||||
column containing raw packet bytes.
|
column containing raw packet bytes.
|
||||||
"""
|
"""
|
||||||
single_byte = 0
|
single_byte = 0
|
||||||
double_byte = 0
|
double_byte = 0
|
||||||
triple_byte = 0
|
triple_byte = 0
|
||||||
|
|
||||||
while True:
|
for row in rows:
|
||||||
rows = await cursor.fetchmany(batch_size)
|
envelope = parse_packet_envelope(bytes(row["data"]))
|
||||||
if not rows:
|
if envelope is None:
|
||||||
break
|
continue
|
||||||
for row in rows:
|
if envelope.hash_size == 1:
|
||||||
envelope = parse_packet_envelope(bytes(row["data"]))
|
single_byte += 1
|
||||||
if envelope is None:
|
elif envelope.hash_size == 2:
|
||||||
continue
|
double_byte += 1
|
||||||
if envelope.hash_size == 1:
|
elif envelope.hash_size == 3:
|
||||||
single_byte += 1
|
triple_byte += 1
|
||||||
elif envelope.hash_size == 2:
|
|
||||||
double_byte += 1
|
|
||||||
elif envelope.hash_size == 3:
|
|
||||||
triple_byte += 1
|
|
||||||
|
|
||||||
total = single_byte + double_byte + triple_byte
|
total = single_byte + double_byte + triple_byte
|
||||||
if total == 0:
|
if total == 0:
|
||||||
|
|||||||
+41
-6
@@ -1295,7 +1295,13 @@ async def stop_background_contact_reconciliation() -> None:
|
|||||||
|
|
||||||
|
|
||||||
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
||||||
"""Return the contacts that would be loaded onto the radio right now."""
|
"""Return the contacts that would be loaded onto the radio right now.
|
||||||
|
|
||||||
|
Fill order:
|
||||||
|
1. Favorites (up to full capacity)
|
||||||
|
2. Most recently DM-active non-repeaters (sent or received, up to 80% refill target)
|
||||||
|
3. Most recently advertised non-repeaters (up to 80% refill target)
|
||||||
|
"""
|
||||||
app_settings = await AppSettingsRepository.get()
|
app_settings = await AppSettingsRepository.get()
|
||||||
max_contacts = _effective_radio_capacity(app_settings.max_radio_contacts)
|
max_contacts = _effective_radio_capacity(app_settings.max_radio_contacts)
|
||||||
refill_target, _full_sync_trigger = _compute_radio_contact_limits(max_contacts)
|
refill_target, _full_sync_trigger = _compute_radio_contact_limits(max_contacts)
|
||||||
@@ -1315,7 +1321,7 @@ async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
|||||||
break
|
break
|
||||||
|
|
||||||
if len(selected_contacts) < refill_target:
|
if len(selected_contacts) < refill_target:
|
||||||
for contact in await ContactRepository.get_recently_contacted_non_repeaters(
|
for contact in await ContactRepository.get_recently_dm_active_non_repeaters(
|
||||||
limit=max_contacts
|
limit=max_contacts
|
||||||
):
|
):
|
||||||
key = contact.public_key.lower()
|
key = contact.public_key.lower()
|
||||||
@@ -1354,8 +1360,8 @@ async def _sync_contacts_to_radio_inner(mc: MeshCore) -> dict:
|
|||||||
|
|
||||||
Fill order is:
|
Fill order is:
|
||||||
1. Favorite contacts
|
1. Favorite contacts
|
||||||
2. Most recently interacted-with non-repeaters
|
2. Most recently DM-active non-repeaters (sent or received)
|
||||||
3. Most recently advert-heard non-repeaters without interaction history
|
3. Most recently advert-heard non-repeaters
|
||||||
|
|
||||||
Favorite contacts are always reloaded first, up to the configured capacity.
|
Favorite contacts are always reloaded first, up to the configured capacity.
|
||||||
Additional non-favorite fill stops at the refill target (80% of capacity).
|
Additional non-favorite fill stops at the refill target (80% of capacity).
|
||||||
@@ -1489,8 +1495,8 @@ async def sync_recent_contacts_to_radio(force: bool = False, mc: MeshCore | None
|
|||||||
"""
|
"""
|
||||||
Load contacts to the radio for DM ACK support.
|
Load contacts to the radio for DM ACK support.
|
||||||
|
|
||||||
Fill order is favorites, then recently contacted non-repeaters,
|
Fill order is favorites, then recently DM-active non-repeaters (sent or
|
||||||
then recently advert-heard non-repeaters. Favorites are always reloaded
|
received), then recently advert-heard non-repeaters. Favorites are always reloaded
|
||||||
up to the configured capacity; additional non-favorite fill stops at the
|
up to the configured capacity; additional non-favorite fill stops at the
|
||||||
80% refill target.
|
80% refill target.
|
||||||
Only runs at most once every CONTACT_SYNC_THROTTLE_SECONDS unless forced.
|
Only runs at most once every CONTACT_SYNC_THROTTLE_SECONDS unless forced.
|
||||||
@@ -1584,6 +1590,35 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
|||||||
"full_events": status.get("full_evts", 0),
|
"full_events": status.get("full_evts", 0),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Best-effort LPP sensor fetch — failure here does not fail the overall
|
||||||
|
# collection; status telemetry is still recorded without sensor data.
|
||||||
|
try:
|
||||||
|
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||||
|
contact.public_key, timeout=10, min_timeout=5
|
||||||
|
)
|
||||||
|
if lpp_raw:
|
||||||
|
lpp_sensors = []
|
||||||
|
for entry in lpp_raw:
|
||||||
|
value = entry.get("value", 0)
|
||||||
|
# Skip multi-value sensors (GPS, accelerometer, etc.)
|
||||||
|
if isinstance(value, dict):
|
||||||
|
continue
|
||||||
|
lpp_sensors.append(
|
||||||
|
{
|
||||||
|
"channel": entry.get("channel", 0),
|
||||||
|
"type_name": str(entry.get("type", "unknown")),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if lpp_sensors:
|
||||||
|
data["lpp_sensors"] = lpp_sensors
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(
|
||||||
|
"Telemetry collect: LPP sensor fetch failed for %s (non-fatal): %s",
|
||||||
|
contact.public_key[:12],
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
timestamp = int(time.time())
|
timestamp = int(time.time())
|
||||||
await RepeaterTelemetryRepository.record(
|
await RepeaterTelemetryRepository.record(
|
||||||
|
|||||||
@@ -294,6 +294,28 @@ class ContactRepository:
|
|||||||
rows = await cursor.fetchall()
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_recently_dm_active_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||||
|
"""Get non-repeater contacts with the most recent DM activity (sent or received)."""
|
||||||
|
cursor = await db.conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT c.*
|
||||||
|
FROM contacts c
|
||||||
|
INNER JOIN (
|
||||||
|
SELECT conversation_key, MAX(received_at) AS last_dm
|
||||||
|
FROM messages
|
||||||
|
WHERE type = 'PRIV'
|
||||||
|
GROUP BY conversation_key
|
||||||
|
) m ON c.public_key = m.conversation_key
|
||||||
|
WHERE c.type != 2 AND length(c.public_key) = 64
|
||||||
|
ORDER BY m.last_dm DESC
|
||||||
|
LIMIT ?
|
||||||
|
""",
|
||||||
|
(limit,),
|
||||||
|
)
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_recently_advertised_non_repeaters(limit: int = 200) -> list[Contact]:
|
async def get_recently_advertised_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||||
"""Get recently advert-heard non-repeater contacts."""
|
"""Get recently advert-heard non-repeater contacts."""
|
||||||
|
|||||||
@@ -868,7 +868,8 @@ class MessageRepository:
|
|||||||
""",
|
""",
|
||||||
(conversation_key, t_24h),
|
(conversation_key, t_24h),
|
||||||
)
|
)
|
||||||
path_hash_width_24h = await bucket_path_hash_widths(cursor3)
|
rows3 = await cursor3.fetchall()
|
||||||
|
path_hash_width_24h = bucket_path_hash_widths(rows3)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"message_counts": message_counts,
|
"message_counts": message_counts,
|
||||||
|
|||||||
@@ -74,41 +74,52 @@ class RawPacketRepository:
|
|||||||
async def stream_all_undecrypted(
|
async def stream_all_undecrypted(
|
||||||
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
||||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||||
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches."""
|
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches.
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"SELECT id, data, timestamp FROM raw_packets WHERE message_id IS NULL ORDER BY timestamp ASC"
|
Uses keyset pagination so each batch is a fresh query with a fully
|
||||||
)
|
consumed cursor — no open statement held across yield boundaries.
|
||||||
try:
|
"""
|
||||||
while True:
|
last_id = -1
|
||||||
rows = await cursor.fetchmany(batch_size)
|
while True:
|
||||||
if not rows:
|
cursor = await db.conn.execute(
|
||||||
break
|
"SELECT id, data, timestamp FROM raw_packets "
|
||||||
for row in rows:
|
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||||
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
(last_id, batch_size),
|
||||||
finally:
|
)
|
||||||
|
rows = await cursor.fetchall()
|
||||||
await cursor.close()
|
await cursor.close()
|
||||||
|
if not rows:
|
||||||
|
break
|
||||||
|
for row in rows:
|
||||||
|
last_id = row["id"]
|
||||||
|
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def stream_undecrypted_text_messages(
|
async def stream_undecrypted_text_messages(
|
||||||
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
||||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||||
"""Yield undecrypted TEXT_MESSAGE packets in bounded-size batches."""
|
"""Yield undecrypted TEXT_MESSAGE packets in bounded-size batches.
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"SELECT id, data, timestamp FROM raw_packets WHERE message_id IS NULL ORDER BY timestamp ASC"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
rows = await cursor.fetchmany(batch_size)
|
|
||||||
if not rows:
|
|
||||||
break
|
|
||||||
|
|
||||||
for row in rows:
|
Uses keyset pagination so each batch is a fresh query with a fully
|
||||||
data = bytes(row["data"])
|
consumed cursor — no open statement held across yield boundaries.
|
||||||
payload_type = get_packet_payload_type(data)
|
"""
|
||||||
if payload_type == PayloadType.TEXT_MESSAGE:
|
last_id = -1
|
||||||
yield (row["id"], data, row["timestamp"])
|
while True:
|
||||||
finally:
|
cursor = await db.conn.execute(
|
||||||
|
"SELECT id, data, timestamp FROM raw_packets "
|
||||||
|
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||||
|
(last_id, batch_size),
|
||||||
|
)
|
||||||
|
rows = await cursor.fetchall()
|
||||||
await cursor.close()
|
await cursor.close()
|
||||||
|
if not rows:
|
||||||
|
break
|
||||||
|
for row in rows:
|
||||||
|
last_id = row["id"]
|
||||||
|
data = bytes(row["data"])
|
||||||
|
payload_type = get_packet_payload_type(data)
|
||||||
|
if payload_type == PayloadType.TEXT_MESSAGE:
|
||||||
|
yield (row["id"], data, row["timestamp"])
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_undecrypted_text_messages(
|
async def count_undecrypted_text_messages(
|
||||||
|
|||||||
@@ -73,3 +73,24 @@ class RepeaterTelemetryRepository:
|
|||||||
}
|
}
|
||||||
for row in rows
|
for row in rows
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_latest(public_key: str) -> dict | None:
|
||||||
|
"""Return the most recent telemetry row for a repeater, or None."""
|
||||||
|
cursor = await db.conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT timestamp, data
|
||||||
|
FROM repeater_telemetry_history
|
||||||
|
WHERE public_key = ?
|
||||||
|
ORDER BY timestamp DESC
|
||||||
|
LIMIT 1
|
||||||
|
""",
|
||||||
|
(public_key,),
|
||||||
|
)
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
return {
|
||||||
|
"timestamp": row["timestamp"],
|
||||||
|
"data": json.loads(row["data"]),
|
||||||
|
}
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ SECONDS_1H = 3600
|
|||||||
SECONDS_24H = 86400
|
SECONDS_24H = 86400
|
||||||
SECONDS_72H = 259200
|
SECONDS_72H = 259200
|
||||||
SECONDS_7D = 604800
|
SECONDS_7D = 604800
|
||||||
RAW_PACKET_STATS_BATCH_SIZE = 500
|
|
||||||
|
|
||||||
|
|
||||||
class AppSettingsRepository:
|
class AppSettingsRepository:
|
||||||
@@ -302,7 +301,8 @@ class StatisticsRepository:
|
|||||||
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
||||||
(now - SECONDS_24H,),
|
(now - SECONDS_24H,),
|
||||||
)
|
)
|
||||||
return await bucket_path_hash_widths(cursor, batch_size=RAW_PACKET_STATS_BATCH_SIZE)
|
rows = await cursor.fetchall()
|
||||||
|
return bucket_path_hash_widths(rows)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_all() -> dict:
|
async def get_all() -> dict:
|
||||||
|
|||||||
+26
-2
@@ -16,7 +16,16 @@ from app.repository.fanout import FanoutConfigRepository
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
||||||
|
|
||||||
_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs", "map_upload"}
|
_VALID_TYPES = {
|
||||||
|
"mqtt_private",
|
||||||
|
"mqtt_community",
|
||||||
|
"mqtt_ha",
|
||||||
|
"bot",
|
||||||
|
"webhook",
|
||||||
|
"apprise",
|
||||||
|
"sqs",
|
||||||
|
"map_upload",
|
||||||
|
}
|
||||||
|
|
||||||
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
||||||
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
||||||
@@ -96,6 +105,8 @@ def _validate_and_normalize_config(config_type: str, config: dict) -> dict:
|
|||||||
_validate_sqs_config(normalized)
|
_validate_sqs_config(normalized)
|
||||||
elif config_type == "map_upload":
|
elif config_type == "map_upload":
|
||||||
_validate_map_upload_config(normalized)
|
_validate_map_upload_config(normalized)
|
||||||
|
elif config_type == "mqtt_ha":
|
||||||
|
_validate_mqtt_ha_config(normalized)
|
||||||
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
@@ -318,6 +329,19 @@ def _validate_map_upload_config(config: dict) -> None:
|
|||||||
config["geofence_radius_km"] = radius
|
config["geofence_radius_km"] = radius
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_mqtt_ha_config(config: dict) -> None:
|
||||||
|
"""Validate mqtt_ha config blob."""
|
||||||
|
if not config.get("broker_host"):
|
||||||
|
raise HTTPException(status_code=400, detail="broker_host is required for mqtt_ha")
|
||||||
|
port = config.get("broker_port", 1883)
|
||||||
|
if not isinstance(port, int) or port < 1 or port > 65535:
|
||||||
|
raise HTTPException(status_code=400, detail="broker_port must be between 1 and 65535")
|
||||||
|
for field in ("tracked_contacts", "tracked_repeaters"):
|
||||||
|
value = config.get(field)
|
||||||
|
if value is not None and not isinstance(value, list):
|
||||||
|
raise HTTPException(status_code=400, detail=f"{field} must be a list of public keys")
|
||||||
|
|
||||||
|
|
||||||
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
||||||
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
||||||
if config_type == "mqtt_community":
|
if config_type == "mqtt_community":
|
||||||
@@ -326,7 +350,7 @@ def _enforce_scope(config_type: str, scope: dict) -> dict:
|
|||||||
return {"messages": "none", "raw_packets": "all"}
|
return {"messages": "none", "raw_packets": "all"}
|
||||||
if config_type == "bot":
|
if config_type == "bot":
|
||||||
return {"messages": "all", "raw_packets": "none"}
|
return {"messages": "all", "raw_packets": "none"}
|
||||||
if config_type in ("webhook", "apprise"):
|
if config_type in ("webhook", "apprise", "mqtt_ha"):
|
||||||
messages = scope.get("messages", "all")
|
messages = scope.get("messages", "all")
|
||||||
if messages not in ("all", "none") and not isinstance(messages, dict):
|
if messages not in ("all", "none") and not isinstance(messages, dict):
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
|
|||||||
@@ -94,6 +94,7 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
contact = await _resolve_contact_or_404(public_key)
|
contact = await _resolve_contact_or_404(public_key)
|
||||||
_require_repeater(contact)
|
_require_repeater(contact)
|
||||||
|
|
||||||
|
lpp_raw = None
|
||||||
async with radio_manager.radio_operation(
|
async with radio_manager.radio_operation(
|
||||||
"repeater_status", pause_polling=True, suspend_auto_fetch=True
|
"repeater_status", pause_polling=True, suspend_auto_fetch=True
|
||||||
) as mc:
|
) as mc:
|
||||||
@@ -102,6 +103,15 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
|
|
||||||
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
||||||
|
|
||||||
|
# Best-effort LPP sensor fetch while we still hold the lock
|
||||||
|
if status is not None:
|
||||||
|
try:
|
||||||
|
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||||
|
contact.public_key, timeout=10, min_timeout=5
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug("LPP sensor fetch failed for %s (non-fatal): %s", public_key[:12], e)
|
||||||
|
|
||||||
if status is None:
|
if status is None:
|
||||||
raise HTTPException(status_code=504, detail="No status response from repeater")
|
raise HTTPException(status_code=504, detail="No status response from repeater")
|
||||||
|
|
||||||
@@ -128,6 +138,24 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
# Record to telemetry history as a JSON blob (best-effort)
|
# Record to telemetry history as a JSON blob (best-effort)
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
status_dict = response.model_dump(exclude={"telemetry_history"})
|
status_dict = response.model_dump(exclude={"telemetry_history"})
|
||||||
|
|
||||||
|
# Attach scalar LPP sensors to the stored snapshot (same logic as auto-collect)
|
||||||
|
if lpp_raw:
|
||||||
|
lpp_sensors = []
|
||||||
|
for entry in lpp_raw:
|
||||||
|
value = entry.get("value", 0)
|
||||||
|
if isinstance(value, dict):
|
||||||
|
continue
|
||||||
|
lpp_sensors.append(
|
||||||
|
{
|
||||||
|
"channel": entry.get("channel", 0),
|
||||||
|
"type_name": str(entry.get("type", "unknown")),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if lpp_sensors:
|
||||||
|
status_dict["lpp_sensors"] = lpp_sensors
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await RepeaterTelemetryRepository.record(
|
await RepeaterTelemetryRepository.record(
|
||||||
public_key=contact.public_key,
|
public_key=contact.public_key,
|
||||||
|
|||||||
+6
-1
@@ -13,8 +13,13 @@
|
|||||||
<link rel="icon" type="image/png" href="./favicon-96x96.png" sizes="96x96" />
|
<link rel="icon" type="image/png" href="./favicon-96x96.png" sizes="96x96" />
|
||||||
<link rel="shortcut icon" href="./favicon.ico" />
|
<link rel="shortcut icon" href="./favicon.ico" />
|
||||||
<link rel="apple-touch-icon" sizes="180x180" href="./apple-touch-icon.png" />
|
<link rel="apple-touch-icon" sizes="180x180" href="./apple-touch-icon.png" />
|
||||||
<link rel="manifest" href="./site.webmanifest" />
|
<link rel="manifest" href="./site.webmanifest" crossorigin="use-credentials" />
|
||||||
<script>
|
<script>
|
||||||
|
// Register minimal service worker for PWA installability.
|
||||||
|
if ('serviceWorker' in navigator) {
|
||||||
|
navigator.serviceWorker.register('./sw.js').catch(function() {});
|
||||||
|
}
|
||||||
|
|
||||||
// Start critical data fetches before React/Vite JS loads.
|
// Start critical data fetches before React/Vite JS loads.
|
||||||
// Must be in <head> BEFORE the module script so the browser queues these
|
// Must be in <head> BEFORE the module script so the browser queues these
|
||||||
// fetches before it discovers and starts downloading the JS bundle.
|
// fetches before it discovers and starts downloading the JS bundle.
|
||||||
|
|||||||
Generated
+2
-2
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.8.0",
|
"version": "3.11.0",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.8.0",
|
"version": "3.11.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/lang-python": "^6.2.1",
|
"@codemirror/lang-python": "^6.2.1",
|
||||||
"@codemirror/theme-one-dark": "^6.1.3",
|
"@codemirror/theme-one-dark": "^6.1.3",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "3.11.0",
|
"version": "3.11.3",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
|
|||||||
Binary file not shown.
|
After Width: | Height: | Size: 122 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 426 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 109 KiB |
@@ -0,0 +1,12 @@
|
|||||||
|
// Minimal service worker required for PWA installability.
|
||||||
|
// No caching — this app is network-dependent. All fetches pass through.
|
||||||
|
|
||||||
|
self.addEventListener("install", function () {
|
||||||
|
self.skipWaiting();
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener("activate", function (event) {
|
||||||
|
event.waitUntil(self.clients.claim());
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener("fetch", function () {});
|
||||||
@@ -43,6 +43,7 @@ interface CommandPaletteProps {
|
|||||||
|
|
||||||
interface Searchable {
|
interface Searchable {
|
||||||
searchText: string;
|
searchText: string;
|
||||||
|
keyText?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface SearchableContact extends Searchable {
|
interface SearchableContact extends Searchable {
|
||||||
@@ -106,7 +107,9 @@ function filterList<T extends Searchable>(items: T[], query: string): T[] {
|
|||||||
if (!query) return items.slice(0, MAX_PER_GROUP);
|
if (!query) return items.slice(0, MAX_PER_GROUP);
|
||||||
const results: T[] = [];
|
const results: T[] = [];
|
||||||
for (const item of items) {
|
for (const item of items) {
|
||||||
if (fuzzyMatch(item.searchText, query)) {
|
const nameMatch = fuzzyMatch(item.searchText, query);
|
||||||
|
const keyMatch = item.keyText ? item.keyText.startsWith(query) : false;
|
||||||
|
if (nameMatch || keyMatch) {
|
||||||
results.push(item);
|
results.push(item);
|
||||||
if (results.length >= MAX_PER_GROUP) break;
|
if (results.length >= MAX_PER_GROUP) break;
|
||||||
}
|
}
|
||||||
@@ -159,7 +162,8 @@ export function CommandPalette({
|
|||||||
const entry: SearchableContact = {
|
const entry: SearchableContact = {
|
||||||
contact: c,
|
contact: c,
|
||||||
displayName,
|
displayName,
|
||||||
searchText: `${displayName} ${c.public_key}`.toLowerCase(),
|
searchText: displayName.toLowerCase(),
|
||||||
|
keyText: c.public_key.toLowerCase(),
|
||||||
};
|
};
|
||||||
if (c.type === CONTACT_TYPE_REPEATER) {
|
if (c.type === CONTACT_TYPE_REPEATER) {
|
||||||
(c.favorite ? fr : rp).push(entry);
|
(c.favorite ? fr : rp).push(entry);
|
||||||
@@ -174,7 +178,8 @@ export function CommandPalette({
|
|||||||
for (const ch of channels) {
|
for (const ch of channels) {
|
||||||
const entry: SearchableChannel = {
|
const entry: SearchableChannel = {
|
||||||
channel: ch,
|
channel: ch,
|
||||||
searchText: `${ch.name} ${ch.key}`.toLowerCase(),
|
searchText: ch.name.toLowerCase(),
|
||||||
|
keyText: ch.key.toLowerCase(),
|
||||||
};
|
};
|
||||||
(ch.favorite ? fch : rch).push(entry);
|
(ch.favorite ? fch : rch).push(entry);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -265,6 +265,12 @@ export function Sidebar({
|
|||||||
const sortContactsByOrder = useCallback(
|
const sortContactsByOrder = useCallback(
|
||||||
(items: Contact[], order: SortOrder) =>
|
(items: Contact[], order: SortOrder) =>
|
||||||
[...items].sort((a, b) => {
|
[...items].sort((a, b) => {
|
||||||
|
// Unread DM contacts always float to the top
|
||||||
|
const unreadA = unreadCounts[getStateKey('contact', a.public_key)] || 0;
|
||||||
|
const unreadB = unreadCounts[getStateKey('contact', b.public_key)] || 0;
|
||||||
|
if (unreadA > 0 && unreadB === 0) return -1;
|
||||||
|
if (unreadA === 0 && unreadB > 0) return 1;
|
||||||
|
|
||||||
if (order === 'recent') {
|
if (order === 'recent') {
|
||||||
const timeA = getContactRecentTime(a);
|
const timeA = getContactRecentTime(a);
|
||||||
const timeB = getContactRecentTime(b);
|
const timeB = getContactRecentTime(b);
|
||||||
@@ -274,7 +280,7 @@ export function Sidebar({
|
|||||||
}
|
}
|
||||||
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
||||||
}),
|
}),
|
||||||
[getContactRecentTime]
|
[getContactRecentTime, unreadCounts]
|
||||||
);
|
);
|
||||||
|
|
||||||
const sortRepeatersByOrder = useCallback(
|
const sortRepeatersByOrder = useCallback(
|
||||||
@@ -364,7 +370,7 @@ export function Sidebar({
|
|||||||
() =>
|
() =>
|
||||||
query
|
query
|
||||||
? sortedChannels.filter(
|
? sortedChannels.filter(
|
||||||
(c) => c.name.toLowerCase().includes(query) || c.key.toLowerCase().includes(query)
|
(c) => c.name.toLowerCase().includes(query) || c.key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: sortedChannels,
|
: sortedChannels,
|
||||||
[sortedChannels, query]
|
[sortedChannels, query]
|
||||||
@@ -374,7 +380,8 @@ export function Sidebar({
|
|||||||
const visible = sortedNonRepeaterContacts.filter((c) => !isContactBlocked(c));
|
const visible = sortedNonRepeaterContacts.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedNonRepeaterContacts, query, isContactBlocked]);
|
}, [sortedNonRepeaterContacts, query, isContactBlocked]);
|
||||||
@@ -383,7 +390,8 @@ export function Sidebar({
|
|||||||
const visible = sortedRooms.filter((c) => !isContactBlocked(c));
|
const visible = sortedRooms.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedRooms, query, isContactBlocked]);
|
}, [sortedRooms, query, isContactBlocked]);
|
||||||
@@ -392,7 +400,8 @@ export function Sidebar({
|
|||||||
const visible = sortedRepeaters.filter((c) => !isContactBlocked(c));
|
const visible = sortedRepeaters.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedRepeaters, query, isContactBlocked]);
|
}, [sortedRepeaters, query, isContactBlocked]);
|
||||||
|
|||||||
@@ -224,8 +224,8 @@ export function TracePane({ contacts, config, onRunTracePath }: TracePaneProps)
|
|||||||
const matching = query
|
const matching = query
|
||||||
? repeaters.filter(
|
? repeaters.filter(
|
||||||
(contact) =>
|
(contact) =>
|
||||||
contact.public_key.toLowerCase().includes(query) ||
|
(contact.name ?? '').toLowerCase().includes(query) ||
|
||||||
(contact.name ?? '').toLowerCase().includes(query)
|
contact.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: repeaters;
|
: repeaters;
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
||||||
|
|
||||||
export function LppTelemetryPane({
|
export function LppTelemetryPane({
|
||||||
@@ -12,6 +13,7 @@ export function LppTelemetryPane({
|
|||||||
onRefresh: () => void;
|
onRefresh: () => void;
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
}) {
|
}) {
|
||||||
|
const { distanceUnit } = useDistanceUnit();
|
||||||
return (
|
return (
|
||||||
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||||
{!data ? (
|
{!data ? (
|
||||||
@@ -21,7 +23,7 @@ export function LppTelemetryPane({
|
|||||||
) : (
|
) : (
|
||||||
<div className="space-y-0.5">
|
<div className="space-y-0.5">
|
||||||
{data.sensors.map((sensor, i) => (
|
{data.sensors.map((sensor, i) => (
|
||||||
<LppSensorRow key={i} sensor={sensor} />
|
<LppSensorRow key={i} sensor={sensor} unitPref={distanceUnit} />
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -1,6 +1,15 @@
|
|||||||
import { RepeaterPane, NotFetched, KvRow } from './repeaterPaneShared';
|
import { RepeaterPane, NotFetched, KvRow } from './repeaterPaneShared';
|
||||||
import type { RepeaterOwnerInfoResponse, PaneState } from '../../types';
|
import type { RepeaterOwnerInfoResponse, PaneState } from '../../types';
|
||||||
|
|
||||||
|
function LabeledBlock({ label, value }: { label: string; value: string }) {
|
||||||
|
return (
|
||||||
|
<div className="py-0.5">
|
||||||
|
<span className="text-sm text-muted-foreground whitespace-nowrap">{label}</span>
|
||||||
|
<p className="text-sm font-medium mt-0.5 break-words">{value}</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export function OwnerInfoPane({
|
export function OwnerInfoPane({
|
||||||
data,
|
data,
|
||||||
state,
|
state,
|
||||||
@@ -17,8 +26,8 @@ export function OwnerInfoPane({
|
|||||||
{!data ? (
|
{!data ? (
|
||||||
<NotFetched />
|
<NotFetched />
|
||||||
) : (
|
) : (
|
||||||
<div className="break-all">
|
<div className="space-y-1">
|
||||||
<KvRow label="Owner Info" value={data.owner_info ?? '—'} />
|
<LabeledBlock label="Owner Info" value={data.owner_info ?? '—'} />
|
||||||
<KvRow label="Guest Password" value={data.guest_password ?? '—'} />
|
<KvRow label="Guest Password" value={data.guest_password ?? '—'} />
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -11,19 +11,37 @@ import {
|
|||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
import type { TelemetryHistoryEntry, Contact } from '../../types';
|
import { lppDisplayUnit } from './repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
|
import type { TelemetryHistoryEntry, TelemetryLppSensor, Contact } from '../../types';
|
||||||
|
|
||||||
const MAX_TRACKED = 8;
|
const MAX_TRACKED = 8;
|
||||||
|
|
||||||
type Metric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
type BuiltinMetric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
||||||
|
|
||||||
const METRIC_CONFIG: Record<Metric, { label: string; unit: string; color: string }> = {
|
interface MetricConfig {
|
||||||
|
label: string;
|
||||||
|
unit: string;
|
||||||
|
color: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const BUILTIN_METRIC_CONFIG: Record<BuiltinMetric, MetricConfig> = {
|
||||||
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
||||||
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
||||||
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
||||||
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const BUILTIN_METRICS: BuiltinMetric[] = Object.keys(BUILTIN_METRIC_CONFIG) as BuiltinMetric[];
|
||||||
|
|
||||||
|
// Stable color rotation for dynamic LPP sensors
|
||||||
|
const LPP_COLORS = ['#ec4899', '#14b8a6', '#f97316', '#6366f1', '#84cc16', '#e11d48'];
|
||||||
|
|
||||||
|
/** Build a flat data key for an LPP sensor: lpp_{type_name}_ch{channel} */
|
||||||
|
function lppKey(s: TelemetryLppSensor): string {
|
||||||
|
return `lpp_${s.type_name}_ch${s.channel}`;
|
||||||
|
}
|
||||||
|
|
||||||
const TOOLTIP_STYLE = {
|
const TOOLTIP_STYLE = {
|
||||||
contentStyle: {
|
contentStyle: {
|
||||||
backgroundColor: 'hsl(var(--popover))',
|
backgroundColor: 'hsl(var(--popover))',
|
||||||
@@ -66,18 +84,62 @@ export function TelemetryHistoryPane({
|
|||||||
trackedTelemetryRepeaters,
|
trackedTelemetryRepeaters,
|
||||||
onToggleTrackedTelemetry,
|
onToggleTrackedTelemetry,
|
||||||
}: TelemetryHistoryPaneProps) {
|
}: TelemetryHistoryPaneProps) {
|
||||||
const [metric, setMetric] = useState<Metric>('battery_volts');
|
const { distanceUnit } = useDistanceUnit();
|
||||||
|
const [metric, setMetric] = useState<string>('battery_volts');
|
||||||
const [toggling, setToggling] = useState(false);
|
const [toggling, setToggling] = useState(false);
|
||||||
|
|
||||||
const isTracked = trackedTelemetryRepeaters.includes(publicKey);
|
const isTracked = trackedTelemetryRepeaters.includes(publicKey);
|
||||||
const slotsFull = trackedTelemetryRepeaters.length >= MAX_TRACKED && !isTracked;
|
const slotsFull = trackedTelemetryRepeaters.length >= MAX_TRACKED && !isTracked;
|
||||||
|
|
||||||
const config = METRIC_CONFIG[metric];
|
// Discover unique LPP sensors across all history entries
|
||||||
|
const lppMetrics = useMemo(() => {
|
||||||
|
const seen = new Map<string, { type_name: string; channel: number }>();
|
||||||
|
for (const e of entries) {
|
||||||
|
for (const s of e.data.lpp_sensors ?? []) {
|
||||||
|
const k = lppKey(s);
|
||||||
|
if (!seen.has(k)) seen.set(k, { type_name: s.type_name, channel: s.channel });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const result: { key: string; config: MetricConfig; type_name: string; channel: number }[] = [];
|
||||||
|
let colorIdx = 0;
|
||||||
|
for (const [k, info] of seen) {
|
||||||
|
const label =
|
||||||
|
info.type_name.charAt(0).toUpperCase() +
|
||||||
|
info.type_name.slice(1).replace(/_/g, ' ') +
|
||||||
|
` Ch${info.channel}`;
|
||||||
|
const { unit } = lppDisplayUnit(info.type_name, 0, distanceUnit);
|
||||||
|
result.push({
|
||||||
|
key: k,
|
||||||
|
config: { label, unit, color: LPP_COLORS[colorIdx % LPP_COLORS.length] },
|
||||||
|
type_name: info.type_name,
|
||||||
|
channel: info.channel,
|
||||||
|
});
|
||||||
|
colorIdx++;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}, [entries, distanceUnit]);
|
||||||
|
|
||||||
|
const allMetricKeys = useMemo(
|
||||||
|
() => [...BUILTIN_METRICS, ...lppMetrics.map((m) => m.key)],
|
||||||
|
[lppMetrics]
|
||||||
|
);
|
||||||
|
|
||||||
|
// If the selected metric disappears (e.g. different repeater), reset to default
|
||||||
|
const activeMetric = allMetricKeys.includes(metric) ? metric : 'battery_volts';
|
||||||
|
|
||||||
|
const isBuiltin = BUILTIN_METRICS.includes(activeMetric as BuiltinMetric);
|
||||||
|
const activeConfig: MetricConfig = isBuiltin
|
||||||
|
? BUILTIN_METRIC_CONFIG[activeMetric as BuiltinMetric]
|
||||||
|
: (lppMetrics.find((m) => m.key === activeMetric)?.config ?? {
|
||||||
|
label: activeMetric,
|
||||||
|
unit: '',
|
||||||
|
color: '#888',
|
||||||
|
});
|
||||||
|
|
||||||
const chartData = useMemo(() => {
|
const chartData = useMemo(() => {
|
||||||
return entries.map((e) => {
|
return entries.map((e) => {
|
||||||
const d = e.data;
|
const d = e.data;
|
||||||
return {
|
const point: Record<string, number | undefined> = {
|
||||||
timestamp: e.timestamp,
|
timestamp: e.timestamp,
|
||||||
battery_volts: d.battery_volts,
|
battery_volts: d.battery_volts,
|
||||||
noise_floor_dbm: d.noise_floor_dbm,
|
noise_floor_dbm: d.noise_floor_dbm,
|
||||||
@@ -85,19 +147,27 @@ export function TelemetryHistoryPane({
|
|||||||
packets_sent: d.packets_sent,
|
packets_sent: d.packets_sent,
|
||||||
uptime_seconds: d.uptime_seconds,
|
uptime_seconds: d.uptime_seconds,
|
||||||
};
|
};
|
||||||
|
// Flatten LPP sensors into the point, converting units as needed
|
||||||
|
for (const s of d.lpp_sensors ?? []) {
|
||||||
|
if (typeof s.value === 'number') {
|
||||||
|
point[lppKey(s)] = lppDisplayUnit(s.type_name, s.value, distanceUnit).value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return point;
|
||||||
});
|
});
|
||||||
}, [entries]);
|
}, [entries, distanceUnit]);
|
||||||
|
|
||||||
const dataKeys = metric === 'packets' ? ['packets_received', 'packets_sent'] : [metric];
|
const dataKeys =
|
||||||
|
activeMetric === 'packets' ? ['packets_received', 'packets_sent'] : [activeMetric];
|
||||||
|
|
||||||
const yDomain = useMemo<[number, number] | undefined>(() => {
|
const yDomain = useMemo<[number, number] | undefined>(() => {
|
||||||
if (metric !== 'battery_volts' || chartData.length === 0) return undefined;
|
if (activeMetric !== 'battery_volts' || chartData.length === 0) return undefined;
|
||||||
const values = chartData.map((d) => d.battery_volts).filter((v) => v != null) as number[];
|
const values = chartData.map((d) => d.battery_volts).filter((v) => v != null) as number[];
|
||||||
if (values.length === 0) return [3, 5];
|
if (values.length === 0) return [3, 5];
|
||||||
const lo = Math.min(...values);
|
const lo = Math.min(...values);
|
||||||
const hi = Math.max(...values);
|
const hi = Math.max(...values);
|
||||||
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
||||||
}, [metric, chartData]);
|
}, [activeMetric, chartData]);
|
||||||
|
|
||||||
const handleToggle = async () => {
|
const handleToggle = async () => {
|
||||||
setToggling(true);
|
setToggling(true);
|
||||||
@@ -181,20 +251,35 @@ export function TelemetryHistoryPane({
|
|||||||
<Separator className="mb-3" />
|
<Separator className="mb-3" />
|
||||||
|
|
||||||
{/* Metric selector */}
|
{/* Metric selector */}
|
||||||
<div className="flex gap-1 mb-2">
|
<div className="flex flex-wrap gap-1 mb-2">
|
||||||
{(Object.keys(METRIC_CONFIG) as Metric[]).map((m) => (
|
{BUILTIN_METRICS.map((m) => (
|
||||||
<button
|
<button
|
||||||
key={m}
|
key={m}
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => setMetric(m)}
|
onClick={() => setMetric(m)}
|
||||||
className={cn(
|
className={cn(
|
||||||
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||||
metric === m
|
activeMetric === m
|
||||||
? 'bg-primary text-primary-foreground'
|
? 'bg-primary text-primary-foreground'
|
||||||
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{METRIC_CONFIG[m].label}
|
{BUILTIN_METRIC_CONFIG[m].label}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
{lppMetrics.map((m) => (
|
||||||
|
<button
|
||||||
|
key={m.key}
|
||||||
|
type="button"
|
||||||
|
onClick={() => setMetric(m.key)}
|
||||||
|
className={cn(
|
||||||
|
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||||
|
activeMetric === m.key
|
||||||
|
? 'bg-primary text-primary-foreground'
|
||||||
|
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{m.config.label}
|
||||||
</button>
|
</button>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
@@ -221,7 +306,9 @@ export function TelemetryHistoryPane({
|
|||||||
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
||||||
tickLine={false}
|
tickLine={false}
|
||||||
axisLine={false}
|
axisLine={false}
|
||||||
tickFormatter={(v) => (metric === 'uptime_seconds' ? formatUptime(v) : `${v}`)}
|
tickFormatter={(v) =>
|
||||||
|
activeMetric === 'uptime_seconds' ? formatUptime(v) : `${v}`
|
||||||
|
}
|
||||||
/>
|
/>
|
||||||
<RechartsTooltip
|
<RechartsTooltip
|
||||||
{...TOOLTIP_STYLE}
|
{...TOOLTIP_STYLE}
|
||||||
@@ -234,15 +321,20 @@ export function TelemetryHistoryPane({
|
|||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
formatter={(value: any, name: any) => {
|
formatter={(value: any, name: any) => {
|
||||||
const numVal = typeof value === 'number' ? value : Number(value);
|
const numVal = typeof value === 'number' ? value : Number(value);
|
||||||
const display = metric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
const display =
|
||||||
|
activeMetric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
||||||
const suffix =
|
const suffix =
|
||||||
metric === 'uptime_seconds' ? '' : config.unit ? ` ${config.unit}` : '';
|
activeMetric === 'uptime_seconds'
|
||||||
|
? ''
|
||||||
|
: activeConfig.unit
|
||||||
|
? ` ${activeConfig.unit}`
|
||||||
|
: '';
|
||||||
const label =
|
const label =
|
||||||
metric === 'packets'
|
activeMetric === 'packets'
|
||||||
? name === 'packets_received'
|
? name === 'packets_received'
|
||||||
? 'Received'
|
? 'Received'
|
||||||
: 'Sent'
|
: 'Sent'
|
||||||
: config.label;
|
: activeConfig.label;
|
||||||
return [`${display}${suffix}`, label];
|
return [`${display}${suffix}`, label];
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
@@ -251,19 +343,41 @@ export function TelemetryHistoryPane({
|
|||||||
key={key}
|
key={key}
|
||||||
type="linear"
|
type="linear"
|
||||||
dataKey={key}
|
dataKey={key}
|
||||||
stroke={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
stroke={
|
||||||
fill={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color
|
||||||
|
}
|
||||||
|
fill={
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color
|
||||||
|
}
|
||||||
fillOpacity={0.15}
|
fillOpacity={0.15}
|
||||||
strokeWidth={1.5}
|
strokeWidth={1.5}
|
||||||
dot={{
|
dot={{
|
||||||
r: 4,
|
r: 4,
|
||||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
fill:
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color,
|
||||||
strokeWidth: 1.5,
|
strokeWidth: 1.5,
|
||||||
stroke: 'hsl(var(--popover))',
|
stroke: 'hsl(var(--popover))',
|
||||||
}}
|
}}
|
||||||
activeDot={{
|
activeDot={{
|
||||||
r: 6,
|
r: 6,
|
||||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
fill:
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color,
|
||||||
strokeWidth: 2,
|
strokeWidth: 2,
|
||||||
stroke: 'hsl(var(--popover))',
|
stroke: 'hsl(var(--popover))',
|
||||||
}}
|
}}
|
||||||
|
|||||||
@@ -223,11 +223,26 @@ export const LPP_UNIT_MAP: Record<string, string> = {
|
|||||||
colour: '',
|
colour: '',
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the display unit and converted value for an LPP sensor,
|
||||||
|
* respecting the user's unit preference for temperature.
|
||||||
|
*/
|
||||||
|
export function lppDisplayUnit(
|
||||||
|
typeName: string,
|
||||||
|
value: number,
|
||||||
|
unitPref: 'metric' | 'imperial' | string
|
||||||
|
): { unit: string; value: number } {
|
||||||
|
if (typeName === 'temperature' && unitPref === 'imperial') {
|
||||||
|
return { unit: '°F', value: (value * 9) / 5 + 32 };
|
||||||
|
}
|
||||||
|
return { unit: LPP_UNIT_MAP[typeName] ?? '', value };
|
||||||
|
}
|
||||||
|
|
||||||
export function formatLppLabel(typeName: string): string {
|
export function formatLppLabel(typeName: string): string {
|
||||||
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
||||||
}
|
}
|
||||||
|
|
||||||
export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
export function LppSensorRow({ sensor, unitPref }: { sensor: LppSensor; unitPref?: string }) {
|
||||||
const label = formatLppLabel(sensor.type_name);
|
const label = formatLppLabel(sensor.type_name);
|
||||||
|
|
||||||
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
||||||
@@ -248,10 +263,10 @@ export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const unit = LPP_UNIT_MAP[sensor.type_name] ?? '';
|
const display = lppDisplayUnit(sensor.type_name, sensor.value as number, unitPref ?? 'metric');
|
||||||
const formatted =
|
const formatted =
|
||||||
typeof sensor.value === 'number'
|
typeof sensor.value === 'number'
|
||||||
? `${sensor.value % 1 === 0 ? sensor.value : sensor.value.toFixed(2)}${unit ? ` ${unit}` : ''}`
|
? `${display.value % 1 === 0 ? display.value : display.value.toFixed(2)}${display.unit ? ` ${display.unit}` : ''}`
|
||||||
: String(sensor.value);
|
: String(sensor.value);
|
||||||
|
|
||||||
return <KvRow label={label} value={formatted} />;
|
return <KvRow label={label} value={formatted} />;
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ import { Separator } from '../ui/separator';
|
|||||||
import { toast } from '../ui/sonner';
|
import { toast } from '../ui/sonner';
|
||||||
import { api } from '../../api';
|
import { api } from '../../api';
|
||||||
import { formatTime } from '../../utils/messageParser';
|
import { formatTime } from '../../utils/messageParser';
|
||||||
|
import { lppDisplayUnit } from '../repeater/repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
import { BulkDeleteContactsModal } from './BulkDeleteContactsModal';
|
import { BulkDeleteContactsModal } from './BulkDeleteContactsModal';
|
||||||
import type {
|
import type {
|
||||||
AppSettings,
|
AppSettings,
|
||||||
@@ -44,6 +46,7 @@ export function SettingsDatabaseSection({
|
|||||||
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
||||||
className?: string;
|
className?: string;
|
||||||
}) {
|
}) {
|
||||||
|
const { distanceUnit } = useDistanceUnit();
|
||||||
const [retentionDays, setRetentionDays] = useState('14');
|
const [retentionDays, setRetentionDays] = useState('14');
|
||||||
const [cleaning, setCleaning] = useState(false);
|
const [cleaning, setCleaning] = useState(false);
|
||||||
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
|
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
|
||||||
@@ -308,6 +311,22 @@ export function SettingsDatabaseSection({
|
|||||||
<span>
|
<span>
|
||||||
tx {d.packets_sent != null ? d.packets_sent.toLocaleString() : '?'}
|
tx {d.packets_sent != null ? d.packets_sent.toLocaleString() : '?'}
|
||||||
</span>
|
</span>
|
||||||
|
{d.lpp_sensors?.map((s) => {
|
||||||
|
const display = lppDisplayUnit(s.type_name, s.value, distanceUnit);
|
||||||
|
const val =
|
||||||
|
typeof display.value === 'number'
|
||||||
|
? display.value % 1 === 0
|
||||||
|
? display.value
|
||||||
|
: display.value.toFixed(1)
|
||||||
|
: display.value;
|
||||||
|
const label = s.type_name.charAt(0).toUpperCase() + s.type_name.slice(1);
|
||||||
|
return (
|
||||||
|
<span key={`${s.type_name}-${s.channel}`}>
|
||||||
|
{label} {val}
|
||||||
|
{display.unit ? ` ${display.unit}` : ''}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
})}
|
||||||
<span className="ml-auto">checked {formatTime(snap.timestamp)}</span>
|
<span className="ml-auto">checked {formatTime(snap.timestamp)}</span>
|
||||||
</div>
|
</div>
|
||||||
) : snap === null ? (
|
) : snap === null ? (
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ const BotCodeEditor = lazy(() =>
|
|||||||
const TYPE_LABELS: Record<string, string> = {
|
const TYPE_LABELS: Record<string, string> = {
|
||||||
mqtt_private: 'Private MQTT',
|
mqtt_private: 'Private MQTT',
|
||||||
mqtt_community: 'Community Sharing',
|
mqtt_community: 'Community Sharing',
|
||||||
|
mqtt_ha: 'Home Assistant',
|
||||||
bot: 'Python Bot',
|
bot: 'Python Bot',
|
||||||
webhook: 'Webhook',
|
webhook: 'Webhook',
|
||||||
apprise: 'Apprise',
|
apprise: 'Apprise',
|
||||||
@@ -101,6 +102,7 @@ const DEFAULT_BOT_CODE = `def bot(**kwargs) -> str | list[str] | None:
|
|||||||
|
|
||||||
type DraftType =
|
type DraftType =
|
||||||
| 'mqtt_private'
|
| 'mqtt_private'
|
||||||
|
| 'mqtt_ha'
|
||||||
| 'mqtt_community'
|
| 'mqtt_community'
|
||||||
| 'mqtt_community_meshrank'
|
| 'mqtt_community_meshrank'
|
||||||
| 'mqtt_community_letsmesh_us'
|
| 'mqtt_community_letsmesh_us'
|
||||||
@@ -130,7 +132,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'mqtt_private',
|
value: 'mqtt_private',
|
||||||
savedType: 'mqtt_private',
|
savedType: 'mqtt_private',
|
||||||
label: 'Private MQTT',
|
label: 'Private MQTT',
|
||||||
section: 'Bulk Forwarding',
|
section: 'Private Forwarding',
|
||||||
description:
|
description:
|
||||||
'Customizable-scope forwarding of all or some messages to an MQTT broker of your choosing, in raw and/or decrypted form.',
|
'Customizable-scope forwarding of all or some messages to an MQTT broker of your choosing, in raw and/or decrypted form.',
|
||||||
defaultName: 'Private MQTT',
|
defaultName: 'Private MQTT',
|
||||||
@@ -148,6 +150,30 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
scope: { messages: 'all', raw_packets: 'all' },
|
scope: { messages: 'all', raw_packets: 'all' },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
value: 'mqtt_ha',
|
||||||
|
savedType: 'mqtt_ha',
|
||||||
|
label: 'Home Assistant MQTT Discovery',
|
||||||
|
section: 'Private Forwarding',
|
||||||
|
description:
|
||||||
|
"Publishes MQTT Discovery payloads so mesh devices appear natively in Home Assistant. Requires HA's built-in MQTT integration connected to the same broker. Select specific contacts for GPS tracking and repeaters for telemetry sensors.",
|
||||||
|
defaultName: 'Home Assistant',
|
||||||
|
nameMode: 'fixed',
|
||||||
|
defaults: {
|
||||||
|
config: {
|
||||||
|
broker_host: '',
|
||||||
|
broker_port: 1883,
|
||||||
|
username: '',
|
||||||
|
password: '',
|
||||||
|
use_tls: false,
|
||||||
|
tls_insecure: false,
|
||||||
|
topic_prefix: 'meshcore',
|
||||||
|
tracked_contacts: [],
|
||||||
|
tracked_repeaters: [],
|
||||||
|
},
|
||||||
|
scope: { messages: 'all', raw_packets: 'none' },
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
value: 'mqtt_community',
|
value: 'mqtt_community',
|
||||||
savedType: 'mqtt_community',
|
savedType: 'mqtt_community',
|
||||||
@@ -261,7 +287,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'sqs',
|
value: 'sqs',
|
||||||
savedType: 'sqs',
|
savedType: 'sqs',
|
||||||
label: 'Amazon SQS',
|
label: 'Amazon SQS',
|
||||||
section: 'Bulk Forwarding',
|
section: 'Private Forwarding',
|
||||||
description: 'Send full or scope-customized raw or decrypted packets to an SQS',
|
description: 'Send full or scope-customized raw or decrypted packets to an SQS',
|
||||||
defaultName: 'Amazon SQS',
|
defaultName: 'Amazon SQS',
|
||||||
nameMode: 'counted',
|
nameMode: 'counted',
|
||||||
@@ -803,6 +829,446 @@ function MqttPrivateConfigEditor({
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function MqttHaConfigEditor({
|
||||||
|
config,
|
||||||
|
scope,
|
||||||
|
onChange,
|
||||||
|
onScopeChange,
|
||||||
|
}: {
|
||||||
|
config: Record<string, unknown>;
|
||||||
|
scope: Record<string, unknown>;
|
||||||
|
onChange: (config: Record<string, unknown>) => void;
|
||||||
|
onScopeChange: (scope: Record<string, unknown>) => void;
|
||||||
|
}) {
|
||||||
|
const [contacts, setContacts] = useState<Contact[]>([]);
|
||||||
|
const [trackedRepeaters, setTrackedRepeaters] = useState<string[]>([]);
|
||||||
|
const [contactSearch, setContactSearch] = useState('');
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
(async () => {
|
||||||
|
const all: Contact[] = [];
|
||||||
|
const pageSize = 1000;
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const page = await api.getContacts(pageSize, offset);
|
||||||
|
all.push(...page);
|
||||||
|
if (page.length < pageSize) break;
|
||||||
|
offset += pageSize;
|
||||||
|
}
|
||||||
|
setContacts(all);
|
||||||
|
})().catch(console.error);
|
||||||
|
|
||||||
|
api
|
||||||
|
.getSettings()
|
||||||
|
.then((s) => setTrackedRepeaters(s.tracked_telemetry_repeaters ?? []))
|
||||||
|
.catch(console.error);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const selectedContacts = (config.tracked_contacts as string[]) || [];
|
||||||
|
const selectedRepeaters = (config.tracked_repeaters as string[]) || [];
|
||||||
|
|
||||||
|
const contactOptions = useMemo(
|
||||||
|
() => contacts.filter((c) => c.type === 0 || c.type === 1 || c.type === 3),
|
||||||
|
[contacts]
|
||||||
|
);
|
||||||
|
|
||||||
|
const repeaterOptions = useMemo(
|
||||||
|
() => contacts.filter((c) => c.type === 2 && trackedRepeaters.includes(c.public_key)),
|
||||||
|
[contacts, trackedRepeaters]
|
||||||
|
);
|
||||||
|
|
||||||
|
const contactSearchLower = contactSearch.toLowerCase().trim();
|
||||||
|
const filteredContacts = useMemo(() => {
|
||||||
|
const matches = contactOptions.filter((c) => {
|
||||||
|
if (!contactSearchLower) return true;
|
||||||
|
const name = (c.name || '').toLowerCase();
|
||||||
|
const key = c.public_key.toLowerCase();
|
||||||
|
return name.includes(contactSearchLower) || key.startsWith(contactSearchLower);
|
||||||
|
});
|
||||||
|
// Selected contacts sort to top
|
||||||
|
return matches.sort((a, b) => {
|
||||||
|
const aSelected = selectedContacts.includes(a.public_key) ? 0 : 1;
|
||||||
|
const bSelected = selectedContacts.includes(b.public_key) ? 0 : 1;
|
||||||
|
if (aSelected !== bSelected) return aSelected - bSelected;
|
||||||
|
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
||||||
|
});
|
||||||
|
}, [contactOptions, contactSearchLower, selectedContacts]);
|
||||||
|
|
||||||
|
const selectedContactDetails = contactOptions.filter((c) =>
|
||||||
|
selectedContacts.includes(c.public_key)
|
||||||
|
);
|
||||||
|
|
||||||
|
const toggleTrackedContact = (key: string) => {
|
||||||
|
const current = [...selectedContacts];
|
||||||
|
const idx = current.indexOf(key);
|
||||||
|
if (idx >= 0) current.splice(idx, 1);
|
||||||
|
else current.push(key);
|
||||||
|
onChange({ ...config, tracked_contacts: current });
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleTrackedRepeater = (key: string) => {
|
||||||
|
const current = [...selectedRepeaters];
|
||||||
|
const idx = current.indexOf(key);
|
||||||
|
if (idx >= 0) current.splice(idx, 1);
|
||||||
|
else current.push(key);
|
||||||
|
onChange({ ...config, tracked_repeaters: current });
|
||||||
|
};
|
||||||
|
|
||||||
|
const prefix = ((config.topic_prefix as string) || 'meshcore').trim() || 'meshcore';
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-3">
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Uses{' '}
|
||||||
|
<span
|
||||||
|
role="link"
|
||||||
|
tabIndex={0}
|
||||||
|
className="underline cursor-pointer hover:text-primary transition-colors"
|
||||||
|
onClick={() =>
|
||||||
|
window.open('https://www.home-assistant.io/integrations/mqtt/#mqtt-discovery', '_blank')
|
||||||
|
}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === 'Enter')
|
||||||
|
window.open(
|
||||||
|
'https://www.home-assistant.io/integrations/mqtt/#mqtt-discovery',
|
||||||
|
'_blank'
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
MQTT Discovery
|
||||||
|
</span>{' '}
|
||||||
|
to automatically create devices and entities in Home Assistant. Your HA instance must have
|
||||||
|
the MQTT integration configured and connected to the same broker. See{' '}
|
||||||
|
<span
|
||||||
|
role="link"
|
||||||
|
tabIndex={0}
|
||||||
|
className="underline cursor-pointer hover:text-primary transition-colors"
|
||||||
|
onClick={() =>
|
||||||
|
window.open(
|
||||||
|
'https://github.com/jkingsman/Remote-Terminal-for-MeshCore/blob/main/README_HA.md',
|
||||||
|
'_blank'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === 'Enter')
|
||||||
|
window.open(
|
||||||
|
'https://github.com/jkingsman/Remote-Terminal-for-MeshCore/blob/main/README_HA.md',
|
||||||
|
'_blank'
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
README_HA.md
|
||||||
|
</span>{' '}
|
||||||
|
for automation examples and setup details. Note that entities like repeaters and contact GPS
|
||||||
|
won't update until new data is available; there is no caching layer (so devices/entities
|
||||||
|
might take hours to days to appear).
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<details className="group">
|
||||||
|
<summary className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium cursor-pointer select-none flex items-center gap-1">
|
||||||
|
<ChevronDown className="h-3 w-3 transition-transform group-open:rotate-0 -rotate-90" />
|
||||||
|
What gets created in Home Assistant
|
||||||
|
</summary>
|
||||||
|
<div className="mt-2 space-y-2 text-xs text-muted-foreground rounded-md border border-border bg-muted/20 p-3">
|
||||||
|
<div>
|
||||||
|
<span className="font-medium text-foreground">Local radio device</span> (always)
|
||||||
|
<span className="ml-1">— updates every 60s</span>
|
||||||
|
<ul className="mt-0.5 ml-4 list-disc space-y-0.5">
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">binary_sensor.meshcore_*_connected</code> —
|
||||||
|
radio online/offline
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_noise_floor</code> —
|
||||||
|
radio noise floor (dBm)
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<span className="font-medium text-foreground">Per tracked repeater</span> —
|
||||||
|
updates on telemetry collect cycle (~8h) or manual dashboard fetch
|
||||||
|
<ul className="mt-0.5 ml-4 list-disc space-y-0.5">
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_battery_voltage</code> (V)
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_noise_floor</code>,{' '}
|
||||||
|
<code className="text-[0.6875rem]">*_last_rssi</code>,{' '}
|
||||||
|
<code className="text-[0.6875rem]">*_last_snr</code> (dBm/dB)
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_packets_received</code>,{' '}
|
||||||
|
<code className="text-[0.6875rem]">*_packets_sent</code>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_uptime</code> (seconds)
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">sensor.meshcore_*_lpp_temperature_ch*</code>,{' '}
|
||||||
|
<code className="text-[0.6875rem]">*_lpp_humidity_ch*</code>, etc. —
|
||||||
|
CayenneLPP sensors (auto-detected from repeater)
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<span className="font-medium text-foreground">Per tracked contact</span> — updates
|
||||||
|
passively when advertisements with GPS are heard
|
||||||
|
<ul className="mt-0.5 ml-4 list-disc space-y-0.5">
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">device_tracker.meshcore_*</code> —
|
||||||
|
latitude/longitude
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<span className="font-medium text-foreground">Message events</span> — fires for
|
||||||
|
each message matching the scope below
|
||||||
|
<ul className="mt-0.5 ml-4 list-disc space-y-0.5">
|
||||||
|
<li>
|
||||||
|
<code className="text-[0.6875rem]">event.meshcore_messages</code> — trigger
|
||||||
|
automations on sender, channel, or message content
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p className="text-[0.6875rem] mt-1.5">
|
||||||
|
Entity IDs use the first 12 characters of the node's public key. Entities are
|
||||||
|
removed from HA when this integration is disabled or deleted. State topics are published
|
||||||
|
under{' '}
|
||||||
|
<code className="text-[0.6875rem]">{prefix}/<node_id>/health|telemetry|gps</code>.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
MQTT Broker
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-ha-host">Broker Host</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-ha-host"
|
||||||
|
type="text"
|
||||||
|
placeholder="e.g. 192.168.1.100"
|
||||||
|
value={(config.broker_host as string) || ''}
|
||||||
|
onChange={(e) => onChange({ ...config, broker_host: e.target.value })}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-ha-port">Broker Port</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-ha-port"
|
||||||
|
type="number"
|
||||||
|
min="1"
|
||||||
|
max="65535"
|
||||||
|
value={getNumberInputValue(config.broker_port, 1883)}
|
||||||
|
onChange={(e) =>
|
||||||
|
onChange({ ...config, broker_port: parseIntegerInputValue(e.target.value) })
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-ha-user">Username</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-ha-user"
|
||||||
|
type="text"
|
||||||
|
placeholder="Optional"
|
||||||
|
value={(config.username as string) || ''}
|
||||||
|
onChange={(e) => onChange({ ...config, username: e.target.value })}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-ha-pass">Password</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-ha-pass"
|
||||||
|
type="password"
|
||||||
|
placeholder="Optional"
|
||||||
|
value={(config.password as string) || ''}
|
||||||
|
onChange={(e) => onChange({ ...config, password: e.target.value })}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<label className="flex items-center gap-3 cursor-pointer">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={!!config.use_tls}
|
||||||
|
onChange={(e) => onChange({ ...config, use_tls: e.target.checked })}
|
||||||
|
className="h-4 w-4 rounded border-border"
|
||||||
|
/>
|
||||||
|
<span className="text-sm">Use TLS</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
{!!config.use_tls && (
|
||||||
|
<label className="flex items-center gap-3 cursor-pointer ml-7">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={!!config.tls_insecure}
|
||||||
|
onChange={(e) => onChange({ ...config, tls_insecure: e.target.checked })}
|
||||||
|
className="h-4 w-4 rounded border-border"
|
||||||
|
/>
|
||||||
|
<span className="text-sm">Skip certificate verification</span>
|
||||||
|
</label>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-ha-prefix">Topic Prefix</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-ha-prefix"
|
||||||
|
type="text"
|
||||||
|
placeholder="meshcore"
|
||||||
|
value={(config.topic_prefix as string | undefined) ?? ''}
|
||||||
|
onChange={(e) => onChange({ ...config, topic_prefix: e.target.value })}
|
||||||
|
/>
|
||||||
|
<p className="text-[0.6875rem] text-muted-foreground">
|
||||||
|
State updates publish under <code className="text-[0.6875rem]">{prefix}/</code>. Discovery
|
||||||
|
configs always use the <code className="text-[0.6875rem]">homeassistant/</code> prefix.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
GPS Tracked Contacts
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Each selected contact becomes a <code className="text-[0.6875rem]">device_tracker</code>{' '}
|
||||||
|
in HA, updated whenever an advertisement with GPS coordinates is heard. Useful for
|
||||||
|
tracking mobile nodes on an HA map dashboard.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
{selectedContactDetails.length > 0 && (
|
||||||
|
<div className="flex flex-wrap gap-1.5">
|
||||||
|
{selectedContactDetails.map((c) => (
|
||||||
|
<span
|
||||||
|
key={c.public_key}
|
||||||
|
className="inline-flex items-center gap-1 text-[0.6875rem] px-2 py-0.5 rounded-full bg-primary/10 text-primary"
|
||||||
|
>
|
||||||
|
{c.name || c.public_key.slice(0, 12)}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
className="ml-0.5 hover:text-destructive transition-colors"
|
||||||
|
onClick={() => toggleTrackedContact(c.public_key)}
|
||||||
|
aria-label={`Remove ${c.name || c.public_key.slice(0, 12)}`}
|
||||||
|
>
|
||||||
|
×
|
||||||
|
</button>
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{contactOptions.length === 0 ? (
|
||||||
|
<p className="text-xs text-muted-foreground italic">No contacts available.</p>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<Input
|
||||||
|
type="text"
|
||||||
|
placeholder={`Search ${contactOptions.length} contacts...`}
|
||||||
|
value={contactSearch}
|
||||||
|
onChange={(e) => setContactSearch(e.target.value)}
|
||||||
|
className="h-8 text-sm"
|
||||||
|
/>
|
||||||
|
<div className="max-h-48 overflow-y-auto space-y-1 rounded border border-border p-2">
|
||||||
|
{filteredContacts.length === 0 ? (
|
||||||
|
<p className="text-xs text-muted-foreground italic py-1">
|
||||||
|
No contacts match “{contactSearch}”
|
||||||
|
</p>
|
||||||
|
) : (
|
||||||
|
filteredContacts.map((c) => (
|
||||||
|
<label
|
||||||
|
key={c.public_key}
|
||||||
|
className="flex items-center gap-2 cursor-pointer text-sm"
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={selectedContacts.includes(c.public_key)}
|
||||||
|
onChange={() => toggleTrackedContact(c.public_key)}
|
||||||
|
className="h-3.5 w-3.5 rounded border-border"
|
||||||
|
/>
|
||||||
|
<span className="truncate">{c.name || c.public_key.slice(0, 12)}</span>
|
||||||
|
<span className="text-[0.625rem] text-muted-foreground ml-auto font-mono shrink-0">
|
||||||
|
{c.public_key.slice(0, 12)}
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
))
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
Telemetry Tracked Repeaters
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Each selected repeater becomes an HA device with sensors for battery voltage, RSSI, SNR,
|
||||||
|
noise floor, packet counts, and uptime. Data updates whenever telemetry is collected
|
||||||
|
(auto-collect runs every ~8 hours, or on manual dashboard fetch). Only repeaters already
|
||||||
|
in the auto-telemetry tracking list appear here (add new repeaters by logging into the
|
||||||
|
repeater and opting in at the bottom of the page).
|
||||||
|
</p>
|
||||||
|
{trackedRepeaters.length === 0 ? (
|
||||||
|
<div className="rounded-md border border-muted bg-muted/30 px-3 py-2 text-xs text-muted-foreground">
|
||||||
|
No repeaters are being auto-tracked for telemetry. Add repeaters to the auto-telemetry
|
||||||
|
tracking list in the Radio section first, then return here to select which ones to
|
||||||
|
expose to HA.
|
||||||
|
</div>
|
||||||
|
) : repeaterOptions.length === 0 ? (
|
||||||
|
<p className="text-xs text-muted-foreground italic">
|
||||||
|
Auto-tracked repeaters not found in contact list.
|
||||||
|
</p>
|
||||||
|
) : (
|
||||||
|
<div className="max-h-40 overflow-y-auto space-y-1 rounded border border-border p-2">
|
||||||
|
{repeaterOptions.map((c) => (
|
||||||
|
<label key={c.public_key} className="flex items-center gap-2 cursor-pointer text-sm">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={selectedRepeaters.includes(c.public_key)}
|
||||||
|
onChange={() => toggleTrackedRepeater(c.public_key)}
|
||||||
|
className="h-3.5 w-3.5 rounded border-border"
|
||||||
|
/>
|
||||||
|
<span className="truncate">{c.name || c.public_key.slice(0, 12)}</span>
|
||||||
|
<span className="text-[0.625rem] text-muted-foreground ml-auto font-mono">
|
||||||
|
{c.public_key.slice(0, 12)}
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
Message Events
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Matching messages fire an{' '}
|
||||||
|
<code className="text-[0.6875rem]">event.meshcore_messages</code> entity in HA with
|
||||||
|
sender, text, channel, and direction attributes. Use HA automations to trigger actions on
|
||||||
|
specific messages, channels, or contacts.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<ScopeSelector scope={scope} onChange={onScopeChange} />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
function MqttCommunityConfigEditor({
|
function MqttCommunityConfigEditor({
|
||||||
config,
|
config,
|
||||||
onChange,
|
onChange,
|
||||||
@@ -2185,6 +2651,15 @@ export function SettingsFanoutSection({
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{detailType === 'mqtt_ha' && (
|
||||||
|
<MqttHaConfigEditor
|
||||||
|
config={editConfig}
|
||||||
|
scope={editScope}
|
||||||
|
onChange={setEditConfig}
|
||||||
|
onScopeChange={setEditScope}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
{detailType === 'mqtt_community' && (
|
{detailType === 'mqtt_community' && (
|
||||||
<MqttCommunityConfigEditor config={editConfig} onChange={setEditConfig} />
|
<MqttCommunityConfigEditor config={editConfig} onChange={setEditConfig} />
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -118,7 +118,7 @@ describe('SettingsFanoutSection', () => {
|
|||||||
const optionButtons = within(dialog)
|
const optionButtons = within(dialog)
|
||||||
.getAllByRole('button')
|
.getAllByRole('button')
|
||||||
.filter((button) => button.hasAttribute('aria-pressed'));
|
.filter((button) => button.hasAttribute('aria-pressed'));
|
||||||
expect(optionButtons).toHaveLength(10);
|
expect(optionButtons).toHaveLength(11);
|
||||||
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
||||||
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
||||||
expect(
|
expect(
|
||||||
|
|||||||
@@ -513,6 +513,42 @@ describe('Sidebar section summaries', () => {
|
|||||||
expect(contactRows).toEqual(['DM Recent', 'Advert Only', 'No Recency']);
|
expect(contactRows).toEqual(['DM Recent', 'Advert Only', 'No Recency']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('floats contacts with unread DMs above read contacts regardless of recency', () => {
|
||||||
|
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||||
|
const readRecent = makeContact('11'.repeat(32), 'Read Recent', 1, { last_advert: 500 });
|
||||||
|
const unreadOld = makeContact('22'.repeat(32), 'Unread Old', 1, { last_advert: 100 });
|
||||||
|
|
||||||
|
render(
|
||||||
|
<Sidebar
|
||||||
|
contacts={[readRecent, unreadOld]}
|
||||||
|
channels={[publicChannel]}
|
||||||
|
activeConversation={null}
|
||||||
|
onSelectConversation={vi.fn()}
|
||||||
|
onNewMessage={vi.fn()}
|
||||||
|
lastMessageTimes={{
|
||||||
|
[getStateKey('contact', readRecent.public_key)]: 500,
|
||||||
|
[getStateKey('contact', unreadOld.public_key)]: 200,
|
||||||
|
}}
|
||||||
|
unreadCounts={{
|
||||||
|
[getStateKey('contact', unreadOld.public_key)]: 3,
|
||||||
|
}}
|
||||||
|
mentions={{}}
|
||||||
|
showCracker={false}
|
||||||
|
crackerRunning={false}
|
||||||
|
onToggleCracker={vi.fn()}
|
||||||
|
onMarkAllRead={vi.fn()}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
const contactRows = screen
|
||||||
|
.getAllByText(/^(Read Recent|Unread Old)$/)
|
||||||
|
.map((node) => node.textContent)
|
||||||
|
.filter((text): text is string => Boolean(text));
|
||||||
|
|
||||||
|
// Unread Old has unread DMs so it floats above Read Recent despite older recency
|
||||||
|
expect(contactRows).toEqual(['Unread Old', 'Read Recent']);
|
||||||
|
});
|
||||||
|
|
||||||
it('sorts repeaters by heard recency even when message times disagree', () => {
|
it('sorts repeaters by heard recency even when message times disagree', () => {
|
||||||
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||||
const staleMessageRelay = makeContact(
|
const staleMessageRelay = makeContact(
|
||||||
|
|||||||
@@ -487,9 +487,15 @@ export interface PaneState {
|
|||||||
fetched_at?: number | null;
|
fetched_at?: number | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface TelemetryLppSensor {
|
||||||
|
channel: number;
|
||||||
|
type_name: string;
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
|
||||||
export interface TelemetryHistoryEntry {
|
export interface TelemetryHistoryEntry {
|
||||||
timestamp: number;
|
timestamp: number;
|
||||||
data: Record<string, number>;
|
data: Record<string, number> & { lpp_sensors?: TelemetryLppSensor[] };
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TraceResponse {
|
export interface TraceResponse {
|
||||||
|
|||||||
+1
-1
@@ -1,6 +1,6 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "remoteterm-meshcore"
|
name = "remoteterm-meshcore"
|
||||||
version = "3.11.0"
|
version = "3.11.3"
|
||||||
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.11"
|
||||||
|
|||||||
Regular → Executable
Regular → Executable
Regular → Executable
Regular → Executable
Regular → Executable
Regular → Executable
Executable
+210
@@ -0,0 +1,210 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Start a fresh Home Assistant + Mosquitto environment for testing the
|
||||||
|
# mqtt_ha fanout integration. Runs everything on the host network so
|
||||||
|
# RemoteTerm (running locally) can reach the broker at localhost:1883.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/setup/start_ha_test_env.sh
|
||||||
|
#
|
||||||
|
# After this script completes:
|
||||||
|
# 1. HA is at http://localhost:8123 (login: dev / dev)
|
||||||
|
# 2. Mosquitto is at localhost:1883 (no auth)
|
||||||
|
# 3. HA's MQTT integration is configured and connected to Mosquitto
|
||||||
|
#
|
||||||
|
# Then in RemoteTerm:
|
||||||
|
# Settings > Integrations > Add > Home Assistant
|
||||||
|
# Broker Host: 127.0.0.1 Port: 1883
|
||||||
|
# Select contacts/repeaters and save.
|
||||||
|
#
|
||||||
|
# To tear down:
|
||||||
|
# ./scripts/setup/stop_ha_test_env.sh
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||||
|
HA_CONFIG="$REPO_ROOT/ha_test_config"
|
||||||
|
|
||||||
|
echo "==> Stopping any existing test containers..."
|
||||||
|
docker rm -f ha-test-mosquitto 2>/dev/null || true
|
||||||
|
docker rm -f ha-test-homeassistant 2>/dev/null || true
|
||||||
|
|
||||||
|
echo "==> Wiping HA config for fresh start..."
|
||||||
|
sudo rm -rf "$HA_CONFIG"
|
||||||
|
mkdir -p "$HA_CONFIG"
|
||||||
|
|
||||||
|
# ── Mosquitto ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "==> Starting Mosquitto (port 1883, no auth)..."
|
||||||
|
MOSQUITTO_CONF=$(mktemp)
|
||||||
|
cat > "$MOSQUITTO_CONF" << 'MQTTEOF'
|
||||||
|
listener 1883 0.0.0.0
|
||||||
|
allow_anonymous true
|
||||||
|
MQTTEOF
|
||||||
|
|
||||||
|
docker run -d \
|
||||||
|
--name ha-test-mosquitto \
|
||||||
|
--network host \
|
||||||
|
-v "$MOSQUITTO_CONF:/mosquitto/config/mosquitto.conf:ro" \
|
||||||
|
eclipse-mosquitto:2
|
||||||
|
|
||||||
|
# Give Mosquitto a moment to bind
|
||||||
|
sleep 2
|
||||||
|
rm -f "$MOSQUITTO_CONF"
|
||||||
|
|
||||||
|
# ── Home Assistant ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "==> Starting Home Assistant (port 8123)..."
|
||||||
|
docker run -d \
|
||||||
|
--name ha-test-homeassistant \
|
||||||
|
--network host \
|
||||||
|
-v "$HA_CONFIG:/config" \
|
||||||
|
ghcr.io/home-assistant/home-assistant:stable
|
||||||
|
|
||||||
|
echo "==> Waiting for HA to boot..."
|
||||||
|
for i in $(seq 1 90); do
|
||||||
|
HTTP_CODE=$(curl -s -o /dev/null -w '%{http_code}' http://localhost:8123/api/onboarding/users 2>/dev/null || echo "000")
|
||||||
|
if echo "$HTTP_CODE" | grep -q '200\|405'; then
|
||||||
|
echo " HA is up after ${i}s"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
if [ "$i" -eq 90 ]; then
|
||||||
|
echo " ERROR: HA did not start within 90s"
|
||||||
|
echo " Check: docker logs ha-test-homeassistant"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
# ── Onboarding ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "==> Running onboarding (user: dev / pass: dev)..."
|
||||||
|
ONBOARD_RESP=$(curl -s -X POST http://localhost:8123/api/onboarding/users \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"client_id":"http://localhost:8123/","name":"Dev","username":"dev","password":"dev","language":"en"}')
|
||||||
|
|
||||||
|
AUTH_CODE=$(echo "$ONBOARD_RESP" | python3 -c "import sys,json; print(json.load(sys.stdin).get('auth_code',''))" 2>/dev/null || echo "")
|
||||||
|
if [ -z "$AUTH_CODE" ]; then
|
||||||
|
echo " WARNING: Could not extract auth_code from onboarding. HA may already be onboarded."
|
||||||
|
echo " Response: $ONBOARD_RESP"
|
||||||
|
echo ""
|
||||||
|
echo " Skipping MQTT auto-config. Configure MQTT manually:"
|
||||||
|
echo " Settings > Devices & Services > Add Integration > MQTT"
|
||||||
|
echo " Broker: 127.0.0.1 Port: 1883"
|
||||||
|
echo ""
|
||||||
|
echo "==> Done! Open http://localhost:8123"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Exchange auth code for tokens
|
||||||
|
echo "==> Exchanging auth code for access token..."
|
||||||
|
TOKEN_RESP=$(curl -s -X POST http://localhost:8123/auth/token \
|
||||||
|
-d "grant_type=authorization_code&code=$AUTH_CODE&client_id=http://localhost:8123/")
|
||||||
|
|
||||||
|
ACCESS_TOKEN=$(echo "$TOKEN_RESP" | python3 -c "import sys,json; print(json.load(sys.stdin).get('access_token',''))" 2>/dev/null || echo "")
|
||||||
|
if [ -z "$ACCESS_TOKEN" ]; then
|
||||||
|
echo " WARNING: Could not get access token."
|
||||||
|
echo " Configure MQTT manually: Settings > Devices & Services > Add Integration > MQTT"
|
||||||
|
echo " Broker: 127.0.0.1 Port: 1883"
|
||||||
|
echo ""
|
||||||
|
echo "==> Done! Open http://localhost:8123 and log in as dev/dev"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Complete remaining onboarding steps
|
||||||
|
echo "==> Completing onboarding steps..."
|
||||||
|
curl -s -X POST http://localhost:8123/api/onboarding/core_config \
|
||||||
|
-H "Authorization: Bearer $ACCESS_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{}' > /dev/null 2>&1 || true
|
||||||
|
|
||||||
|
curl -s -X POST http://localhost:8123/api/onboarding/analytics \
|
||||||
|
-H "Authorization: Bearer $ACCESS_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{}' > /dev/null 2>&1 || true
|
||||||
|
|
||||||
|
curl -s -X POST http://localhost:8123/api/onboarding/integration \
|
||||||
|
-H "Authorization: Bearer $ACCESS_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{}' > /dev/null 2>&1 || true
|
||||||
|
|
||||||
|
# ── Configure MQTT integration ───────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "==> Adding MQTT integration (broker: 127.0.0.1:1883)..."
|
||||||
|
FLOW_RESP=$(curl -s -X POST http://localhost:8123/api/config/config_entries/flow \
|
||||||
|
-H "Authorization: Bearer $ACCESS_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"handler":"mqtt"}')
|
||||||
|
|
||||||
|
FLOW_ID=$(echo "$FLOW_RESP" | python3 -c "import sys,json; print(json.load(sys.stdin).get('flow_id',''))" 2>/dev/null || echo "")
|
||||||
|
if [ -z "$FLOW_ID" ]; then
|
||||||
|
echo " WARNING: Could not start MQTT config flow."
|
||||||
|
echo " Response: $FLOW_RESP"
|
||||||
|
echo " Configure MQTT manually: Settings > Devices & Services > Add Integration > MQTT"
|
||||||
|
echo " Broker: 127.0.0.1 Port: 1883"
|
||||||
|
else
|
||||||
|
MQTT_RESULT=$(curl -s -X POST "http://localhost:8123/api/config/config_entries/flow/$FLOW_ID" \
|
||||||
|
-H "Authorization: Bearer $ACCESS_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"broker":"127.0.0.1","port":1883,"username":"","password":""}')
|
||||||
|
|
||||||
|
RESULT_TYPE=$(echo "$MQTT_RESULT" | python3 -c "import sys,json; print(json.load(sys.stdin).get('type',''))" 2>/dev/null || echo "")
|
||||||
|
if [ "$RESULT_TYPE" = "create_entry" ]; then
|
||||||
|
echo " MQTT integration configured successfully."
|
||||||
|
else
|
||||||
|
echo " WARNING: MQTT config flow returned: $RESULT_TYPE"
|
||||||
|
echo " Response: $MQTT_RESULT"
|
||||||
|
echo " You may need to configure MQTT manually."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ── Debug logging ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "==> Enabling MQTT debug logging..."
|
||||||
|
sudo tee -a "$HA_CONFIG/configuration.yaml" > /dev/null << 'EOF'
|
||||||
|
|
||||||
|
logger:
|
||||||
|
default: warning
|
||||||
|
logs:
|
||||||
|
homeassistant.components.mqtt: debug
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Gracefully stop the backgrounded HA so it flushes config to disk
|
||||||
|
# (docker rm -f sends SIGKILL which loses in-memory state like the MQTT config entry)
|
||||||
|
echo "==> Stopping background HA (graceful, flushing config)..."
|
||||||
|
docker stop ha-test-homeassistant > /dev/null 2>&1
|
||||||
|
docker rm ha-test-homeassistant > /dev/null 2>&1
|
||||||
|
|
||||||
|
# ── Summary ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "============================================================"
|
||||||
|
echo " HA test environment ready!"
|
||||||
|
echo "============================================================"
|
||||||
|
echo ""
|
||||||
|
echo " Home Assistant: http://localhost:8123 (dev / dev)"
|
||||||
|
echo " Mosquitto: localhost:1883 (no auth)"
|
||||||
|
echo " MQTT integration: pre-configured"
|
||||||
|
echo ""
|
||||||
|
echo " Next steps:"
|
||||||
|
echo " 1. Start RemoteTerm as usual"
|
||||||
|
echo " 2. In RemoteTerm: Settings > Integrations > Add > Home Assistant"
|
||||||
|
echo " 3. Set Broker Host: 127.0.0.1, Port: 1883"
|
||||||
|
echo " 4. Select contacts for GPS tracking and/or repeaters for telemetry"
|
||||||
|
echo " 5. Save and enable"
|
||||||
|
echo " 6. In HA: Settings > Devices & Services > MQTT"
|
||||||
|
echo " You should see MeshCore devices appearing automatically"
|
||||||
|
echo ""
|
||||||
|
echo " MQTT debug tool (in another terminal):"
|
||||||
|
echo " docker exec ha-test-mosquitto mosquitto_sub -h 127.0.0.1 -t '#' -v"
|
||||||
|
echo ""
|
||||||
|
echo " Tear down: Ctrl+C here, then ./scripts/setup/stop_ha_test_env.sh"
|
||||||
|
echo ""
|
||||||
|
echo "==> Starting Home Assistant in foreground (Ctrl+C to stop)..."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
exec docker run --rm \
|
||||||
|
--name ha-test-homeassistant \
|
||||||
|
--network host \
|
||||||
|
-v "$HA_CONFIG:/config" \
|
||||||
|
ghcr.io/home-assistant/home-assistant:stable
|
||||||
Executable
+29
@@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Stop and remove the HA + Mosquitto test containers.
|
||||||
|
# Optionally pass --clean to also wipe the HA config directory.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/setup/stop_ha_test_env.sh # stop containers only
|
||||||
|
# ./scripts/setup/stop_ha_test_env.sh --clean # stop + wipe ha_test_config
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||||
|
HA_CONFIG="$REPO_ROOT/ha_test_config"
|
||||||
|
|
||||||
|
echo "==> Stopping test containers..."
|
||||||
|
docker rm -f ha-test-mosquitto 2>/dev/null && echo " Removed ha-test-mosquitto" || echo " ha-test-mosquitto not running"
|
||||||
|
docker rm -f ha-test-homeassistant 2>/dev/null && echo " Removed ha-test-homeassistant" || echo " ha-test-homeassistant not running"
|
||||||
|
|
||||||
|
if [ "${1:-}" = "--clean" ]; then
|
||||||
|
echo "==> Wiping $HA_CONFIG..."
|
||||||
|
sudo rm -rf "$HA_CONFIG"
|
||||||
|
echo " Done."
|
||||||
|
else
|
||||||
|
echo ""
|
||||||
|
echo " HA config preserved at $HA_CONFIG"
|
||||||
|
echo " Run with --clean to remove it."
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "==> Done."
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
import type { FullConfig } from '@playwright/test';
|
import type { FullConfig } from '@playwright/test';
|
||||||
|
|
||||||
const BASE_URL = 'http://localhost:8001';
|
const BASE_URL = 'http://localhost:8001';
|
||||||
const MAX_RETRIES = 10;
|
// Post-connect sync (contact offload, channel sync, key export) can take
|
||||||
const RETRY_DELAY_MS = 2000;
|
// 30-60s on a radio with many contacts, so allow generous polling here.
|
||||||
|
const MAX_RETRIES = 60;
|
||||||
|
const RETRY_DELAY_MS = 3000;
|
||||||
|
|
||||||
interface HealthStatus {
|
interface HealthStatus {
|
||||||
radio_connected: boolean;
|
radio_connected: boolean;
|
||||||
|
|||||||
@@ -63,7 +63,6 @@ export default defineConfig({
|
|||||||
timeout: 180_000,
|
timeout: 180_000,
|
||||||
env: {
|
env: {
|
||||||
MESHCORE_DATABASE_PATH: path.join(tmpDir, 'e2e-test.db'),
|
MESHCORE_DATABASE_PATH: path.join(tmpDir, 'e2e-test.db'),
|
||||||
MESHCORE_SKIP_POST_CONNECT_SYNC: 'true',
|
|
||||||
// Pass through the serial port from the environment
|
// Pass through the serial port from the environment
|
||||||
...(process.env.MESHCORE_SERIAL_PORT
|
...(process.env.MESHCORE_SERIAL_PORT
|
||||||
? { MESHCORE_SERIAL_PORT: process.env.MESHCORE_SERIAL_PORT }
|
? { MESHCORE_SERIAL_PORT: process.env.MESHCORE_SERIAL_PORT }
|
||||||
|
|||||||
@@ -0,0 +1,698 @@
|
|||||||
|
"""Tests for the Home Assistant MQTT Discovery fanout module."""
|
||||||
|
|
||||||
|
from types import SimpleNamespace
|
||||||
|
from unittest.mock import AsyncMock, MagicMock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from app.fanout.mqtt_ha import (
|
||||||
|
MqttHaModule,
|
||||||
|
_contact_tracker_discovery_config,
|
||||||
|
_device_payload,
|
||||||
|
_lpp_discovery_configs,
|
||||||
|
_lpp_sensor_key,
|
||||||
|
_message_event_discovery_config,
|
||||||
|
_node_id,
|
||||||
|
_radio_discovery_configs,
|
||||||
|
_repeater_discovery_configs,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helper builders
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _base_config(**overrides) -> dict:
|
||||||
|
cfg = {
|
||||||
|
"broker_host": "127.0.0.1",
|
||||||
|
"broker_port": 1883,
|
||||||
|
"username": "",
|
||||||
|
"password": "",
|
||||||
|
"use_tls": False,
|
||||||
|
"tls_insecure": False,
|
||||||
|
"topic_prefix": "meshcore",
|
||||||
|
"tracked_contacts": [],
|
||||||
|
"tracked_repeaters": [],
|
||||||
|
}
|
||||||
|
cfg.update(overrides)
|
||||||
|
return cfg
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Unit: node_id and device_payload
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestNodeId:
|
||||||
|
def test_returns_12_char_prefix(self):
|
||||||
|
assert _node_id("aabbccddeeff11223344") == "aabbccddeeff"
|
||||||
|
|
||||||
|
def test_lowercases(self):
|
||||||
|
assert _node_id("AABBCCDDEEFF") == "aabbccddeeff"
|
||||||
|
|
||||||
|
|
||||||
|
class TestDevicePayload:
|
||||||
|
def test_basic(self):
|
||||||
|
dev = _device_payload("aabbccddeeff1122", "MyRadio", "Radio")
|
||||||
|
assert dev["identifiers"] == ["meshcore_aabbccddeeff"]
|
||||||
|
assert dev["name"] == "MyRadio"
|
||||||
|
assert dev["manufacturer"] == "MeshCore"
|
||||||
|
assert dev["model"] == "Radio"
|
||||||
|
assert "via_device" not in dev
|
||||||
|
|
||||||
|
def test_via_device(self):
|
||||||
|
dev = _device_payload("ccdd", "Repeater1", "Repeater", via_device_key="aabb")
|
||||||
|
assert dev["via_device"] == "meshcore_aabb"
|
||||||
|
|
||||||
|
def test_name_fallback(self):
|
||||||
|
dev = _device_payload("aabbccddeeff", "", "Radio")
|
||||||
|
assert dev["name"] == "aabbccddeeff"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Unit: discovery config builders
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestRadioDiscovery:
|
||||||
|
def test_produces_discovery_configs(self):
|
||||||
|
configs = _radio_discovery_configs("meshcore", "aabbccddeeff1122", "MyRadio")
|
||||||
|
# 1 binary_sensor (connected) + 9 sensors from _RADIO_SENSORS
|
||||||
|
assert len(configs) == 10
|
||||||
|
|
||||||
|
topics = [t for t, _ in configs]
|
||||||
|
assert "homeassistant/binary_sensor/meshcore_aabbccddeeff/connected/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/noise_floor/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/battery/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/uptime/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/last_rssi/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/last_snr/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/packets_received/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_aabbccddeeff/packets_sent/config" in topics
|
||||||
|
|
||||||
|
def test_connected_binary_sensor_shape(self):
|
||||||
|
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||||
|
topic, cfg = configs[0]
|
||||||
|
assert cfg["device_class"] == "connectivity"
|
||||||
|
assert cfg["state_topic"] == "mc/aabbccddeeff/health"
|
||||||
|
assert cfg["unique_id"] == "meshcore_aabbccddeeff_connected"
|
||||||
|
assert cfg["expire_after"] == 120
|
||||||
|
|
||||||
|
def test_sensor_configs_have_expire_after(self):
|
||||||
|
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||||
|
# All sensor configs (skip the binary_sensor at index 0)
|
||||||
|
for _, cfg in configs[1:]:
|
||||||
|
assert cfg["expire_after"] == 120
|
||||||
|
|
||||||
|
def test_sensor_configs_have_display_precision(self):
|
||||||
|
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||||
|
# All sensor configs (skip the binary_sensor at index 0)
|
||||||
|
for _, cfg in configs[1:]:
|
||||||
|
assert "suggested_display_precision" in cfg
|
||||||
|
assert isinstance(cfg["suggested_display_precision"], int)
|
||||||
|
|
||||||
|
def test_battery_sensor_uses_volts(self):
|
||||||
|
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||||
|
battery_cfgs = [(t, c) for t, c in configs if "battery" in t]
|
||||||
|
assert len(battery_cfgs) == 1
|
||||||
|
_, cfg = battery_cfgs[0]
|
||||||
|
assert cfg["unit_of_measurement"] == "V"
|
||||||
|
assert cfg["suggested_display_precision"] == 2
|
||||||
|
|
||||||
|
|
||||||
|
class TestRepeaterDiscovery:
|
||||||
|
def test_produces_sensor_per_field(self):
|
||||||
|
configs = _repeater_discovery_configs("mc", "ccdd11223344", "Rep1", "aabb")
|
||||||
|
assert len(configs) == 7 # matches _REPEATER_SENSORS length
|
||||||
|
|
||||||
|
topics = [t for t, _ in configs]
|
||||||
|
assert "homeassistant/sensor/meshcore_ccdd11223344/battery_voltage/config" in topics
|
||||||
|
assert "homeassistant/sensor/meshcore_ccdd11223344/uptime/config" in topics
|
||||||
|
|
||||||
|
def test_via_device_set(self):
|
||||||
|
configs = _repeater_discovery_configs("mc", "ccdd", "Rep1", "aabb")
|
||||||
|
_, cfg = configs[0]
|
||||||
|
assert cfg["device"]["via_device"] == "meshcore_aabb"
|
||||||
|
|
||||||
|
def test_sensors_have_expire_after(self):
|
||||||
|
configs = _repeater_discovery_configs("mc", "ccdd", "Rep1", None)
|
||||||
|
for _, cfg in configs:
|
||||||
|
assert cfg["expire_after"] == 36000
|
||||||
|
|
||||||
|
def test_sensors_have_display_precision(self):
|
||||||
|
configs = _repeater_discovery_configs("mc", "ccdd", "Rep1", None)
|
||||||
|
for _, cfg in configs:
|
||||||
|
assert "suggested_display_precision" in cfg
|
||||||
|
|
||||||
|
|
||||||
|
class TestContactTrackerDiscovery:
|
||||||
|
def test_config_shape(self):
|
||||||
|
topic, cfg = _contact_tracker_discovery_config("mc", "eeff11223344", "Alice", "aabb")
|
||||||
|
assert topic == "homeassistant/device_tracker/meshcore_eeff11223344/config"
|
||||||
|
assert cfg["unique_id"] == "meshcore_eeff11223344_tracker"
|
||||||
|
assert cfg["source_type"] == "gps"
|
||||||
|
assert cfg["json_attributes_topic"] == "mc/eeff11223344/gps"
|
||||||
|
assert "state_topic" not in cfg
|
||||||
|
|
||||||
|
|
||||||
|
class TestMessageEventDiscovery:
|
||||||
|
def test_config_shape(self):
|
||||||
|
topic, cfg = _message_event_discovery_config("mc", "aabbccddeeff", "MyRadio")
|
||||||
|
assert topic == "homeassistant/event/meshcore_aabbccddeeff/messages/config"
|
||||||
|
assert "message_received" in cfg["event_types"]
|
||||||
|
assert cfg["state_topic"] == "mc/aabbccddeeff/events/message"
|
||||||
|
assert cfg["unique_id"] == "meshcore_aabbccddeeff_messages"
|
||||||
|
assert cfg["device"]["identifiers"] == ["meshcore_aabbccddeeff"]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Module: filtering
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaFiltering:
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_contact_ignores_untracked(self):
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_contacts=["aaaa"]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_contact({"public_key": "bbbb", "lat": 1.0, "lon": 2.0})
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_not_called()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_contact_publishes_tracked(self):
|
||||||
|
key = "aabbccddeeff"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_contacts=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_contact({"public_key": key, "lat": 37.7, "lon": -122.4})
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_called_once()
|
||||||
|
topic = mod._publisher.publish.call_args[0][0]
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert topic == f"meshcore/{_node_id(key)}/gps"
|
||||||
|
assert payload["latitude"] == 37.7
|
||||||
|
assert payload["longitude"] == -122.4
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_contact_skips_zero_gps(self):
|
||||||
|
key = "aabbccddeeff"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_contacts=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_contact({"public_key": key, "lat": 0.0, "lon": 0.0})
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_not_called()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_ignores_untracked(self):
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=["aaaa"]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_telemetry({"public_key": "bbbb", "battery_volts": 4.1})
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_not_called()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_publishes_tracked(self):
|
||||||
|
key = "ccdd11223344"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"noise_floor_dbm": -112,
|
||||||
|
"last_rssi_dbm": -80,
|
||||||
|
"last_snr_db": 10.0,
|
||||||
|
"packets_received": 500,
|
||||||
|
"packets_sent": 300,
|
||||||
|
"uptime_seconds": 86400,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_called_once()
|
||||||
|
topic = mod._publisher.publish.call_args[0][0]
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert topic == f"meshcore/{_node_id(key)}/telemetry"
|
||||||
|
assert payload["battery_volts"] == 4.1
|
||||||
|
assert payload["uptime_seconds"] == 86400
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaHealth:
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_health_publishes_state(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._radio_key = "aabbccddeeff"
|
||||||
|
|
||||||
|
await mod.on_health(
|
||||||
|
{
|
||||||
|
"connected": True,
|
||||||
|
"public_key": "aabbccddeeff",
|
||||||
|
"name": "MyRadio",
|
||||||
|
"noise_floor_dbm": -110,
|
||||||
|
"battery_mv": 4150,
|
||||||
|
"uptime_secs": 3600,
|
||||||
|
"last_rssi": -85,
|
||||||
|
"last_snr": 9.5,
|
||||||
|
"packets_recv": 500,
|
||||||
|
"packets_sent": 250,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should publish health state
|
||||||
|
calls = mod._publisher.publish.call_args_list
|
||||||
|
# Last call should be health state (discovery may also be published)
|
||||||
|
health_calls = [c for c in calls if "/health" in c[0][0]]
|
||||||
|
assert len(health_calls) >= 1
|
||||||
|
payload = health_calls[-1][0][1]
|
||||||
|
assert payload["connected"] is True
|
||||||
|
assert payload["noise_floor_dbm"] == -110
|
||||||
|
assert payload["battery_volts"] == 4.15
|
||||||
|
assert payload["uptime_secs"] == 3600
|
||||||
|
assert payload["last_rssi"] == -85
|
||||||
|
assert payload["packets_recv"] == 500
|
||||||
|
assert payload["packets_sent"] == 250
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_health_caches_radio_key(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_health(
|
||||||
|
{
|
||||||
|
"connected": True,
|
||||||
|
"public_key": "aabbccddeeff",
|
||||||
|
"name": "MyRadio",
|
||||||
|
"noise_floor_dbm": None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert mod._radio_key == "aabbccddeeff"
|
||||||
|
assert mod._radio_name == "MyRadio"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_health_key_change_clears_all_existing_discovery_topics(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._radio_key = "aabbccddeeff"
|
||||||
|
mod._radio_name = "OldRadio"
|
||||||
|
mod._discovery_topics = [
|
||||||
|
"homeassistant/sensor/meshcore_aabbccddeeff/noise_floor/config",
|
||||||
|
"homeassistant/event/meshcore_aabbccddeeff/messages/config",
|
||||||
|
"homeassistant/device_tracker/meshcore_ccdd11223344/config",
|
||||||
|
"homeassistant/sensor/meshcore_eeff11223344/battery_voltage/config",
|
||||||
|
]
|
||||||
|
|
||||||
|
mod._clear_retained_topics = AsyncMock()
|
||||||
|
|
||||||
|
async def publish_discovery_side_effect():
|
||||||
|
assert mod._discovery_topics == []
|
||||||
|
mod._discovery_topics = [
|
||||||
|
"homeassistant/sensor/meshcore_112233445566/noise_floor/config",
|
||||||
|
"homeassistant/event/meshcore_112233445566/messages/config",
|
||||||
|
]
|
||||||
|
|
||||||
|
mod._publish_discovery = AsyncMock(side_effect=publish_discovery_side_effect)
|
||||||
|
|
||||||
|
await mod.on_health(
|
||||||
|
{
|
||||||
|
"connected": True,
|
||||||
|
"public_key": "112233445566",
|
||||||
|
"name": "NewRadio",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._clear_retained_topics.assert_awaited_once_with(
|
||||||
|
[
|
||||||
|
"homeassistant/sensor/meshcore_aabbccddeeff/noise_floor/config",
|
||||||
|
"homeassistant/event/meshcore_aabbccddeeff/messages/config",
|
||||||
|
"homeassistant/device_tracker/meshcore_ccdd11223344/config",
|
||||||
|
"homeassistant/sensor/meshcore_eeff11223344/battery_voltage/config",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
mod._publish_discovery.assert_awaited_once()
|
||||||
|
assert mod._radio_key == "112233445566"
|
||||||
|
assert mod._radio_name == "NewRadio"
|
||||||
|
assert mod._discovery_topics == [
|
||||||
|
"homeassistant/sensor/meshcore_112233445566/noise_floor/config",
|
||||||
|
"homeassistant/event/meshcore_112233445566/messages/config",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaLifecycle:
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_start_seeds_radio_identity_from_connected_runtime(self, monkeypatch):
|
||||||
|
from app.services.radio_runtime import radio_runtime
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
radio_runtime.manager,
|
||||||
|
"_meshcore",
|
||||||
|
SimpleNamespace(
|
||||||
|
is_connected=True,
|
||||||
|
self_info={"public_key": "AABBCCDDEEFF", "name": "MyRadio"},
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.start = AsyncMock()
|
||||||
|
|
||||||
|
await mod.start()
|
||||||
|
|
||||||
|
assert mod._radio_key == "aabbccddeeff"
|
||||||
|
assert mod._radio_name == "MyRadio"
|
||||||
|
mod._publisher.start.assert_awaited_once()
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaMessage:
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_message_publishes_event(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._radio_key = "aabbccddeeff"
|
||||||
|
|
||||||
|
await mod.on_message(
|
||||||
|
{
|
||||||
|
"type": "PRIV",
|
||||||
|
"conversation_key": "pk1",
|
||||||
|
"text": "hello",
|
||||||
|
"sender_name": "Alice",
|
||||||
|
"sender_key": "aabb",
|
||||||
|
"outgoing": False,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_called_once()
|
||||||
|
topic = mod._publisher.publish.call_args[0][0]
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert topic == "meshcore/aabbccddeeff/events/message"
|
||||||
|
assert payload["event_type"] == "message_received"
|
||||||
|
assert payload["text"] == "hello"
|
||||||
|
assert payload["sender_name"] == "Alice"
|
||||||
|
assert payload["outgoing"] is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_message_skips_without_radio_key(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
# _radio_key is None — should not publish
|
||||||
|
await mod.on_message({"type": "PRIV", "text": "hi", "sender_name": "X"})
|
||||||
|
mod._publisher.publish.assert_not_called()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_message_strips_channel_sender_prefix(self):
|
||||||
|
mod = MqttHaModule("test", _base_config())
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._radio_key = "aabbccddeeff"
|
||||||
|
|
||||||
|
await mod.on_message(
|
||||||
|
{
|
||||||
|
"type": "CHAN",
|
||||||
|
"conversation_key": "ch1",
|
||||||
|
"text": "Alice: hello from channel",
|
||||||
|
"sender_name": "Alice",
|
||||||
|
"sender_key": "aabb",
|
||||||
|
"outgoing": False,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert payload["text"] == "hello from channel"
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaStatus:
|
||||||
|
def test_disconnected_without_host(self):
|
||||||
|
mod = MqttHaModule("test", _base_config(broker_host=""))
|
||||||
|
assert mod.status == "disconnected"
|
||||||
|
|
||||||
|
def test_disconnected_with_host_but_not_connected(self):
|
||||||
|
mod = MqttHaModule("test", _base_config(broker_host="192.168.1.1"))
|
||||||
|
assert mod.status == "disconnected"
|
||||||
|
|
||||||
|
def test_error_when_publisher_has_error(self):
|
||||||
|
mod = MqttHaModule("test", _base_config(broker_host="192.168.1.1"))
|
||||||
|
mod._publisher._last_error = "connection refused"
|
||||||
|
assert mod.status == "error"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Router validation
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaValidation:
|
||||||
|
def test_valid_config_passes(self):
|
||||||
|
from app.routers.fanout import _validate_mqtt_ha_config
|
||||||
|
|
||||||
|
_validate_mqtt_ha_config({"broker_host": "192.168.1.1", "broker_port": 1883})
|
||||||
|
|
||||||
|
def test_missing_host_fails(self):
|
||||||
|
from app.routers.fanout import _validate_mqtt_ha_config
|
||||||
|
|
||||||
|
with pytest.raises(Exception, match="broker_host"):
|
||||||
|
_validate_mqtt_ha_config({"broker_host": ""})
|
||||||
|
|
||||||
|
def test_bad_port_fails(self):
|
||||||
|
from app.routers.fanout import _validate_mqtt_ha_config
|
||||||
|
|
||||||
|
with pytest.raises(Exception, match="broker_port"):
|
||||||
|
_validate_mqtt_ha_config({"broker_host": "x", "broker_port": 99999})
|
||||||
|
|
||||||
|
def test_tracked_contacts_must_be_list(self):
|
||||||
|
from app.routers.fanout import _validate_mqtt_ha_config
|
||||||
|
|
||||||
|
with pytest.raises(Exception, match="tracked_contacts"):
|
||||||
|
_validate_mqtt_ha_config(
|
||||||
|
{
|
||||||
|
"broker_host": "x",
|
||||||
|
"tracked_contacts": "not-a-list",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_scope_enforced_no_raw_packets(self):
|
||||||
|
from app.routers.fanout import _enforce_scope
|
||||||
|
|
||||||
|
result = _enforce_scope("mqtt_ha", {"messages": "all", "raw_packets": "all"})
|
||||||
|
assert result["raw_packets"] == "none"
|
||||||
|
assert result["messages"] == "all"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# LPP sensor discovery and telemetry
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestLppSensorKey:
|
||||||
|
def test_basic(self):
|
||||||
|
assert _lpp_sensor_key("temperature", 1) == "lpp_temperature_ch1"
|
||||||
|
|
||||||
|
def test_zero_channel(self):
|
||||||
|
assert _lpp_sensor_key("humidity", 0) == "lpp_humidity_ch0"
|
||||||
|
|
||||||
|
|
||||||
|
class TestLppDiscoveryConfigs:
|
||||||
|
def test_produces_config_per_sensor(self):
|
||||||
|
nid = "ccdd11223344"
|
||||||
|
device = _device_payload(nid, "Rep1", "Repeater")
|
||||||
|
sensors = [
|
||||||
|
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||||
|
{"channel": 2, "type_name": "humidity", "value": 45.0},
|
||||||
|
]
|
||||||
|
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||||
|
|
||||||
|
assert len(configs) == 2
|
||||||
|
topics = [t for t, _ in configs]
|
||||||
|
assert f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config" in topics
|
||||||
|
assert f"homeassistant/sensor/meshcore_{nid}/lpp_humidity_ch2/config" in topics
|
||||||
|
|
||||||
|
def test_sensor_config_shape(self):
|
||||||
|
nid = "ccdd11223344"
|
||||||
|
device = _device_payload(nid, "Rep1", "Repeater")
|
||||||
|
sensors = [{"channel": 1, "type_name": "temperature", "value": 23.5}]
|
||||||
|
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||||
|
|
||||||
|
_, cfg = configs[0]
|
||||||
|
assert cfg["name"] == "Temperature (Ch 1)"
|
||||||
|
assert cfg["unique_id"] == f"meshcore_{nid}_lpp_temperature_ch1"
|
||||||
|
assert cfg["device_class"] == "temperature"
|
||||||
|
assert cfg["unit_of_measurement"] == "°C"
|
||||||
|
assert cfg["state_class"] == "measurement"
|
||||||
|
assert cfg["expire_after"] == 36000
|
||||||
|
assert cfg["suggested_display_precision"] == 1
|
||||||
|
assert "lpp_temperature_ch1" in cfg["value_template"]
|
||||||
|
|
||||||
|
def test_unknown_sensor_type_no_device_class(self):
|
||||||
|
nid = "ccdd11223344"
|
||||||
|
device = _device_payload(nid, "Rep1", "Repeater")
|
||||||
|
sensors = [{"channel": 0, "type_name": "exotic_sensor", "value": 1.0}]
|
||||||
|
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||||
|
|
||||||
|
_, cfg = configs[0]
|
||||||
|
assert "device_class" not in cfg
|
||||||
|
assert "unit_of_measurement" not in cfg
|
||||||
|
|
||||||
|
|
||||||
|
class TestMqttHaTelemetryWithLpp:
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_flattens_lpp_sensors(self):
|
||||||
|
key = "ccdd11223344"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
# Pretend discovery already covers these sensors
|
||||||
|
nid = _node_id(key)
|
||||||
|
mod._discovery_topics = [
|
||||||
|
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config",
|
||||||
|
f"homeassistant/sensor/meshcore_{nid}/lpp_humidity_ch2/config",
|
||||||
|
]
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"lpp_sensors": [
|
||||||
|
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||||
|
{"channel": 2, "type_name": "humidity", "value": 45.0},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._publisher.publish.assert_called_once()
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert payload["battery_volts"] == 4.1
|
||||||
|
assert payload["lpp_temperature_ch1"] == 23.5
|
||||||
|
assert payload["lpp_humidity_ch2"] == 45.0
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_triggers_rediscovery_for_new_lpp_sensor(self):
|
||||||
|
key = "ccdd11223344"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._discovery_topics = [] # No sensors discovered yet
|
||||||
|
mod._publish_discovery = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"lpp_sensors": [
|
||||||
|
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._publish_discovery.assert_awaited_once()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_discovery_published_before_state(self):
|
||||||
|
"""Discovery configs must arrive before the state payload so HA knows the entity."""
|
||||||
|
key = "ccdd11223344"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._discovery_topics = [] # New sensor triggers rediscovery
|
||||||
|
|
||||||
|
call_order: list[str] = []
|
||||||
|
|
||||||
|
async def fake_discovery():
|
||||||
|
call_order.append("discovery")
|
||||||
|
|
||||||
|
mod._publish_discovery = AsyncMock(side_effect=fake_discovery)
|
||||||
|
|
||||||
|
original_publish = mod._publisher.publish
|
||||||
|
|
||||||
|
async def tracking_publish(topic, payload, **kw):
|
||||||
|
if "/telemetry" in topic:
|
||||||
|
call_order.append("state")
|
||||||
|
return await original_publish(topic, payload, **kw)
|
||||||
|
|
||||||
|
mod._publisher.publish = AsyncMock(side_effect=tracking_publish)
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"lpp_sensors": [
|
||||||
|
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert call_order == ["discovery", "state"]
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_no_rediscovery_when_already_known(self):
|
||||||
|
key = "ccdd11223344"
|
||||||
|
nid = _node_id(key)
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
mod._discovery_topics = [
|
||||||
|
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config",
|
||||||
|
]
|
||||||
|
mod._publish_discovery = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"lpp_sensors": [
|
||||||
|
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
mod._publish_discovery.assert_not_awaited()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_telemetry_without_lpp_sensors(self):
|
||||||
|
"""Existing behavior: no lpp_sensors key means no LPP fields in payload."""
|
||||||
|
key = "ccdd11223344"
|
||||||
|
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||||
|
mod._publisher = MagicMock()
|
||||||
|
mod._publisher.connected = True
|
||||||
|
mod._publisher.publish = AsyncMock()
|
||||||
|
|
||||||
|
await mod.on_telemetry(
|
||||||
|
{
|
||||||
|
"public_key": key,
|
||||||
|
"battery_volts": 4.1,
|
||||||
|
"noise_floor_dbm": -112,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
payload = mod._publisher.publish.call_args[0][1]
|
||||||
|
assert payload["battery_volts"] == 4.1
|
||||||
|
# No lpp keys
|
||||||
|
assert not any(k.startswith("lpp_") for k in payload)
|
||||||
@@ -5,7 +5,7 @@ undecrypted count endpoint, and the maintenance endpoint.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from unittest.mock import AsyncMock, patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -307,38 +307,37 @@ class TestDecryptHistoricalPackets:
|
|||||||
|
|
||||||
class TestUndecryptedTextPacketStreaming:
|
class TestUndecryptedTextPacketStreaming:
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_count_undecrypted_text_messages_uses_batched_streaming(self, test_db):
|
async def test_count_undecrypted_text_messages_uses_keyset_pagination(self, test_db):
|
||||||
"""Counting undecrypted DM packets should stream batches and filter by payload type."""
|
"""Counting undecrypted DM packets should use keyset pagination and filter by payload type."""
|
||||||
|
|
||||||
class FakeCursor:
|
# Simulate keyset pagination: each execute() call returns a cursor
|
||||||
def __init__(self):
|
# whose fetchall() yields one batch. The generator stops when a
|
||||||
self._batches = [
|
# batch is empty.
|
||||||
[
|
batches = [
|
||||||
{"id": 1, "data": b"\x09\x00dm", "timestamp": 1000},
|
[
|
||||||
{"id": 2, "data": b"\x15\x00chan", "timestamp": 1001},
|
{"id": 1, "data": b"\x09\x00dm", "timestamp": 1000},
|
||||||
],
|
{"id": 2, "data": b"\x15\x00chan", "timestamp": 1001},
|
||||||
[{"id": 3, "data": b"\x09\x00dm2", "timestamp": 1002}],
|
],
|
||||||
[],
|
[{"id": 3, "data": b"\x09\x00dm2", "timestamp": 1002}],
|
||||||
]
|
[],
|
||||||
self.fetchall_called = False
|
]
|
||||||
|
|
||||||
async def fetchmany(self, size):
|
async def fake_execute(*_args, **_kwargs):
|
||||||
assert size > 0
|
batch = batches.pop(0)
|
||||||
return self._batches.pop(0)
|
|
||||||
|
|
||||||
async def close(self):
|
class FakeCursor:
|
||||||
return None
|
async def fetchall(self):
|
||||||
|
return batch
|
||||||
|
|
||||||
async def fetchall(self):
|
async def close(self):
|
||||||
self.fetchall_called = True
|
pass
|
||||||
raise AssertionError("fetchall() should not be used")
|
|
||||||
|
|
||||||
fake_cursor = FakeCursor()
|
return FakeCursor()
|
||||||
|
|
||||||
with patch.object(test_db.conn, "execute", new=AsyncMock(return_value=fake_cursor)):
|
with patch.object(test_db.conn, "execute", side_effect=fake_execute):
|
||||||
count = await RawPacketRepository.count_undecrypted_text_messages(batch_size=2)
|
count = await RawPacketRepository.count_undecrypted_text_messages(batch_size=2)
|
||||||
|
|
||||||
assert fake_cursor.fetchall_called is False
|
# header byte 0x09 -> payload type 2 (TEXT_MESSAGE); 0x15 -> type 5 (not TEXT_MESSAGE)
|
||||||
assert count == 2
|
assert count == 2
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
+289
-7
@@ -377,14 +377,22 @@ class TestSyncRecentContactsToRadio:
|
|||||||
assert result["loaded"] == 2
|
assert result["loaded"] == 2
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_fills_remaining_slots_with_recently_contacted_then_advertised(self, test_db):
|
async def test_fills_remaining_slots_with_dm_active_then_advertised(self, test_db):
|
||||||
"""Fill order is favorites, then recent contacts, then recent adverts."""
|
"""Fill order is favorites, then DM-active contacts, then recent adverts."""
|
||||||
await _insert_contact(KEY_A, "Alice", last_contacted=100)
|
await _insert_contact(KEY_A, "Alice")
|
||||||
await _insert_contact(KEY_B, "Bob", last_contacted=2000)
|
await _insert_contact(KEY_B, "Bob")
|
||||||
await _insert_contact("cc" * 32, "Carol", last_contacted=1000)
|
await _insert_contact("cc" * 32, "Carol")
|
||||||
await _insert_contact("dd" * 32, "Dave", last_advert=3000)
|
await _insert_contact("dd" * 32, "Dave", last_advert=3000)
|
||||||
await _insert_contact("ee" * 32, "Eve", last_advert=2500)
|
await _insert_contact("ee" * 32, "Eve", last_advert=2500)
|
||||||
|
|
||||||
|
# Create DM activity for Alice (oldest), Bob (most recent), Carol (middle)
|
||||||
|
for key, ts in [(KEY_A, 100), (KEY_B, 2000), ("cc" * 32, 1000)]:
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hi', ?)",
|
||||||
|
(key, ts),
|
||||||
|
)
|
||||||
|
await test_db.conn.commit()
|
||||||
|
|
||||||
await AppSettingsRepository.update(max_radio_contacts=5)
|
await AppSettingsRepository.update(max_radio_contacts=5)
|
||||||
await ContactRepository.set_favorite(KEY_A, True)
|
await ContactRepository.set_favorite(KEY_A, True)
|
||||||
|
|
||||||
@@ -401,6 +409,7 @@ class TestSyncRecentContactsToRadio:
|
|||||||
loaded_keys = [
|
loaded_keys = [
|
||||||
call.args[0]["public_key"] for call in mock_mc.commands.add_contact.call_args_list
|
call.args[0]["public_key"] for call in mock_mc.commands.add_contact.call_args_list
|
||||||
]
|
]
|
||||||
|
# Alice (favorite), then Bob & Carol (DM-active, most recent first), then Dave (advert)
|
||||||
assert loaded_keys == [KEY_A, KEY_B, "cc" * 32, "dd" * 32]
|
assert loaded_keys == [KEY_A, KEY_B, "cc" * 32, "dd" * 32]
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
@@ -509,8 +518,15 @@ class TestSyncAndOffloadAll:
|
|||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_duplicate_favorite_not_loaded_twice(self, test_db):
|
async def test_duplicate_favorite_not_loaded_twice(self, test_db):
|
||||||
"""Duplicate favorite entries still load the contact only once."""
|
"""Duplicate favorite entries still load the contact only once."""
|
||||||
await _insert_contact(KEY_A, "Alice", last_contacted=2000)
|
await _insert_contact(KEY_A, "Alice")
|
||||||
await _insert_contact(KEY_B, "Bob", last_contacted=1000)
|
await _insert_contact(KEY_B, "Bob")
|
||||||
|
|
||||||
|
# Bob has DM activity so he appears in tier 2
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hi', 1000)",
|
||||||
|
(KEY_B,),
|
||||||
|
)
|
||||||
|
await test_db.conn.commit()
|
||||||
|
|
||||||
await AppSettingsRepository.update(max_radio_contacts=2)
|
await AppSettingsRepository.update(max_radio_contacts=2)
|
||||||
await ContactRepository.set_favorite(KEY_A, True)
|
await ContactRepository.set_favorite(KEY_A, True)
|
||||||
@@ -1695,3 +1711,269 @@ class TestPeriodicSyncLoopRaces:
|
|||||||
mock_cleanup.assert_called_once()
|
mock_cleanup.assert_called_once()
|
||||||
mock_sync.assert_not_called()
|
mock_sync.assert_not_called()
|
||||||
mock_time.assert_called_once_with(mock_mc)
|
mock_time.assert_called_once_with(mock_mc)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# _collect_repeater_telemetry — LPP sensor collection
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestCollectRepeaterTelemetryLpp:
|
||||||
|
"""Verify that _collect_repeater_telemetry fetches LPP sensors."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_lpp_sensors_included_in_data(self):
|
||||||
|
from app.radio_sync import _collect_repeater_telemetry
|
||||||
|
|
||||||
|
mc = MagicMock()
|
||||||
|
mc.commands.add_contact = AsyncMock()
|
||||||
|
mc.commands.req_status_sync = AsyncMock(
|
||||||
|
return_value={"bat": 4100, "noise_floor": -110, "nb_recv": 10, "nb_sent": 5}
|
||||||
|
)
|
||||||
|
mc.commands.req_telemetry_sync = AsyncMock(
|
||||||
|
return_value=[
|
||||||
|
{"channel": 1, "type": "temperature", "value": 23.5},
|
||||||
|
{"channel": 2, "type": "humidity", "value": 45.0},
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
contact = MagicMock()
|
||||||
|
contact.public_key = "aabbccddeeff11223344"
|
||||||
|
contact.name = "TestRepeater"
|
||||||
|
contact.to_radio_dict.return_value = {}
|
||||||
|
|
||||||
|
recorded_data = {}
|
||||||
|
|
||||||
|
async def mock_record(public_key, timestamp, data):
|
||||||
|
recorded_data.update(data)
|
||||||
|
|
||||||
|
mock_fanout = MagicMock()
|
||||||
|
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
side_effect=mock_record,
|
||||||
|
),
|
||||||
|
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||||
|
):
|
||||||
|
result = await _collect_repeater_telemetry(mc, contact)
|
||||||
|
|
||||||
|
assert result is True
|
||||||
|
assert "lpp_sensors" in recorded_data
|
||||||
|
assert len(recorded_data["lpp_sensors"]) == 2
|
||||||
|
assert recorded_data["lpp_sensors"][0]["type_name"] == "temperature"
|
||||||
|
assert recorded_data["lpp_sensors"][0]["value"] == 23.5
|
||||||
|
assert recorded_data["lpp_sensors"][1]["type_name"] == "humidity"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_lpp_failure_does_not_fail_collection(self):
|
||||||
|
from app.radio_sync import _collect_repeater_telemetry
|
||||||
|
|
||||||
|
mc = MagicMock()
|
||||||
|
mc.commands.add_contact = AsyncMock()
|
||||||
|
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4100, "noise_floor": -110})
|
||||||
|
mc.commands.req_telemetry_sync = AsyncMock(side_effect=Exception("no sensors"))
|
||||||
|
|
||||||
|
contact = MagicMock()
|
||||||
|
contact.public_key = "aabbccddeeff11223344"
|
||||||
|
contact.name = "TestRepeater"
|
||||||
|
contact.to_radio_dict.return_value = {}
|
||||||
|
|
||||||
|
recorded_data = {}
|
||||||
|
|
||||||
|
async def mock_record(public_key, timestamp, data):
|
||||||
|
recorded_data.update(data)
|
||||||
|
|
||||||
|
mock_fanout = MagicMock()
|
||||||
|
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
side_effect=mock_record,
|
||||||
|
),
|
||||||
|
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||||
|
):
|
||||||
|
result = await _collect_repeater_telemetry(mc, contact)
|
||||||
|
|
||||||
|
assert result is True
|
||||||
|
assert "lpp_sensors" not in recorded_data
|
||||||
|
# Status data still present
|
||||||
|
assert recorded_data["battery_volts"] == 4.1
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_lpp_multivalue_sensors_skipped(self):
|
||||||
|
from app.radio_sync import _collect_repeater_telemetry
|
||||||
|
|
||||||
|
mc = MagicMock()
|
||||||
|
mc.commands.add_contact = AsyncMock()
|
||||||
|
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4000})
|
||||||
|
mc.commands.req_telemetry_sync = AsyncMock(
|
||||||
|
return_value=[
|
||||||
|
{"channel": 1, "type": "temperature", "value": 23.5},
|
||||||
|
{"channel": 3, "type": "gps", "value": {"lat": 1.0, "lon": 2.0, "alt": 3.0}},
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
contact = MagicMock()
|
||||||
|
contact.public_key = "aabbccddeeff11223344"
|
||||||
|
contact.name = "TestRepeater"
|
||||||
|
contact.to_radio_dict.return_value = {}
|
||||||
|
|
||||||
|
recorded_data = {}
|
||||||
|
|
||||||
|
async def mock_record(public_key, timestamp, data):
|
||||||
|
recorded_data.update(data)
|
||||||
|
|
||||||
|
mock_fanout = MagicMock()
|
||||||
|
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
side_effect=mock_record,
|
||||||
|
),
|
||||||
|
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||||
|
):
|
||||||
|
result = await _collect_repeater_telemetry(mc, contact)
|
||||||
|
|
||||||
|
assert result is True
|
||||||
|
assert len(recorded_data["lpp_sensors"]) == 1
|
||||||
|
assert recorded_data["lpp_sensors"][0]["type_name"] == "temperature"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_lpp_none_response_no_sensors_key(self):
|
||||||
|
from app.radio_sync import _collect_repeater_telemetry
|
||||||
|
|
||||||
|
mc = MagicMock()
|
||||||
|
mc.commands.add_contact = AsyncMock()
|
||||||
|
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4000})
|
||||||
|
mc.commands.req_telemetry_sync = AsyncMock(return_value=None)
|
||||||
|
|
||||||
|
contact = MagicMock()
|
||||||
|
contact.public_key = "aabbccddeeff11223344"
|
||||||
|
contact.name = "TestRepeater"
|
||||||
|
contact.to_radio_dict.return_value = {}
|
||||||
|
|
||||||
|
recorded_data = {}
|
||||||
|
|
||||||
|
async def mock_record(public_key, timestamp, data):
|
||||||
|
recorded_data.update(data)
|
||||||
|
|
||||||
|
mock_fanout = MagicMock()
|
||||||
|
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
side_effect=mock_record,
|
||||||
|
),
|
||||||
|
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||||
|
):
|
||||||
|
await _collect_repeater_telemetry(mc, contact)
|
||||||
|
|
||||||
|
assert "lpp_sensors" not in recorded_data
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# get_contacts_selected_for_radio_sync — DM-active prioritization
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestContactSelectionDmActive:
|
||||||
|
"""Verify that tier 2 prioritizes contacts with recent DM activity."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_incoming_dm_contact_selected_over_advert_only(self, test_db):
|
||||||
|
"""A contact who sent us a DM should be prioritized over one who only advertised."""
|
||||||
|
from app.radio_sync import get_contacts_selected_for_radio_sync
|
||||||
|
|
||||||
|
# Create two non-repeater contacts
|
||||||
|
dm_sender_key = "aa" * 32
|
||||||
|
advert_only_key = "bb" * 32
|
||||||
|
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO contacts (public_key, name, type, last_seen, last_advert) VALUES (?, ?, 1, 100, 100)",
|
||||||
|
(dm_sender_key, "DM Sender"),
|
||||||
|
)
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO contacts (public_key, name, type, last_seen, last_advert) VALUES (?, ?, 1, 200, 200)",
|
||||||
|
(advert_only_key, "Advert Only"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# DM Sender sent us a message (incoming DM)
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hello', 300)",
|
||||||
|
(dm_sender_key,),
|
||||||
|
)
|
||||||
|
await test_db.conn.commit()
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"app.radio_sync.AppSettingsRepository.get",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
|
||||||
|
):
|
||||||
|
selected = await get_contacts_selected_for_radio_sync()
|
||||||
|
|
||||||
|
keys = [c.public_key for c in selected]
|
||||||
|
assert dm_sender_key in keys
|
||||||
|
assert advert_only_key in keys
|
||||||
|
# DM Sender should come before Advert Only (tier 2 before tier 3)
|
||||||
|
assert keys.index(dm_sender_key) < keys.index(advert_only_key)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_outgoing_dm_contact_also_selected(self, test_db):
|
||||||
|
"""A contact we sent a DM to should also appear via DM-active tier."""
|
||||||
|
from app.radio_sync import get_contacts_selected_for_radio_sync
|
||||||
|
|
||||||
|
contact_key = "cc" * 32
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO contacts (public_key, name, type) VALUES (?, ?, 1)",
|
||||||
|
(contact_key, "Outgoing Target"),
|
||||||
|
)
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO messages (type, conversation_key, text, received_at, outgoing) VALUES ('PRIV', ?, 'hey', 300, 1)",
|
||||||
|
(contact_key,),
|
||||||
|
)
|
||||||
|
await test_db.conn.commit()
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"app.radio_sync.AppSettingsRepository.get",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
|
||||||
|
):
|
||||||
|
selected = await get_contacts_selected_for_radio_sync()
|
||||||
|
|
||||||
|
keys = [c.public_key for c in selected]
|
||||||
|
assert contact_key in keys
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_repeaters_excluded_from_dm_active_tier(self, test_db):
|
||||||
|
"""Repeater contacts should not appear in tier 2 even with DM activity."""
|
||||||
|
from app.radio_sync import get_contacts_selected_for_radio_sync
|
||||||
|
|
||||||
|
repeater_key = "dd" * 32
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO contacts (public_key, name, type) VALUES (?, ?, 2)",
|
||||||
|
(repeater_key, "Repeater"),
|
||||||
|
)
|
||||||
|
await test_db.conn.execute(
|
||||||
|
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'cmd', 300)",
|
||||||
|
(repeater_key,),
|
||||||
|
)
|
||||||
|
await test_db.conn.commit()
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"app.radio_sync.AppSettingsRepository.get",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
|
||||||
|
):
|
||||||
|
selected = await get_contacts_selected_for_radio_sync()
|
||||||
|
|
||||||
|
keys = [c.public_key for c in selected]
|
||||||
|
assert repeater_key not in keys
|
||||||
|
|||||||
@@ -352,27 +352,14 @@ class TestPathHashWidthStats:
|
|||||||
assert breakdown["triple_byte_pct"] == pytest.approx(100 / 3, rel=1e-3)
|
assert breakdown["triple_byte_pct"] == pytest.approx(100 / 3, rel=1e-3)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_path_hash_width_scan_uses_batched_fetchmany(self, test_db):
|
async def test_path_hash_width_scan_fetches_all_then_buckets(self, test_db):
|
||||||
"""Hash-width stats should stream batches instead of calling fetchall()."""
|
"""Hash-width stats should fetchall() then bucket synchronously."""
|
||||||
|
|
||||||
|
fake_rows = [{"data": b"a"}, {"data": b"b"}, {"data": b"c"}]
|
||||||
|
|
||||||
class FakeCursor:
|
class FakeCursor:
|
||||||
def __init__(self):
|
|
||||||
self._batches = [
|
|
||||||
[{"data": b"a"}, {"data": b"b"}],
|
|
||||||
[{"data": b"c"}],
|
|
||||||
[],
|
|
||||||
]
|
|
||||||
self.fetchall_called = False
|
|
||||||
|
|
||||||
async def fetchmany(self, size):
|
|
||||||
assert size > 0
|
|
||||||
return self._batches.pop(0)
|
|
||||||
|
|
||||||
async def fetchall(self):
|
async def fetchall(self):
|
||||||
self.fetchall_called = True
|
return fake_rows
|
||||||
raise AssertionError("fetchall() should not be used")
|
|
||||||
|
|
||||||
fake_cursor = FakeCursor()
|
|
||||||
|
|
||||||
def fake_parse(raw_packet: bytes):
|
def fake_parse(raw_packet: bytes):
|
||||||
hash_sizes = {
|
hash_sizes = {
|
||||||
@@ -386,12 +373,11 @@ class TestPathHashWidthStats:
|
|||||||
return SimpleNamespace(hash_size=hash_size)
|
return SimpleNamespace(hash_size=hash_size)
|
||||||
|
|
||||||
with (
|
with (
|
||||||
patch.object(test_db.conn, "execute", new=AsyncMock(return_value=fake_cursor)),
|
patch.object(test_db.conn, "execute", new=AsyncMock(return_value=FakeCursor())),
|
||||||
patch("app.path_utils.parse_packet_envelope", side_effect=fake_parse),
|
patch("app.path_utils.parse_packet_envelope", side_effect=fake_parse),
|
||||||
):
|
):
|
||||||
breakdown = await StatisticsRepository._path_hash_width_24h()
|
breakdown = await StatisticsRepository._path_hash_width_24h()
|
||||||
|
|
||||||
assert fake_cursor.fetchall_called is False
|
|
||||||
assert breakdown["total_packets"] == 3
|
assert breakdown["total_packets"] == 3
|
||||||
assert breakdown["single_byte"] == 1
|
assert breakdown["single_byte"] == 1
|
||||||
assert breakdown["double_byte"] == 1
|
assert breakdown["double_byte"] == 1
|
||||||
|
|||||||
@@ -983,7 +983,7 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "remoteterm-meshcore"
|
name = "remoteterm-meshcore"
|
||||||
version = "3.11.0"
|
version = "3.11.3"
|
||||||
source = { virtual = "." }
|
source = { virtual = "." }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "aiomqtt" },
|
{ name = "aiomqtt" },
|
||||||
|
|||||||
Reference in New Issue
Block a user