mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-12 20:36:05 +02:00
Compare commits
33 Commits
| SHA1 |
|---|
| 9ab4e7a9b0 |
| af76546287 |
| 31bd4a0744 |
| 1db724073b |
| 4783da8f3e |
| 4b69ec4519 |
| 8efbbd97bd |
| 1437e8e48a |
| 5cd8f7e80f |
| e8c50d0b2a |
| 7f3bb89323 |
| 5bfdd0880e |
| 0e9bd59b44 |
| b1cd6e1aa9 |
| 56fc589e0b |
| 64502c4ca2 |
| d1f657342a |
| 86a0ac7beb |
| 3b7e2737ee |
| 01158ac69f |
| 485df05372 |
| e5e9eab935 |
| 33b2d3c260 |
| eccbd0bac5 |
| 4f54ec2c93 |
| eed38337c8 |
| e1ee7fcd24 |
| 2756b1ae8d |
| ef1d6a5a1a |
| 14f42c59fe |
| b9414e84ee |
| 95a17ca8ee |
| e6cedfbd0b |
@@ -197,6 +197,7 @@ This message-layer echo/path handling is independent of raw-packet storage dedup
│   ├── event_handlers.py   # Radio events
│   ├── decoder.py          # Packet decryption
│   ├── websocket.py        # Real-time broadcasts
│   ├── push/               # Web Push notification subsystem (VAPID keys, dispatch, send)
│   └── fanout/             # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
├── frontend/               # React frontend
│   ├── AGENTS.md           # Frontend documentation
@@ -380,6 +381,12 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
| DELETE | `/api/fanout/{id}` | Delete fanout config (stops module) |
| POST | `/api/fanout/bots/disable-until-restart` | Stop bot fanout modules and keep bots disabled until the process restarts |
| GET | `/api/statistics` | Aggregated mesh network statistics |
| GET | `/api/push/vapid-public-key` | VAPID public key for browser push subscription |
| POST | `/api/push/subscribe` | Register/upsert a push subscription |
| GET | `/api/push/subscriptions` | List all push subscriptions |
| PATCH | `/api/push/subscriptions/{id}` | Update subscription label or filter preferences |
| DELETE | `/api/push/subscriptions/{id}` | Delete a push subscription |
| POST | `/api/push/subscriptions/{id}/test` | Send a test push notification |
| WS | `/api/ws` | Real-time updates |

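For orientation, a subscription round-trip against the push endpoints above might look like the sketch below. The request and response field names (`endpoint`, `keys.p256dh`, `keys.auth`, `label`, `id`) are assumptions based on the Web Push sections later in this document, not a documented schema.

```python
# Hypothetical client sketch of the push endpoints; field names and the
# response shape are assumptions, not a documented request/response schema.
import requests

BASE = "https://remoteterm.example/api"  # assumed deployment URL

vapid_public_key = requests.get(f"{BASE}/push/vapid-public-key").json()

# In a real browser this object comes from PushManager.subscribe();
# the values here are placeholders.
subscription = {
    "endpoint": "https://fcm.googleapis.com/fcm/send/abc123",
    "keys": {"p256dh": "<base64url>", "auth": "<base64url>"},
    "label": "Desk laptop",
}
resp = requests.post(f"{BASE}/push/subscribe", json=subscription)
sub_id = resp.json().get("id")  # response shape assumed

# Fire a test notification at the new subscription.
requests.post(f"{BASE}/push/subscriptions/{sub_id}/test")
```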
## Key Concepts

@@ -434,6 +441,17 @@ All external integrations are managed through the fanout bus (`app/fanout/`). Ea

Community MQTT forwards raw packets only. Its derived `path` field, when present on direct packets, is a comma-separated list of hop identifiers as reported by the packet format. Token width therefore varies with the packet's path hash mode; it is intentionally not a flat per-byte rendering.
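As a rough illustration of what variable token width means in practice (the real helpers live in `app/path_utils.py`; the hop widths and hex values below are invented for the example):

```python
# Invented example: render a raw path as comma-separated hop tokens whose
# width depends on the packet's path hash mode. Widths are illustrative only.
def render_hops(path_bytes: bytes, hop_width: int) -> str:
    return ",".join(
        path_bytes[i : i + hop_width].hex()
        for i in range(0, len(path_bytes), hop_width)
    )

print(render_hops(bytes.fromhex("a1b2c3d4"), 1))  # "a1,b2,c3,d4"
print(render_hops(bytes.fromhex("a1b2c3d4"), 2))  # "a1b2,c3d4"
```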
### Web Push Notifications

Web Push is a standalone subsystem (`app/push/`) that sends browser push notifications for incoming messages even when the browser tab is closed. It is **not** a fanout module — it manages its own per-browser subscriptions, while the set of push-enabled conversations is stored once per server instance.

- **Requires HTTPS** (self-signed certificates work) and outbound internet from the server to reach browser push services (Google FCM, Mozilla autopush).
- VAPID key pair is auto-generated on first startup and stored in `app_settings`.
- Each browser subscription is stored in `push_subscriptions` with device identity and delivery state. The set of push-enabled conversations is stored globally in `app_settings.push_conversations`, so all subscribed browsers receive the same configured rooms/DMs.
- `broadcast_event()` in `websocket.py` dispatches to `push_manager.dispatch_message()` alongside fanout for `message` events.
- Expired subscriptions (HTTP 404/410 from push service) are auto-deleted.
- Frontend: service worker (`sw.js`) handles push display and notification click navigation. The `BellRing` icon in `ChatHeader` toggles per-conversation push. Device management lives in Settings > Local.

### Server-Side Decryption

The server can decrypt packets using stored keys, both in real-time and for historical packets.

@@ -1,3 +1,21 @@

## [3.11.3] - 2026-04-12

* Bugfix: Add icons and screenshots for webmanifest
* Bugfix: Use incoming DMs, not just outgoing, for recency ranking for preferential radio contact load

## [3.11.2] - 2026-04-12

* Feature: Unread DMs are always at the top of the DM list no matter what
* Bugfix: Webmanifest needs withCredentials

## [3.11.1] - 2026-04-12

* Feature: Home Assistant MQTT fanout
* Feature: Add dummy service worker to enable PWA
* Bugfix: DB connection plurality issues
* Misc: Migration improvements
* Misc: Search keys from beginning

## [3.11.0] - 2026-04-10

* Feature: Radio health and contact data accessible on fanout bus

+26 -1
@@ -50,6 +50,10 @@ app/
├── events.py           # Typed WS event payload serialization
├── websocket.py        # WS manager + broadcast helpers
├── security.py         # Optional app-wide HTTP Basic auth middleware for HTTP + WS
├── push/               # Web Push notification subsystem
│   ├── vapid.py        # VAPID key generation, storage, caching
│   ├── send.py         # pywebpush wrapper (async via thread executor)
│   └── manager.py      # Push dispatch: filter, build payload, concurrent send
├── fanout/             # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
├── dependencies.py     # Shared FastAPI dependency providers
├── path_utils.py       # Path hex rendering and hop-width helpers
@@ -71,6 +75,7 @@ app/
├── fanout.py
├── repeaters.py
├── statistics.py
├── push.py
└── ws.py
```

@@ -168,6 +173,17 @@ app/
- Community MQTT publishes raw packets only, but its derived `path` field for direct packets is emitted as comma-separated hop identifiers, not flat path bytes.
- See `app/fanout/AGENTS_fanout.md` for full architecture details and event payload shapes.

### Web Push notifications

Web Push is a standalone subsystem in `app/push/`, separate from the fanout module system. It sends browser push notifications for incoming messages even when the tab is closed.

- **Not a fanout module** — Web Push manages per-browser subscriptions (N browsers, each with its own endpoint and delivery state), unlike fanout which is one-config-to-one-destination.
- **VAPID keys**: auto-generated P-256 key pair on first startup, stored in `app_settings.vapid_private_key` / `vapid_public_key`. Cached in-module by `app/push/vapid.py`.
- **Dispatch**: `broadcast_event()` in `websocket.py` fires `push_manager.dispatch_message(data)` alongside fanout for `message` events. The manager checks the global `app_settings.push_conversations` list, then sends to all currently registered subscriptions via `pywebpush` (run in a thread executor).
- **Stale cleanup**: HTTP 404/410 from the push service triggers immediate subscription deletion.
- **Subscriptions stored** in `push_subscriptions` table with `UNIQUE(endpoint)` for upsert semantics.
- Requires HTTPS (self-signed OK) and outbound internet to reach browser push services.
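The `UNIQUE(endpoint)` constraint called out above is what turns `POST /push/subscribe` into an upsert. A minimal sketch of that pattern follows; the SQL and helper signature are illustrative, not the actual `PushSubscriptionRepository` code.

```python
# Illustrative upsert keyed on the UNIQUE(endpoint) constraint; the real
# repository may differ in columns and conflict handling.
async def upsert_subscription(conn, sub_id, endpoint, p256dh, auth, label, now):
    await conn.execute(
        """
        INSERT INTO push_subscriptions (id, endpoint, p256dh, auth, label, created_at)
        VALUES (?, ?, ?, ?, ?, ?)
        ON CONFLICT(endpoint) DO UPDATE SET
            p256dh = excluded.p256dh,
            auth = excluded.auth,
            label = excluded.label
        """,
        (sub_id, endpoint, p256dh, auth, label, now),
    )
```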
## API Surface (all under `/api`)

### Health
@@ -258,6 +274,14 @@ app/
### Statistics
- `GET /statistics` — aggregated mesh network stats (entity counts, message/packet splits, activity windows, busiest channels)

### Push
- `GET /push/vapid-public-key` — VAPID public key for browser `PushManager.subscribe()`
- `POST /push/subscribe` — register/upsert push subscription (keyed by endpoint URL)
- `GET /push/subscriptions` — list all push subscriptions
- `PATCH /push/subscriptions/{id}` — update label or filter preferences
- `DELETE /push/subscriptions/{id}` — delete subscription
- `POST /push/subscriptions/{id}/test` — send test notification

### WebSocket
- `WS /ws`

@@ -290,7 +314,8 @@ Main tables:
- `contact_name_history` (tracks name changes over time)
- `repeater_telemetry_history` (time-series telemetry snapshots for tracked repeaters)
- `fanout_configs` (MQTT, bot, webhook, Apprise, SQS integration configs)
- `app_settings`
- `push_subscriptions` (Web Push browser subscriptions with delivery metadata; UNIQUE on endpoint)
- `app_settings` (includes `vapid_private_key` and `vapid_public_key` for Web Push VAPID signing)

Contact route state is canonicalized on the backend:
- stored route inputs: `direct_path`, `direct_path_len`, `direct_path_hash_mode`, `direct_path_updated_at`, plus optional `route_override_*`

+86 -1
@@ -1,4 +1,7 @@
import asyncio
import logging
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from pathlib import Path

import aiosqlite
@@ -108,7 +111,8 @@ CREATE TABLE IF NOT EXISTS app_settings (
    blocked_names TEXT DEFAULT '[]',
    discovery_blocked_types TEXT DEFAULT '[]',
    tracked_telemetry_repeaters TEXT DEFAULT '[]',
-   auto_resend_channel INTEGER DEFAULT 0
+   auto_resend_channel INTEGER DEFAULT 0,
+   telemetry_interval_hours INTEGER DEFAULT 8
);
INSERT OR IGNORE INTO app_settings (id) VALUES (1);

@@ -164,9 +168,74 @@ CREATE INDEX IF NOT EXISTS idx_repeater_telemetry_pk_ts


class Database:
    """Single-connection aiosqlite wrapper with coroutine-level serialization.

    Why the lock: aiosqlite runs one ``sqlite3.Connection`` on a background
    worker thread and serializes statement execution there. But SQLite's
    ``COMMIT`` fails with ``OperationalError: cannot commit transaction -
    SQL statements in progress`` whenever *any* cursor on the connection has
    a live prepared statement (a ``SELECT`` that returned ``SQLITE_ROW`` but
    hasn't been fully consumed or closed). Under concurrent coroutines, one
    task's in-flight ``fetchone()`` can still be in ``SQLITE_ROW`` state when
    another task's ``commit()`` runs on the worker — triggering the error.

    Fix: all DB work goes through ``tx()`` (writes) or ``readonly()`` (reads),
    both of which acquire ``self._lock``. The lock is non-reentrant (asyncio
    default) by design — nested ``tx()`` calls are a bug. Repository methods
    that compose multiple operations factor the raw SQL into private helpers
    that take a ``conn`` and don't lock; the public method acquires the lock
    once and calls those helpers.

    Why reads are also locked: reads must also hold the lock, because a read
    in ``SQLITE_ROW`` state is precisely the live statement that breaks a
    concurrent writer's commit. Single-connection aiosqlite cannot safely
    overlap reads and writes. If we ever split reader/writer connections in
    the future, ``readonly()`` becomes the seam to point at the reader pool.
    """

    def __init__(self, db_path: str):
        self.db_path = db_path
        self._connection: aiosqlite.Connection | None = None
        self._lock = asyncio.Lock()

    @asynccontextmanager
    async def tx(self) -> AsyncIterator[aiosqlite.Connection]:
        """Acquire the connection for a write transaction.

        Commits on clean exit, rolls back on exception. Callers MUST close
        every cursor opened inside the block (use ``async with conn.execute(...)
        as cursor:``) so no prepared statement is alive when commit runs.

        The lock serializes concurrent writers AND ensures no reader's cursor
        is alive during the commit. Nested calls will deadlock — factor shared
        SQL into helpers that accept ``conn`` and do not re-enter ``tx()``.
        """
        async with self._lock:
            if self._connection is None:
                raise RuntimeError("Database not connected")
            conn = self._connection
            try:
                yield conn
            except BaseException:
                await conn.rollback()
                raise
            else:
                await conn.commit()

    @asynccontextmanager
    async def readonly(self) -> AsyncIterator[aiosqlite.Connection]:
        """Acquire the connection for a read. No commit, no rollback.

        Locked for the same reason writes are: on a single connection, an
        active read statement blocks a concurrent writer's commit. Callers
        MUST fully consume or close cursors before the block exits (use
        ``async with conn.execute(...) as cursor:`` + ``fetchall`` /
        ``fetchone``; avoid holding a cursor across ``await`` on other IO).
        """
        async with self._lock:
            if self._connection is None:
                raise RuntimeError("Database not connected")
            yield self._connection

    async def connect(self) -> None:
        logger.info("Connecting to database at %s", self.db_path)
@@ -178,6 +247,22 @@ class Database:
        # Persists in the DB file but we set it explicitly on every connection.
        await self._connection.execute("PRAGMA journal_mode = WAL")

        # synchronous = NORMAL is safe with WAL — only the most recent
        # transaction can be lost on an OS crash (no corruption risk).
        # Reduces fsync overhead vs. the default FULL.
        await self._connection.execute("PRAGMA synchronous = NORMAL")

        # Retry for up to 5s on lock contention instead of failing instantly.
        # Matters when a second connection (e.g. VACUUM) touches the DB.
        await self._connection.execute("PRAGMA busy_timeout = 5000")

        # Bump page cache to ~64 MB (negative value = KB). Keeps hot pages
        # in memory for read-heavy queries (unreads, pagination, search).
        await self._connection.execute("PRAGMA cache_size = -64000")

        # Keep temp tables and sort spills in memory instead of on disk.
        await self._connection.execute("PRAGMA temp_store = MEMORY")

        # Incremental auto-vacuum: freed pages are reclaimable via
        # PRAGMA incremental_vacuum without a full VACUUM. Must be set before
        # the first table is created (for new databases); for existing databases

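A minimal usage sketch of the locking contract documented above, assuming a module-level `Database` instance named `db` (main.py's lifespan calls `await db.connect()` on one); the repository class, table, and columns below are invented for illustration:

```python
# Illustrative only: shows the tx()/readonly() cursor discipline, not code
# copied from the real repositories. Table and column names are made up.
class ExampleRepository:
    @staticmethod
    async def rename(contact_id: str, name: str) -> None:
        # One lock acquisition for the whole write; the cursor is closed
        # before the context manager commits.
        async with db.tx() as conn:
            async with conn.execute(
                "UPDATE contacts SET name = ? WHERE id = ?", (name, contact_id)
            ):
                pass

    @staticmethod
    async def get_name(contact_id: str) -> str | None:
        async with db.readonly() as conn:
            async with conn.execute(
                "SELECT name FROM contacts WHERE id = ?", (contact_id,)
            ) as cursor:
                row = await cursor.fetchone()  # fully consume before exiting
        return row[0] if row else None
```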
@@ -237,7 +237,9 @@ async def on_new_contact(event: "Event") -> None:
    logger.debug("New contact: %s", public_key[:12])

    contact_upsert = ContactUpsert.from_radio_dict(public_key.lower(), payload, on_radio=False)
-   contact_upsert.last_seen = int(time.time())
+   # Intentionally do not set last_seen here: NEW_CONTACT fires from the
+   # radio's stored contact DB, not an RF observation. last_seen means
+   # "last time we heard this pubkey on RF".
    await ContactRepository.upsert(contact_upsert)
    promoted_keys = await promote_prefix_contacts_for_contact(
        public_key=public_key,

@@ -144,8 +144,8 @@ Amazon SQS delivery. Config blob:
- Supports both decoded messages and raw packets via normal scope selection

### map_upload (map_upload.py)
-Uploads heard repeater and room-server advertisements to map.meshcore.dev. Config blob:
-- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.dev API
+Uploads heard repeater and room-server advertisements to map.meshcore.io. Config blob:
+- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.io API
- `dry_run` (bool, default `true`) — when true, logs the payload at INFO level without sending
- `geofence_enabled` (bool, default `false`) — when true, only uploads nodes within `geofence_radius_km` of the radio's own configured lat/lon
- `geofence_radius_km` (float, default `0`) — filter radius in kilometres

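For illustration, a config blob that uses only the keys documented above with their defaults might look like this (hypothetical values, not taken from the repo):

```python
# Hypothetical map_upload fanout config blob built from the documented keys.
map_upload_config = {
    "api_url": "",              # empty -> fall back to the public map.meshcore.io API
    "dry_run": True,            # log the payload at INFO level instead of sending
    "geofence_enabled": False,  # when True, only upload nodes near the radio
    "geofence_radius_km": 0,    # filter radius in kilometres
}
```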
@@ -1,6 +1,7 @@
-"""Fanout module for uploading heard advert packets to map.meshcore.dev.
+"""Fanout module for uploading heard advert packets to map.meshcore.io.

-Mirrors the logic of the standalone map.meshcore.dev-uploader project:
+Mirrors the logic of the standalone map.meshcore.dev-uploader project
+(historical name; the live service is now hosted at map.meshcore.io):
- Listens on raw RF packets via on_raw
- Filters for ADVERT packets, only processes repeaters (role 2) and rooms (role 3)
- Skips nodes with no valid location (lat/lon None)
@@ -16,7 +17,7 @@ the raw hex link.
Config keys
-----------
api_url : str, default ""
-    Upload endpoint. Empty string falls back to the public map.meshcore.dev API.
+    Upload endpoint. Empty string falls back to the public map.meshcore.io API.
dry_run : bool, default True
    When True, log the payload at INFO level instead of sending it.
geofence_enabled : bool, default False
@@ -46,7 +47,7 @@ from app.services.radio_runtime import radio_runtime

logger = logging.getLogger(__name__)

-_DEFAULT_API_URL = "https://map.meshcore.dev/api/v1/uploader/node"
+_DEFAULT_API_URL = "https://map.meshcore.io/api/v1/uploader/node"

# Re-upload guard: skip re-uploading a pubkey seen within this window (AU parity)
_REUPLOAD_SECONDS = 3600

+135 -5
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)

# ── Repeater telemetry sensor definitions ─────────────────────────────────

-_REPEATER_SENSORS: list[dict[str, str | None]] = [
+_REPEATER_SENSORS: list[dict[str, Any]] = [
    {
        "field": "battery_volts",
        "name": "Battery Voltage",
@@ -34,6 +34,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": "voltage",
        "state_class": "measurement",
        "unit": "V",
        "precision": 2,
    },
    {
        "field": "noise_floor_dbm",
@@ -42,6 +43,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": "signal_strength",
        "state_class": "measurement",
        "unit": "dBm",
        "precision": 0,
    },
    {
        "field": "last_rssi_dbm",
@@ -50,6 +52,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": "signal_strength",
        "state_class": "measurement",
        "unit": "dBm",
        "precision": 0,
    },
    {
        "field": "last_snr_db",
@@ -58,6 +61,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "measurement",
        "unit": "dB",
        "precision": 1,
    },
    {
        "field": "packets_received",
@@ -66,6 +70,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "total_increasing",
        "unit": None,
        "precision": 0,
    },
    {
        "field": "packets_sent",
@@ -74,6 +79,7 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "total_increasing",
        "unit": None,
        "precision": 0,
    },
    {
        "field": "uptime_seconds",
@@ -82,13 +88,78 @@ _REPEATER_SENSORS: list[dict[str, str | None]] = [
        "device_class": "duration",
        "state_class": None,
        "unit": "s",
        "precision": 0,
    },
]

# ── LPP sensor metadata ─────────────────────────────────────────────────

_LPP_HA_META: dict[str, dict[str, Any]] = {
    "temperature": {"device_class": "temperature", "unit": "°C", "precision": 1},
    "humidity": {"device_class": "humidity", "unit": "%", "precision": 1},
    "barometer": {"device_class": "atmospheric_pressure", "unit": "hPa", "precision": 1},
    "voltage": {"device_class": "voltage", "unit": "V", "precision": 2},
    "current": {"device_class": "current", "unit": "mA", "precision": 1},
    "luminosity": {"device_class": "illuminance", "unit": "lux", "precision": 0},
    "power": {"device_class": "power", "unit": "W", "precision": 1},
    "energy": {"device_class": "energy", "unit": "kWh", "precision": 2},
    "distance": {"device_class": "distance", "unit": "mm", "precision": 0},
    "concentration": {"device_class": None, "unit": "ppm", "precision": 0},
    "direction": {"device_class": None, "unit": "°", "precision": 0},
    "altitude": {"device_class": None, "unit": "m", "precision": 1},
}


def _lpp_sensor_key(type_name: str, channel: int) -> str:
    """Build the flat telemetry-payload key for an LPP sensor."""
    return f"lpp_{type_name}_ch{channel}"


def _lpp_discovery_configs(
    prefix: str,
    pub_key: str,
    device: dict,
    lpp_sensors: list[dict],
    state_topic: str,
) -> list[tuple[str, dict]]:
    """Build HA discovery configs for a repeater's LPP sensors."""
    configs: list[tuple[str, dict]] = []
    for sensor in lpp_sensors:
        type_name = sensor.get("type_name", "unknown")
        channel = sensor.get("channel", 0)
        field = _lpp_sensor_key(type_name, channel)
        meta = _LPP_HA_META.get(type_name, {})

        nid = _node_id(pub_key)
        object_id = field
        display = type_name.replace("_", " ").title()
        name = f"{display} (Ch {channel})"

        cfg: dict[str, Any] = {
            "name": name,
            "unique_id": f"meshcore_{nid}_{object_id}",
            "device": device,
            "state_topic": state_topic,
            "value_template": "{{ value_json." + field + " }}",
            "state_class": "measurement",
            "expire_after": 36000,
        }
        if meta.get("device_class"):
            cfg["device_class"] = meta["device_class"]
        if meta.get("unit"):
            cfg["unit_of_measurement"] = meta["unit"]
        if meta.get("precision") is not None:
            cfg["suggested_display_precision"] = meta["precision"]

        topic = f"homeassistant/sensor/meshcore_{nid}/{object_id}/config"
        configs.append((topic, cfg))

    return configs


# ── Local radio sensor definitions ────────────────────────────────────────

-_RADIO_SENSORS: list[dict[str, str | None]] = [
+_RADIO_SENSORS: list[dict[str, Any]] = [
    {
        "field": "noise_floor_dbm",
        "name": "Noise Floor",
@@ -96,14 +167,16 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": "signal_strength",
        "state_class": "measurement",
        "unit": "dBm",
        "precision": 0,
    },
    {
-       "field": "battery_mv",
+       "field": "battery_volts",
        "name": "Battery",
        "object_id": "battery",
        "device_class": "voltage",
        "state_class": "measurement",
-       "unit": "mV",
+       "unit": "V",
+       "precision": 2,
    },
    {
        "field": "uptime_secs",
@@ -112,6 +185,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": "duration",
        "state_class": None,
        "unit": "s",
        "precision": 0,
    },
    {
        "field": "last_rssi",
@@ -120,6 +194,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": "signal_strength",
        "state_class": "measurement",
        "unit": "dBm",
        "precision": 0,
    },
    {
        "field": "last_snr",
@@ -128,6 +203,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "measurement",
        "unit": "dB",
        "precision": 1,
    },
    {
        "field": "tx_air_secs",
@@ -136,6 +212,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": "duration",
        "state_class": "total_increasing",
        "unit": "s",
        "precision": 0,
    },
    {
        "field": "rx_air_secs",
@@ -144,6 +221,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": "duration",
        "state_class": "total_increasing",
        "unit": "s",
        "precision": 0,
    },
    {
        "field": "packets_recv",
@@ -152,6 +230,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "total_increasing",
        "unit": None,
        "precision": 0,
    },
    {
        "field": "packets_sent",
@@ -160,6 +239,7 @@ _RADIO_SENSORS: list[dict[str, str | None]] = [
        "device_class": None,
        "state_class": "total_increasing",
        "unit": None,
        "precision": 0,
    },
]

@@ -281,6 +361,8 @@ def _radio_discovery_configs(
            cfg["state_class"] = sensor["state_class"]
        if sensor["unit"]:
            cfg["unit_of_measurement"] = sensor["unit"]
        if sensor.get("precision") is not None:
            cfg["suggested_display_precision"] = sensor["precision"]

        topic = f"homeassistant/sensor/meshcore_{nid}/{sensor['object_id']}/config"
        configs.append((topic, cfg))
@@ -314,6 +396,8 @@ def _repeater_discovery_configs(
            cfg["state_class"] = sensor["state_class"]
        if sensor["unit"]:
            cfg["unit_of_measurement"] = sensor["unit"]
        if sensor.get("precision") is not None:
            cfg["suggested_display_precision"] = sensor["precision"]
        # 10 hours — margin over the 8-hour auto-collect cycle
        cfg["expire_after"] = 36000

@@ -424,12 +508,21 @@ class MqttHaModule(FanoutModule):
        radio_name = self._radio_name or "MeshCore Radio"
        configs.extend(_radio_discovery_configs(self._prefix, self._radio_key, radio_name))

-       # Tracked repeaters — resolve names from DB best-effort
+       # Tracked repeaters — resolve names and LPP sensors from DB best-effort
        for pub_key in self._tracked_repeaters:
            rname = await self._resolve_contact_name(pub_key)
            configs.extend(
                _repeater_discovery_configs(self._prefix, pub_key, rname, self._radio_key)
            )
            # Dynamic LPP sensor entities from last known telemetry snapshot
            lpp_sensors = await self._resolve_lpp_sensors(pub_key)
            if lpp_sensors:
                nid = _node_id(pub_key)
                device = _device_payload(pub_key, rname, "Repeater", via_device_key=self._radio_key)
                state_topic = f"{self._prefix}/{nid}/telemetry"
                configs.extend(
                    _lpp_discovery_configs(self._prefix, pub_key, device, lpp_sensors, state_topic)
                )

        # Tracked contacts — resolve names from DB best-effort
        for pub_key in self._tracked_contacts:
@@ -481,6 +574,19 @@ class MqttHaModule(FanoutModule):
            pass
        return pub_key[:12]

    @staticmethod
    async def _resolve_lpp_sensors(pub_key: str) -> list[dict]:
        """Return the LPP sensor list from the most recent telemetry snapshot, or []."""
        try:
            from app.repository.repeater_telemetry import RepeaterTelemetryRepository

            latest = await RepeaterTelemetryRepository.get_latest(pub_key)
            if latest:
                return latest.get("data", {}).get("lpp_sensors", [])
        except Exception:
            pass
        return []

    def _seed_radio_identity_from_runtime(self) -> None:
        """Best-effort bootstrap from the currently connected radio session."""
        try:
@@ -548,6 +654,13 @@ class MqttHaModule(FanoutModule):
            field = sensor["field"]
            if field is not None:
                payload[field] = data.get(field)

        # Normalize battery from millivolts to volts for consistency with
        # repeater battery and the discovery config (unit: V, precision: 2).
        battery_mv = data.get("battery_mv")
        if battery_mv is not None:
            payload["battery_volts"] = battery_mv / 1000.0

        await self._publisher.publish(f"{self._prefix}/{nid}/health", payload)

    async def on_contact(self, data: dict) -> None:
@@ -590,6 +703,23 @@ class MqttHaModule(FanoutModule):
            field = s["field"]
            if field is not None:
                payload[field] = data.get(field)

        # Flatten LPP sensors into the same payload so HA value_templates work
        lpp_sensors: list[dict] = data.get("lpp_sensors", [])
        rediscover = False
        for sensor in lpp_sensors:
            key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
            payload[key] = sensor.get("value")
            # Check if discovery for this sensor has been published yet
            expected_topic = f"homeassistant/sensor/meshcore_{nid}/{key}/config"
            if expected_topic not in self._discovery_topics:
                rediscover = True

        # If new LPP sensor types appeared, re-publish discovery *before*
        # the state payload so HA already knows the entity when the value arrives.
        if rediscover:
            await self._publish_discovery()

        await self._publisher.publish(f"{self._prefix}/{nid}/telemetry", payload)

    async def on_message(self, data: dict) -> None:

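To make the LPP discovery flow above concrete: for a tracked repeater reporting an LPP temperature sensor on channel 1, `_lpp_discovery_configs` would emit roughly the following topic and payload. The node id, topic prefix, and device block below are placeholders for illustration.

```python
# Approximate discovery topic/payload for an LPP temperature sensor on
# channel 1; node id, prefix, and device block are placeholders.
topic = "homeassistant/sensor/meshcore_ab12cd34/lpp_temperature_ch1/config"
config = {
    "name": "Temperature (Ch 1)",
    "unique_id": "meshcore_ab12cd34_lpp_temperature_ch1",
    "device": {"...": "device payload for the repeater"},
    "state_topic": "meshcore/ab12cd34/telemetry",
    "value_template": "{{ value_json.lpp_temperature_ch1 }}",
    "state_class": "measurement",
    "expire_after": 36000,
    "device_class": "temperature",
    "unit_of_measurement": "°C",
    "suggested_display_precision": 1,
}
# The matching state payload published to the telemetry topic then carries
# the flattened key, e.g. {"lpp_temperature_ch1": 21.4, ...}.
```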
@@ -135,7 +135,34 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
        "display_override": ["window-controls-overlay", "standalone", "fullscreen"],
        "theme_color": "#111419",
        "background_color": "#111419",
        # Icons are PNG-only on purpose. iOS Safari's manifest parser has
        # historically been unreliable with SVG icons, and Android/Chrome
        # PWA install flows prefer PNG for the install prompt.
        #
        # The "any" purpose entries are what iOS and desktop Chrome use
        # for the home-screen / install icon. "maskable" entries are
        # Android-only (adaptive icon with safe-zone crop); iOS does not
        # apply the safe-zone mask, so a maskable-only icon set would
        # render with excessive padding.
        "icons": [
            {
                "src": f"{base}favicon-96x96.png",
                "sizes": "96x96",
                "type": "image/png",
                "purpose": "any",
            },
            {
                "src": f"{base}apple-touch-icon.png",
                "sizes": "180x180",
                "type": "image/png",
                "purpose": "any",
            },
            {
                "src": f"{base}favicon-256x256.png",
                "sizes": "256x256",
                "type": "image/png",
                "purpose": "any",
            },
            {
                "src": f"{base}web-app-manifest-192x192.png",
                "sizes": "192x192",
@@ -149,6 +176,27 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
                "purpose": "maskable",
            },
        ],
        "screenshots": [
            {
                "src": f"{base}screenshot-wide.png",
                "sizes": "1367x909",
                "type": "image/png",
                "form_factor": "wide",
                "label": "RemoteTerm desktop view",
            },
            {
                "src": f"{base}screenshot-mobile.png",
                "sizes": "1170x2532",
                "type": "image/png",
                "label": "RemoteTerm mobile view",
            },
            {
                "src": f"{base}screenshot-mobile-2.png",
                "sizes": "750x1334",
                "type": "image/png",
                "label": "RemoteTerm mobile conversation",
            },
        ],
    }
    return JSONResponse(
        manifest,

+10
@@ -67,6 +67,7 @@ from app.routers import (
    health,
    messages,
    packets,
    push,
    radio,
    read_state,
    repeaters,
@@ -102,6 +103,14 @@ async def lifespan(app: FastAPI):
    await db.connect()
    logger.info("Database connected")

    # Initialize VAPID keys for Web Push (generates on first run)
    from app.push.vapid import ensure_vapid_keys

    try:
        await ensure_vapid_keys()
    except Exception:
        logger.warning("Failed to initialize VAPID keys for Web Push", exc_info=True)

    # Ensure default channels exist in the database even before the radio
    # connects. Without this, a fresh or disconnected instance would return
    # zero channels from GET /channels until the first successful radio sync.
@@ -185,6 +194,7 @@ app.include_router(packets.router, prefix="/api")
app.include_router(read_state.router, prefix="/api")
app.include_router(settings.router, prefix="/api")
app.include_router(statistics.router, prefix="/api")
app.include_router(push.router, prefix="/api")
app.include_router(ws.router, prefix="/api")

# Serve frontend static files in production

@@ -0,0 +1,22 @@
import logging

import aiosqlite

logger = logging.getLogger(__name__)


async def migrate(conn: aiosqlite.Connection) -> None:
    """Add telemetry_interval_hours integer column to app_settings."""
    tables_cursor = await conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
    if "app_settings" not in {row[0] for row in await tables_cursor.fetchall()}:
        await conn.commit()
        return
    col_cursor = await conn.execute("PRAGMA table_info(app_settings)")
    columns = {row[1] for row in await col_cursor.fetchall()}
    if "telemetry_interval_hours" not in columns:
        # Default to 8 hours, matching the previous hard-coded interval
        # so existing users see no behavior change until they opt in.
        await conn.execute(
            "ALTER TABLE app_settings ADD COLUMN telemetry_interval_hours INTEGER DEFAULT 8"
        )
    await conn.commit()
@@ -0,0 +1,49 @@
import logging

import aiosqlite

logger = logging.getLogger(__name__)


async def migrate(conn: aiosqlite.Connection) -> None:
    """Add Web Push support: VAPID keys, push subscriptions table, and global conversation list."""

    # VAPID key pair + global push conversation list in app_settings
    table_check = await conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='app_settings'"
    )
    if await table_check.fetchone():
        cursor = await conn.execute("PRAGMA table_info(app_settings)")
        columns = {row[1] for row in await cursor.fetchall()}

        if "vapid_private_key" not in columns:
            await conn.execute(
                "ALTER TABLE app_settings ADD COLUMN vapid_private_key TEXT DEFAULT ''"
            )
        if "vapid_public_key" not in columns:
            await conn.execute(
                "ALTER TABLE app_settings ADD COLUMN vapid_public_key TEXT DEFAULT ''"
            )
        if "push_conversations" not in columns:
            await conn.execute(
                "ALTER TABLE app_settings ADD COLUMN push_conversations TEXT DEFAULT '[]'"
            )

    # Push subscriptions — one row per browser/device
    await conn.execute(
        """
        CREATE TABLE IF NOT EXISTS push_subscriptions (
            id TEXT PRIMARY KEY,
            endpoint TEXT NOT NULL,
            p256dh TEXT NOT NULL,
            auth TEXT NOT NULL,
            label TEXT NOT NULL DEFAULT '',
            created_at INTEGER NOT NULL,
            last_success_at INTEGER,
            failure_count INTEGER DEFAULT 0,
            UNIQUE(endpoint)
        )
        """
    )

    await conn.commit()
@@ -842,6 +842,14 @@ class AppSettings(BaseModel):
        default_factory=list,
        description="Public keys of repeaters opted into periodic telemetry collection (max 8)",
    )
    telemetry_interval_hours: int = Field(
        default=8,
        description=(
            "User-preferred telemetry collection interval in hours. The backend "
            "clamps this up to the shortest legal interval given the number of "
            "tracked repeaters so daily checks stay under a 24/day ceiling."
        ),
    )
    auto_resend_channel: bool = Field(
        default=False,
        description=(

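The clamp referenced in this description lives in `app/telemetry_interval.py`, which is not shown in this diff. A plausible sketch of the behaviour it describes, purely illustrative and possibly differing from the real `clamp_telemetry_interval`:

```python
# Illustrative sketch only; the real clamp_telemetry_interval may differ.
# Intervals are restricted to divisors of 24 so the hour-modulo scheduler
# fires on a regular cadence, and the interval is raised until the number
# of per-repeater checks per day (tracked * 24 / interval) stays <= 24.
_ALLOWED_HOURS = (1, 2, 3, 4, 6, 8, 12, 24)


def clamp_telemetry_interval(requested_hours: int, tracked_count: int) -> int:
    if tracked_count <= 0:
        return 0  # nothing to collect
    for hours in _ALLOWED_HOURS:
        if hours >= requested_hours and tracked_count * (24 // hours) <= 24:
            return hours
    return 24
```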
@@ -0,0 +1,172 @@
"""Web Push dispatch manager.

Checks the global push-enabled conversation list (stored in app_settings)
and sends push notifications to ALL registered devices when a matching
incoming message arrives.
"""

import asyncio
import json
import logging
from dataclasses import dataclass

from pywebpush import WebPushException

from app.push.send import send_push
from app.push.vapid import get_vapid_private_key
from app.repository.push_subscriptions import PushSubscriptionRepository
from app.repository.settings import AppSettingsRepository

logger = logging.getLogger(__name__)

_SEND_TIMEOUT = 15  # seconds per push send
_VAPID_CLAIMS = {"sub": "mailto:noreply@meshcore.local"}


def _state_key_for_message(data: dict) -> str:
    """Derive the conversation state key from a message event payload."""
    msg_type = data.get("type", "")
    conversation_key = data.get("conversation_key", "")
    if msg_type == "PRIV":
        return f"contact-{conversation_key}"
    return f"channel-{conversation_key}"


def _build_payload(data: dict) -> str:
    """Build the push notification JSON payload from a message event."""
    msg_type = data.get("type", "")
    text = data.get("text", "")
    sender_name = data.get("sender_name") or ""
    channel_name = data.get("channel_name") or ""

    if msg_type == "PRIV":
        title = f"Message from {sender_name}" if sender_name else "New direct message"
        body = text
    else:
        title = channel_name if channel_name else "Channel message"
        body = text

    conversation_key = data.get("conversation_key", "")
    state_key = _state_key_for_message(data)
    if msg_type == "PRIV":
        url_hash = f"#contact/{conversation_key}"
    else:
        url_hash = f"#channel/{conversation_key}"

    return json.dumps(
        {
            "title": title,
            "body": body,
            # Tag per conversation so different conversations coexist in the
            # notification tray, while repeated messages in the same
            # conversation replace each other.
            "tag": f"meshcore-{state_key}",
            "url_hash": url_hash,
        }
    )


def _subscription_info(sub: dict) -> dict:
    """Build the subscription_info dict that pywebpush expects."""
    return {
        "endpoint": sub["endpoint"],
        "keys": {
            "p256dh": sub["p256dh"],
            "auth": sub["auth"],
        },
    }


@dataclass
class _SendResult:
    sub_id: str
    success: bool = False
    expired: bool = False


class PushManager:
    async def dispatch_message(self, data: dict) -> None:
        """Send push notifications for a message event to all devices."""
        # Don't notify for messages the operator just sent themselves
        if data.get("outgoing"):
            return

        # Check the global conversation list
        state_key = _state_key_for_message(data)
        try:
            push_conversations = await AppSettingsRepository.get_push_conversations()
        except Exception:
            logger.debug("Push dispatch: failed to load push_conversations", exc_info=True)
            return

        if state_key not in push_conversations:
            return

        try:
            subs = await PushSubscriptionRepository.get_all()
        except Exception:
            logger.debug("Push dispatch: failed to load subscriptions", exc_info=True)
            return

        if not subs:
            return

        payload = _build_payload(data)
        vapid_key = get_vapid_private_key()
        if not vapid_key:
            logger.debug("Push dispatch: no VAPID key configured, skipping")
            return

        results = await asyncio.gather(
            *(self._send_one(sub, payload, vapid_key) for sub in subs),
            return_exceptions=True,
        )

        # Batch-update all delivery outcomes in one transaction.
        success_ids: list[str] = []
        failure_ids: list[str] = []
        remove_ids: list[str] = []
        for r in results:
            if isinstance(r, _SendResult):
                if r.expired:
                    remove_ids.append(r.sub_id)
                elif r.success:
                    success_ids.append(r.sub_id)
                else:
                    failure_ids.append(r.sub_id)
        if success_ids or failure_ids or remove_ids:
            try:
                await PushSubscriptionRepository.batch_record_outcomes(
                    success_ids, failure_ids, remove_ids
                )
            except Exception:
                logger.debug("Push dispatch: failed to record outcomes", exc_info=True)

    async def _send_one(self, sub: dict, payload: str, vapid_key: str) -> _SendResult:
        sub_id = sub["id"]
        result = _SendResult(sub_id=sub_id)
        try:
            async with asyncio.timeout(_SEND_TIMEOUT):
                await send_push(
                    subscription_info=_subscription_info(sub),
                    payload=payload,
                    vapid_private_key=vapid_key,
                    vapid_claims=_VAPID_CLAIMS,
                )
            result.success = True
        except WebPushException as e:
            status = getattr(e, "response", None)
            status_code = getattr(status, "status_code", 0) if status else 0
            if status_code in (403, 404, 410):
                logger.info("Push subscription expired (HTTP %d), removing %s", status_code, sub_id)
                result.expired = True
            else:
                logger.warning("Push send failed for %s: %s", sub_id, e)
        except TimeoutError:
            logger.warning("Push send timed out for %s", sub_id)
        except Exception:
            logger.debug("Push send error for %s", sub_id, exc_info=True)
        return result


push_manager = PushManager()
@@ -0,0 +1,231 @@
"""Thin wrapper around pywebpush for sending push notifications.

Isolates the pywebpush dependency and runs the synchronous send in
a thread executor to avoid blocking the event loop.
"""

import asyncio
import logging
import socket
from typing import Any, cast

import requests
import urllib3.connection
import urllib3.connectionpool
from pywebpush import webpush
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError as RequestsConnectionError
from requests.exceptions import ConnectTimeout as RequestsConnectTimeout
from urllib3.exceptions import ConnectTimeoutError, NameResolutionError, NewConnectionError

logger = logging.getLogger(__name__)

DEFAULT_TIMEOUT = object()
DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS = 3
IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS = 10
DEFAULT_PUSH_READ_TIMEOUT_SECONDS = 10


def _create_ipv4_connection(
    address: tuple[str, int],
    timeout: float | None | object = DEFAULT_TIMEOUT,
    source_address: tuple[str, int] | None = None,
    socket_options=None,
) -> socket.socket:
    """Create a socket connection using IPv4 only."""
    host, port = address
    if host.startswith("["):
        host = host.strip("[]")

    err: OSError | None = None
    for res in socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM):
        af, socktype, proto, _, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            if socket_options:
                for opt in socket_options:
                    sock.setsockopt(*opt)
            if timeout is not DEFAULT_TIMEOUT:
                sock.settimeout(cast(float | None, timeout))
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock
        except OSError as exc:
            err = exc
            if sock is not None:
                sock.close()

    if err is not None:
        raise err
    raise OSError("getaddrinfo returns an empty list")


class IPv4HTTPConnection(urllib3.connection.HTTPConnection):
    """urllib3 HTTP connection that resolves and connects via IPv4 only."""

    def _new_conn(self) -> socket.socket:
        try:
            return _create_ipv4_connection(
                (self._dns_host, self.port),
                self.timeout,
                source_address=self.source_address,
                socket_options=self.socket_options,
            )
        except socket.gaierror as exc:
            raise NameResolutionError(self.host, self, exc) from exc
        except TimeoutError as exc:
            raise ConnectTimeoutError(
                self,
                f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
            ) from exc
        except OSError as exc:
            raise NewConnectionError(self, f"Failed to establish a new connection: {exc}") from exc


class IPv4HTTPSConnection(urllib3.connection.HTTPSConnection):
    """urllib3 HTTPS connection that resolves and connects via IPv4 only."""

    def _new_conn(self) -> socket.socket:
        try:
            return _create_ipv4_connection(
                (self._dns_host, self.port),
                self.timeout,
                source_address=self.source_address,
                socket_options=self.socket_options,
            )
        except socket.gaierror as exc:
            raise NameResolutionError(self.host, self, exc) from exc
        except TimeoutError as exc:
            raise ConnectTimeoutError(
                self,
                f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
            ) from exc
        except OSError as exc:
            raise NewConnectionError(self, f"Failed to establish a new connection: {exc}") from exc


class IPv4HTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
    ConnectionCls = cast(Any, IPv4HTTPConnection)


class IPv4HTTPSConnectionPool(urllib3.connectionpool.HTTPSConnectionPool):
    ConnectionCls = cast(Any, IPv4HTTPSConnection)


def _configure_pool_manager_for_ipv4(manager: Any) -> None:
    manager.pool_classes_by_scheme = manager.pool_classes_by_scheme.copy()
    manager.pool_classes_by_scheme["http"] = IPv4HTTPConnectionPool
    manager.pool_classes_by_scheme["https"] = IPv4HTTPSConnectionPool


class IPv4HTTPAdapter(HTTPAdapter):
    """requests adapter that uses IPv4-only urllib3 connection pools."""

    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)
        _configure_pool_manager_for_ipv4(self.poolmanager)

    def proxy_manager_for(self, *args, **kwargs):
        manager = super().proxy_manager_for(*args, **kwargs)
        _configure_pool_manager_for_ipv4(manager)
        return manager


def _build_default_requests_session() -> requests.Session:
    return requests.Session()


def _build_ipv4_requests_session() -> requests.Session:
    session = requests.Session()
    adapter = IPv4HTTPAdapter()
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session


def _send_push_with_session(
    *,
    subscription_info: dict,
    payload: str,
    vapid_private_key: str,
    vapid_claims: dict,
    session: requests.Session,
    connect_timeout_seconds: int,
) -> int:
    response = webpush(
        subscription_info=subscription_info,
        data=payload,
        vapid_private_key=vapid_private_key,
        vapid_claims=vapid_claims,
        content_encoding="aes128gcm",
        timeout=cast(Any, (connect_timeout_seconds, DEFAULT_PUSH_READ_TIMEOUT_SECONDS)),
        requests_session=session,
    )
    return response.status_code  # type: ignore[union-attr]


def _send_push_with_fallback(
    subscription_info: dict,
    payload: str,
    vapid_private_key: str,
    vapid_claims: dict,
) -> int:
    """Send using normal dual-stack resolution, then retry with IPv4-only on connect failures."""
    session = _build_default_requests_session()
    try:
        return _send_push_with_session(
            subscription_info=subscription_info,
            payload=payload,
            vapid_private_key=vapid_private_key,
            vapid_claims=vapid_claims,
            session=session,
            connect_timeout_seconds=DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS,
        )
    except (RequestsConnectTimeout, RequestsConnectionError) as exc:
        logger.info("Push delivery retrying via IPv4 after initial network failure: %s", exc)
    finally:
        session.close()

    session = _build_ipv4_requests_session()
    try:
        return _send_push_with_session(
            subscription_info=subscription_info,
            payload=payload,
            vapid_private_key=vapid_private_key,
            vapid_claims=vapid_claims,
            session=session,
            connect_timeout_seconds=IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS,
        )
    finally:
        session.close()


async def send_push(
    subscription_info: dict,
    payload: str,
    vapid_private_key: str,
    vapid_claims: dict,
) -> int:
    """Send an encrypted push notification.

    Args:
        subscription_info: {"endpoint": ..., "keys": {"p256dh": ..., "auth": ...}}
        payload: JSON string to encrypt and send
        vapid_private_key: base64url-encoded raw EC private key scalar
        vapid_claims: {"sub": "mailto:..."} or {"sub": "https://..."}

    Returns:
        HTTP status code from the push service.

    Raises:
        WebPushException: on push service error (caller handles 404/410 cleanup).
    """
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None,
        lambda: _send_push_with_fallback(
            subscription_info, payload, vapid_private_key, vapid_claims
        ),
    )
@@ -0,0 +1,60 @@
"""VAPID key management for Web Push.

Generates a P-256 key pair on first use and caches it in app_settings
via ``AppSettingsRepository``. The public key is served to browsers
for ``PushManager.subscribe()``.
"""

import base64
import logging

from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from py_vapid import Vapid

from app.repository.settings import AppSettingsRepository

logger = logging.getLogger(__name__)

_cached_private_key: str = ""
_cached_public_key: str = ""


async def ensure_vapid_keys() -> tuple[str, str]:
    """Read or generate VAPID keys. Call once at startup after DB connect."""
    global _cached_private_key, _cached_public_key

    private, public = await AppSettingsRepository.get_vapid_keys()
    if private and public:
        _cached_private_key = private
        _cached_public_key = public
        logger.info("VAPID keys loaded from database")
        return _cached_private_key, _cached_public_key

    # Generate new key pair
    vapid = Vapid()
    vapid.generate_keys()

    # Private key as base64url-encoded raw 32-byte EC scalar — the format
    # that pywebpush passes to ``Vapid.from_string()``.
    raw_priv = vapid.private_key.private_numbers().private_value.to_bytes(32, "big")  # type: ignore[union-attr]
    _cached_private_key = base64.urlsafe_b64encode(raw_priv).rstrip(b"=").decode("ascii")

    # Public key as uncompressed P-256 point, base64url-encoded (no padding)
    # for the browser Push API's applicationServerKey
    raw_pub = vapid.public_key.public_bytes(Encoding.X962, PublicFormat.UncompressedPoint)  # type: ignore[union-attr]
    _cached_public_key = base64.urlsafe_b64encode(raw_pub).rstrip(b"=").decode("ascii")

    await AppSettingsRepository.set_vapid_keys(_cached_private_key, _cached_public_key)
    logger.info("Generated and stored new VAPID key pair")

    return _cached_private_key, _cached_public_key


def get_vapid_public_key() -> str:
    """Return the cached VAPID public key (base64url). Must call ensure_vapid_keys() first."""
    return _cached_public_key


def get_vapid_private_key() -> str:
    """Return the cached VAPID private key (base64url). Must call ensure_vapid_keys() first."""
    return _cached_private_key
+157
-63
@@ -14,6 +14,7 @@ import logging
|
||||
import math
|
||||
import time
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Literal
|
||||
|
||||
from meshcore import EventType, MeshCore
|
||||
@@ -36,6 +37,7 @@ from app.services.contact_reconciliation import (
|
||||
)
|
||||
from app.services.messages import create_fallback_channel_message
|
||||
from app.services.radio_runtime import radio_runtime as radio_manager
|
||||
from app.telemetry_interval import clamp_telemetry_interval
|
||||
from app.websocket import broadcast_error, broadcast_event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -159,10 +161,10 @@ MIN_ADVERT_INTERVAL = 3600
|
||||
# Periodic telemetry collection task handle
|
||||
_telemetry_collect_task: asyncio.Task | None = None
|
||||
|
||||
# Telemetry collection interval (8 hours)
|
||||
TELEMETRY_COLLECT_INTERVAL = 8 * 3600
|
||||
|
||||
# Initial delay before the first telemetry collection cycle (let radio settle)
|
||||
# Initial delay before the scheduler starts (let radio settle). After this,
|
||||
# the loop wakes at each UTC top-of-hour and decides whether to run a cycle
|
||||
# based on the user's telemetry_interval_hours preference, clamped up to
|
||||
# the shortest-legal interval for the current tracked-repeater count.
|
||||
TELEMETRY_COLLECT_INITIAL_DELAY = 60
|
||||
|
||||
# Counter to pause polling during repeater operations (supports nested pauses)
|
||||
@@ -1295,7 +1297,13 @@ async def stop_background_contact_reconciliation() -> None:
|
||||
|
||||
|
||||
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
||||
"""Return the contacts that would be loaded onto the radio right now."""
|
||||
"""Return the contacts that would be loaded onto the radio right now.
|
||||
|
||||
Fill order:
|
||||
1. Favorites (up to full capacity)
|
||||
2. Most recently DM-active non-repeaters (sent or received, up to 80% refill target)
|
||||
3. Most recently advertised non-repeaters (up to 80% refill target)
|
||||
"""
|
||||
app_settings = await AppSettingsRepository.get()
|
||||
max_contacts = _effective_radio_capacity(app_settings.max_radio_contacts)
|
||||
refill_target, _full_sync_trigger = _compute_radio_contact_limits(max_contacts)
|
||||
@@ -1315,7 +1323,7 @@ async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
||||
break
|
||||
|
||||
if len(selected_contacts) < refill_target:
|
||||
for contact in await ContactRepository.get_recently_contacted_non_repeaters(
|
||||
for contact in await ContactRepository.get_recently_dm_active_non_repeaters(
|
||||
limit=max_contacts
|
||||
):
|
||||
key = contact.public_key.lower()
|
||||
@@ -1354,8 +1362,8 @@ async def _sync_contacts_to_radio_inner(mc: MeshCore) -> dict:
|
||||
|
||||
Fill order is:
|
||||
1. Favorite contacts
|
||||
2. Most recently interacted-with non-repeaters
|
||||
3. Most recently advert-heard non-repeaters without interaction history
|
||||
2. Most recently DM-active non-repeaters (sent or received)
|
||||
3. Most recently advert-heard non-repeaters
|
||||
|
||||
Favorite contacts are always reloaded first, up to the configured capacity.
|
||||
Additional non-favorite fill stops at the refill target (80% of capacity).
|
||||
@@ -1489,8 +1497,8 @@ async def sync_recent_contacts_to_radio(force: bool = False, mc: MeshCore | None
|
||||
"""
|
||||
Load contacts to the radio for DM ACK support.
|
||||
|
||||
Fill order is favorites, then recently contacted non-repeaters,
|
||||
then recently advert-heard non-repeaters. Favorites are always reloaded
|
||||
Fill order is favorites, then recently DM-active non-repeaters (sent or
|
||||
received), then recently advert-heard non-repeaters. Favorites are always reloaded
|
||||
up to the configured capacity; additional non-favorite fill stops at the
|
||||
80% refill target.
|
||||
Only runs at most once every CONTACT_SYNC_THROTTLE_SECONDS unless forced.
|
||||
@@ -1584,6 +1592,35 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
||||
"full_events": status.get("full_evts", 0),
|
||||
}
|
||||
|
||||
# Best-effort LPP sensor fetch — failure here does not fail the overall
|
||||
# collection; status telemetry is still recorded without sensor data.
|
||||
try:
|
||||
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||
contact.public_key, timeout=10, min_timeout=5
|
||||
)
|
||||
if lpp_raw:
|
||||
lpp_sensors = []
|
||||
for entry in lpp_raw:
|
||||
value = entry.get("value", 0)
|
||||
# Skip multi-value sensors (GPS, accelerometer, etc.)
|
||||
if isinstance(value, dict):
|
||||
continue
|
||||
lpp_sensors.append(
|
||||
{
|
||||
"channel": entry.get("channel", 0),
|
||||
"type_name": str(entry.get("type", "unknown")),
|
||||
"value": value,
|
||||
}
|
||||
)
|
||||
if lpp_sensors:
|
||||
data["lpp_sensors"] = lpp_sensors
|
||||
except Exception as e:
|
||||
logger.debug(
|
||||
"Telemetry collect: LPP sensor fetch failed for %s (non-fatal): %s",
|
||||
contact.public_key[:12],
|
||||
e,
|
||||
)
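# Illustrative shape of `data` after a successful status + LPP fetch (values are
# made up; sensor type strings depend on what req_telemetry_sync reports):
#   {
#       "full_events": 3,
#       ...other status fields...,
#       "lpp_sensors": [
#           {"channel": 1, "type_name": "temperature", "value": 21.5},
#           {"channel": 2, "type_name": "voltage", "value": 4.01},
#       ],
#   }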
|
||||
|
||||
try:
|
||||
timestamp = int(time.time())
|
||||
await RepeaterTelemetryRepository.record(
|
||||
@@ -1621,62 +1658,122 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
async def _run_telemetry_cycle() -> None:
|
||||
"""Collect one telemetry sample from every tracked repeater."""
|
||||
if not radio_manager.is_connected:
|
||||
logger.debug("Telemetry collect: radio not connected, skipping cycle")
|
||||
return
|
||||
|
||||
app_settings = await AppSettingsRepository.get()
|
||||
tracked = app_settings.tracked_telemetry_repeaters
|
||||
if not tracked:
|
||||
return
|
||||
|
||||
logger.info("Telemetry collect: starting cycle for %d repeater(s)", len(tracked))
|
||||
collected = 0
|
||||
|
||||
for pub_key in tracked:
|
||||
contact = await ContactRepository.get_by_key(pub_key)
|
||||
if not contact or contact.type != 2:
|
||||
logger.debug(
|
||||
"Telemetry collect: skipping %s (not found or not repeater)",
|
||||
pub_key[:12],
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
async with radio_manager.radio_operation(
|
||||
"telemetry_collect",
|
||||
blocking=False,
|
||||
suspend_auto_fetch=True,
|
||||
) as mc:
|
||||
if await _collect_repeater_telemetry(mc, contact):
|
||||
collected += 1
|
||||
except RadioOperationBusyError:
|
||||
logger.debug(
|
||||
"Telemetry collect: radio busy, skipping %s",
|
||||
pub_key[:12],
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Telemetry collect: cycle complete, %d/%d successful",
|
||||
collected,
|
||||
len(tracked),
|
||||
)
|
||||
|
||||
|
||||
async def _sleep_until_next_utc_top_of_hour() -> None:
|
||||
"""Sleep until the next UTC top-of-hour (or a minimum of 1 second)."""
|
||||
now = datetime.now(UTC)
|
||||
next_top = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
|
||||
delay = (next_top - now).total_seconds()
|
||||
if delay < 1:
|
||||
delay = 1
|
||||
await asyncio.sleep(delay)
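# Worked example: called at 13:42:30 UTC this sleeps 1050 seconds until 14:00:00;
# called at exactly 14:00:00 it sleeps the full 3600 seconds to the next boundary,
# and the 1-second floor only matters for sub-second remainders just before the hour.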
|
||||
|
||||
|
||||
async def _maybe_run_scheduled_cycle(now: datetime) -> None:
|
||||
"""Evaluate the modulo gate for the given UTC time and run a cycle if due.
|
||||
|
||||
Factored out of the loop so we can also invoke it immediately after the
|
||||
post-boot initial delay — otherwise a restart within the initial-delay
|
||||
window before a scheduled boundary would carry the task past that boundary
|
||||
and skip a due cycle (for 24h cadence users, that's a full day of missed
|
||||
telemetry).
|
||||
"""
|
||||
app_settings = await AppSettingsRepository.get()
|
||||
tracked_count = len(app_settings.tracked_telemetry_repeaters)
|
||||
if tracked_count == 0:
|
||||
return
|
||||
effective_hours = clamp_telemetry_interval(app_settings.telemetry_interval_hours, tracked_count)
|
||||
if effective_hours <= 0:
|
||||
return
|
||||
if now.hour % effective_hours != 0:
|
||||
return
|
||||
await _run_telemetry_cycle()
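# Hedged sketch of clamp_telemetry_interval, which is referenced above but not part
# of this diff. Assumption: "shortest legal interval" means the daily check count
# (tracked_count * 24 / interval_hours) must stay within DAILY_CHECK_CEILING (24),
# and intervals are snapped to divisors of 24 so the hour-modulo gate fires evenly.
def clamp_telemetry_interval(preferred_hours: int, tracked_count: int) -> int:
    divisors = (1, 2, 3, 4, 6, 8, 12, 24)
    # Shortest interval that keeps tracked_count checks per cycle under the daily ceiling.
    shortest_legal = next((h for h in divisors if tracked_count * (24 // h) <= 24), 24)
    effective = max(preferred_hours, shortest_legal)
    # Snap up to a divisor of 24; fall back to daily if the preference exceeds 24h.
    return next((h for h in divisors if h >= effective), 24)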
|
||||
|
||||
|
||||
async def _telemetry_collect_loop() -> None:
|
||||
"""Background task that collects telemetry from tracked repeaters every 8 hours.
|
||||
"""Background task that runs tracked-repeater telemetry collection.
|
||||
|
||||
Runs a first cycle after a short initial delay (so newly tracked repeaters
|
||||
get a sample promptly), then sleeps the full interval between subsequent cycles.
|
||||
After an initial post-boot delay we evaluate the modulo gate once
|
||||
(covers the edge case where the initial delay crossed a scheduled
|
||||
boundary on restart). Then we wake at every UTC top-of-hour and
|
||||
evaluate the gate again. A cycle runs only when
|
||||
``current_utc_hour % effective_interval_hours == 0``, where the
|
||||
effective interval is the user preference clamped up to the shortest
|
||||
legal interval for the current tracked-repeater count. This keeps the
|
||||
total daily check count bounded at ``DAILY_CHECK_CEILING`` (24).
|
||||
|
||||
Acquires the radio lock per-repeater (non-blocking) so manual operations can
|
||||
The loop never updates the stored user preference. If the user picks a
|
||||
short interval and then adds repeaters that make it illegal, they keep
|
||||
their pick stored and we silently use the clamped value until they drop
|
||||
repeaters.
|
||||
|
||||
Radio lock is acquired per-repeater (non-blocking) so manual ops can
|
||||
interleave. Failures are logged and skipped.
|
||||
"""
|
||||
first_run = True
|
||||
try:
|
||||
await asyncio.sleep(TELEMETRY_COLLECT_INITIAL_DELAY)
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Telemetry collect task cancelled before initial delay")
|
||||
return
|
||||
|
||||
# Post-boot boundary check: if the delay carried us into a matching hour
|
||||
# (or we booted exactly at a matching hour), run now rather than waiting
|
||||
# another full cycle.
|
||||
try:
|
||||
await _maybe_run_scheduled_cycle(datetime.now(UTC))
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Telemetry collect task cancelled after initial delay")
|
||||
return
|
||||
except Exception as e:
|
||||
logger.error("Error in post-boot telemetry check: %s", e, exc_info=True)
|
||||
|
||||
while True:
|
||||
try:
|
||||
delay = TELEMETRY_COLLECT_INITIAL_DELAY if first_run else TELEMETRY_COLLECT_INTERVAL
|
||||
await asyncio.sleep(delay)
|
||||
first_run = False
|
||||
|
||||
if not radio_manager.is_connected:
|
||||
logger.debug("Telemetry collect: radio not connected, skipping cycle")
|
||||
continue
|
||||
|
||||
app_settings = await AppSettingsRepository.get()
|
||||
tracked = app_settings.tracked_telemetry_repeaters
|
||||
if not tracked:
|
||||
continue
|
||||
|
||||
logger.info("Telemetry collect: starting cycle for %d repeater(s)", len(tracked))
|
||||
collected = 0
|
||||
|
||||
for pub_key in tracked:
|
||||
contact = await ContactRepository.get_by_key(pub_key)
|
||||
if not contact or contact.type != 2:
|
||||
logger.debug(
|
||||
"Telemetry collect: skipping %s (not found or not repeater)",
|
||||
pub_key[:12],
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
async with radio_manager.radio_operation(
|
||||
"telemetry_collect",
|
||||
blocking=False,
|
||||
suspend_auto_fetch=True,
|
||||
) as mc:
|
||||
if await _collect_repeater_telemetry(mc, contact):
|
||||
collected += 1
|
||||
except RadioOperationBusyError:
|
||||
logger.debug(
|
||||
"Telemetry collect: radio busy, skipping %s",
|
||||
pub_key[:12],
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Telemetry collect: cycle complete, %d/%d successful",
|
||||
collected,
|
||||
len(tracked),
|
||||
)
|
||||
await _sleep_until_next_utc_top_of_hour()
|
||||
await _maybe_run_scheduled_cycle(datetime.now(UTC))
|
||||
|
||||
except asyncio.CancelledError:
|
||||
logger.info("Telemetry collect task cancelled")
|
||||
@@ -1690,10 +1787,7 @@ def start_telemetry_collect() -> None:
|
||||
global _telemetry_collect_task
|
||||
if _telemetry_collect_task is None or _telemetry_collect_task.done():
|
||||
_telemetry_collect_task = asyncio.create_task(_telemetry_collect_loop())
|
||||
logger.info(
|
||||
"Started periodic telemetry collection (interval: %ds)",
|
||||
TELEMETRY_COLLECT_INTERVAL,
|
||||
)
|
||||
logger.info("Started periodic telemetry collection (UTC-hourly scheduler)")
|
||||
|
||||
|
||||
async def stop_telemetry_collect() -> None:
|
||||
|
||||
+69 -60
@@ -8,31 +8,33 @@ class ChannelRepository:
|
||||
@staticmethod
|
||||
async def upsert(key: str, name: str, is_hashtag: bool = False, on_radio: bool = False) -> None:
|
||||
"""Upsert a channel. Key is 32-char hex string."""
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO channels (key, name, is_hashtag, on_radio, flood_scope_override)
|
||||
VALUES (?, ?, ?, ?, NULL)
|
||||
ON CONFLICT(key) DO UPDATE SET
|
||||
name = excluded.name,
|
||||
is_hashtag = excluded.is_hashtag,
|
||||
on_radio = excluded.on_radio
|
||||
""",
|
||||
(key.upper(), name, is_hashtag, on_radio),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO channels (key, name, is_hashtag, on_radio, flood_scope_override)
|
||||
VALUES (?, ?, ?, ?, NULL)
|
||||
ON CONFLICT(key) DO UPDATE SET
|
||||
name = excluded.name,
|
||||
is_hashtag = excluded.is_hashtag,
|
||||
on_radio = excluded.on_radio
|
||||
""",
|
||||
(key.upper(), name, is_hashtag, on_radio),
|
||||
):
|
||||
pass
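# The refactor above replaces direct `db.conn.execute(...)` + `db.conn.commit()` calls
# with `db.tx()` / `db.readonly()` context managers from app.database, which this diff
# does not show. A minimal, self-contained sketch of what they are assumed to provide
# (one shared aiosqlite connection behind a non-reentrant asyncio.Lock); the real
# module may differ in detail:
import asyncio
from contextlib import asynccontextmanager

class _SketchDatabase:
    """Assumed shape of app.database.db: one shared aiosqlite connection behind a lock."""

    def __init__(self, conn):
        self.conn = conn
        self._lock = asyncio.Lock()  # non-reentrant: never nest tx()/readonly()

    @asynccontextmanager
    async def tx(self):
        async with self._lock:  # serialize writers on the shared connection
            try:
                yield self.conn
                await self.conn.commit()  # one commit per transaction block
            except Exception:
                await self.conn.rollback()
                raise

    @asynccontextmanager
    async def readonly(self):
        async with self._lock:  # reads reuse the lock; no commit needed
            yield self.conn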
|
||||
|
||||
@staticmethod
|
||||
async def get_by_key(key: str) -> Channel | None:
|
||||
"""Get a channel by its key (32-char hex string)."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||
FROM channels
|
||||
WHERE key = ?
|
||||
""",
|
||||
(key.upper(),),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||
FROM channels
|
||||
WHERE key = ?
|
||||
""",
|
||||
(key.upper(),),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if row:
|
||||
return Channel(
|
||||
key=row["key"],
|
||||
@@ -48,14 +50,15 @@ class ChannelRepository:
|
||||
|
||||
@staticmethod
|
||||
async def get_all() -> list[Channel]:
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||
FROM channels
|
||||
ORDER BY name
|
||||
"""
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||
FROM channels
|
||||
ORDER BY name
|
||||
"""
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [
|
||||
Channel(
|
||||
key=row["key"],
|
||||
@@ -73,21 +76,23 @@ class ChannelRepository:
|
||||
@staticmethod
|
||||
async def set_favorite(key: str, value: bool) -> bool:
|
||||
"""Set or clear the favorite flag for a channel. Returns True if row was found."""
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE channels SET favorite = ? WHERE key = ?",
|
||||
(1 if value else 0, key.upper()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount > 0
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE channels SET favorite = ? WHERE key = ?",
|
||||
(1 if value else 0, key.upper()),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def delete(key: str) -> None:
|
||||
"""Delete a channel by key."""
|
||||
await db.conn.execute(
|
||||
"DELETE FROM channels WHERE key = ?",
|
||||
(key.upper(),),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"DELETE FROM channels WHERE key = ?",
|
||||
(key.upper(),),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def update_last_read_at(key: str, timestamp: int | None = None) -> bool:
|
||||
@@ -96,35 +101,39 @@ class ChannelRepository:
|
||||
Returns True if a row was updated, False if channel not found.
|
||||
"""
|
||||
ts = timestamp if timestamp is not None else int(time.time())
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE channels SET last_read_at = ? WHERE key = ?",
|
||||
(ts, key.upper()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount > 0
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE channels SET last_read_at = ? WHERE key = ?",
|
||||
(ts, key.upper()),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def update_flood_scope_override(key: str, flood_scope_override: str | None) -> bool:
|
||||
"""Set or clear a channel's flood-scope override."""
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE channels SET flood_scope_override = ? WHERE key = ?",
|
||||
(flood_scope_override, key.upper()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount > 0
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE channels SET flood_scope_override = ? WHERE key = ?",
|
||||
(flood_scope_override, key.upper()),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def update_path_hash_mode_override(key: str, path_hash_mode_override: int | None) -> bool:
|
||||
"""Set or clear a channel's path hash mode override."""
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE channels SET path_hash_mode_override = ? WHERE key = ?",
|
||||
(path_hash_mode_override, key.upper()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount > 0
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE channels SET path_hash_mode_override = ? WHERE key = ?",
|
||||
(path_hash_mode_override, key.upper()),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def mark_all_read(timestamp: int) -> None:
|
||||
"""Mark all channels as read at the given timestamp."""
|
||||
await db.conn.execute("UPDATE channels SET last_read_at = ?", (timestamp,))
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute("UPDATE channels SET last_read_at = ?", (timestamp,)):
|
||||
pass
|
||||
|
||||
+467 -356
@@ -61,66 +61,72 @@ class ContactRepository:
|
||||
)
|
||||
)
|
||||
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO contacts (public_key, name, type, flags, direct_path, direct_path_len,
|
||||
direct_path_hash_mode, direct_path_updated_at,
|
||||
route_override_path, route_override_len,
|
||||
route_override_hash_mode,
|
||||
last_advert, lat, lon, last_seen,
|
||||
on_radio, last_contacted, first_seen)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(public_key) DO UPDATE SET
|
||||
name = COALESCE(excluded.name, contacts.name),
|
||||
type = CASE WHEN excluded.type = 0 THEN contacts.type ELSE excluded.type END,
|
||||
flags = excluded.flags,
|
||||
direct_path = COALESCE(excluded.direct_path, contacts.direct_path),
|
||||
direct_path_len = COALESCE(excluded.direct_path_len, contacts.direct_path_len),
|
||||
direct_path_hash_mode = COALESCE(
|
||||
excluded.direct_path_hash_mode, contacts.direct_path_hash_mode
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO contacts (public_key, name, type, flags, direct_path, direct_path_len,
|
||||
direct_path_hash_mode, direct_path_updated_at,
|
||||
route_override_path, route_override_len,
|
||||
route_override_hash_mode,
|
||||
last_advert, lat, lon, last_seen,
|
||||
on_radio, last_contacted, first_seen)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(public_key) DO UPDATE SET
|
||||
name = COALESCE(excluded.name, contacts.name),
|
||||
type = CASE WHEN excluded.type = 0 THEN contacts.type ELSE excluded.type END,
|
||||
flags = excluded.flags,
|
||||
direct_path = COALESCE(excluded.direct_path, contacts.direct_path),
|
||||
direct_path_len = COALESCE(excluded.direct_path_len, contacts.direct_path_len),
|
||||
direct_path_hash_mode = COALESCE(
|
||||
excluded.direct_path_hash_mode, contacts.direct_path_hash_mode
|
||||
),
|
||||
direct_path_updated_at = COALESCE(
|
||||
excluded.direct_path_updated_at, contacts.direct_path_updated_at
|
||||
),
|
||||
route_override_path = COALESCE(
|
||||
excluded.route_override_path, contacts.route_override_path
|
||||
),
|
||||
route_override_len = COALESCE(
|
||||
excluded.route_override_len, contacts.route_override_len
|
||||
),
|
||||
route_override_hash_mode = COALESCE(
|
||||
excluded.route_override_hash_mode, contacts.route_override_hash_mode
|
||||
),
|
||||
last_advert = COALESCE(excluded.last_advert, contacts.last_advert),
|
||||
lat = COALESCE(excluded.lat, contacts.lat),
|
||||
lon = COALESCE(excluded.lon, contacts.lon),
|
||||
last_seen = CASE
|
||||
WHEN excluded.last_seen IS NULL THEN contacts.last_seen
|
||||
WHEN contacts.last_seen IS NULL THEN excluded.last_seen
|
||||
WHEN excluded.last_seen > contacts.last_seen THEN excluded.last_seen
|
||||
ELSE contacts.last_seen
|
||||
END,
|
||||
on_radio = COALESCE(excluded.on_radio, contacts.on_radio),
|
||||
last_contacted = COALESCE(excluded.last_contacted, contacts.last_contacted),
|
||||
first_seen = COALESCE(contacts.first_seen, excluded.first_seen)
|
||||
""",
|
||||
(
|
||||
contact_row.public_key.lower(),
|
||||
contact_row.name,
|
||||
contact_row.type,
|
||||
contact_row.flags,
|
||||
direct_path,
|
||||
direct_path_len,
|
||||
direct_path_hash_mode,
|
||||
contact_row.direct_path_updated_at,
|
||||
route_override_path,
|
||||
route_override_len,
|
||||
route_override_hash_mode,
|
||||
contact_row.last_advert,
|
||||
contact_row.lat,
|
||||
contact_row.lon,
|
||||
contact_row.last_seen,
|
||||
contact_row.on_radio,
|
||||
contact_row.last_contacted,
|
||||
contact_row.first_seen,
|
||||
),
|
||||
direct_path_updated_at = COALESCE(
|
||||
excluded.direct_path_updated_at, contacts.direct_path_updated_at
|
||||
),
|
||||
route_override_path = COALESCE(
|
||||
excluded.route_override_path, contacts.route_override_path
|
||||
),
|
||||
route_override_len = COALESCE(
|
||||
excluded.route_override_len, contacts.route_override_len
|
||||
),
|
||||
route_override_hash_mode = COALESCE(
|
||||
excluded.route_override_hash_mode, contacts.route_override_hash_mode
|
||||
),
|
||||
last_advert = COALESCE(excluded.last_advert, contacts.last_advert),
|
||||
lat = COALESCE(excluded.lat, contacts.lat),
|
||||
lon = COALESCE(excluded.lon, contacts.lon),
|
||||
last_seen = excluded.last_seen,
|
||||
on_radio = COALESCE(excluded.on_radio, contacts.on_radio),
|
||||
last_contacted = COALESCE(excluded.last_contacted, contacts.last_contacted),
|
||||
first_seen = COALESCE(contacts.first_seen, excluded.first_seen)
|
||||
""",
|
||||
(
|
||||
contact_row.public_key.lower(),
|
||||
contact_row.name,
|
||||
contact_row.type,
|
||||
contact_row.flags,
|
||||
direct_path,
|
||||
direct_path_len,
|
||||
direct_path_hash_mode,
|
||||
contact_row.direct_path_updated_at,
|
||||
route_override_path,
|
||||
route_override_len,
|
||||
route_override_hash_mode,
|
||||
contact_row.last_advert,
|
||||
contact_row.lat,
|
||||
contact_row.lon,
|
||||
contact_row.last_seen if contact_row.last_seen is not None else int(time.time()),
|
||||
contact_row.on_radio,
|
||||
contact_row.last_contacted,
|
||||
contact_row.first_seen,
|
||||
),
|
||||
)
|
||||
await db.conn.commit()
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def _row_to_contact(row) -> Contact:
|
||||
@@ -178,10 +184,11 @@ class ContactRepository:
|
||||
|
||||
@staticmethod
|
||||
async def get_by_key(public_key: str) -> Contact | None:
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key = ?", (public_key.lower(),)
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key = ?", (public_key.lower(),)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return ContactRepository._row_to_contact(row) if row else None
|
||||
|
||||
@staticmethod
|
||||
@@ -195,11 +202,12 @@ class ContactRepository:
|
||||
exact = await ContactRepository.get_by_key(normalized_prefix)
|
||||
if exact:
|
||||
return exact
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT 2",
|
||||
(f"{normalized_prefix}%",),
|
||||
)
|
||||
rows = list(await cursor.fetchall())
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT 2",
|
||||
(f"{normalized_prefix}%",),
|
||||
) as cursor:
|
||||
rows = list(await cursor.fetchall())
|
||||
if len(rows) != 1:
|
||||
return None
|
||||
return ContactRepository._row_to_contact(rows[0])
|
||||
@@ -207,11 +215,12 @@ class ContactRepository:
|
||||
@staticmethod
|
||||
async def _get_prefix_matches(prefix: str, limit: int = 2) -> list[Contact]:
|
||||
"""Get contacts matching a key prefix, up to limit."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT ?",
|
||||
(f"{prefix.lower()}%", limit),
|
||||
)
|
||||
rows = list(await cursor.fetchall())
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT ?",
|
||||
(f"{prefix.lower()}%", limit),
|
||||
) as cursor:
|
||||
rows = list(await cursor.fetchall())
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
@@ -237,8 +246,9 @@ class ContactRepository:
|
||||
@staticmethod
|
||||
async def get_by_name(name: str) -> list[Contact]:
|
||||
"""Get all contacts with the given exact name."""
|
||||
cursor = await db.conn.execute("SELECT * FROM contacts WHERE name = ?", (name,))
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute("SELECT * FROM contacts WHERE name = ?", (name,)) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
@@ -254,8 +264,9 @@ class ContactRepository:
|
||||
normalized = [p.lower() for p in prefixes]
|
||||
conditions = " OR ".join(["public_key LIKE ?"] * len(normalized))
|
||||
params = [f"{p}%" for p in normalized]
|
||||
cursor = await db.conn.execute(f"SELECT * FROM contacts WHERE {conditions}", params)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(f"SELECT * FROM contacts WHERE {conditions}", params) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
# Group by which prefix each row matches
|
||||
prefix_to_rows: dict[str, list] = {p: [] for p in normalized}
|
||||
for row in rows:
|
||||
@@ -272,41 +283,67 @@ class ContactRepository:
|
||||
|
||||
@staticmethod
|
||||
async def get_all(limit: int = 100, offset: int = 0) -> list[Contact]:
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts ORDER BY COALESCE(name, public_key) LIMIT ? OFFSET ?",
|
||||
(limit, offset),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts ORDER BY COALESCE(name, public_key) LIMIT ? OFFSET ?",
|
||||
(limit, offset),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def get_recently_contacted_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||
"""Get recently interacted-with non-repeater contacts."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT * FROM contacts
|
||||
WHERE type != 2 AND last_contacted IS NOT NULL AND length(public_key) = 64
|
||||
ORDER BY last_contacted DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(limit,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT * FROM contacts
|
||||
WHERE type != 2 AND last_contacted IS NOT NULL AND length(public_key) = 64
|
||||
ORDER BY last_contacted DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(limit,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def get_recently_dm_active_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||
"""Get non-repeater contacts with the most recent DM activity (sent or received)."""
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT c.*
|
||||
FROM contacts c
|
||||
INNER JOIN (
|
||||
SELECT conversation_key, MAX(received_at) AS last_dm
|
||||
FROM messages
|
||||
WHERE type = 'PRIV'
|
||||
GROUP BY conversation_key
|
||||
) m ON c.public_key = m.conversation_key
|
||||
WHERE c.type != 2 AND length(c.public_key) = 64
|
||||
ORDER BY m.last_dm DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(limit,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def get_recently_advertised_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||
"""Get recently advert-heard non-repeater contacts."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT * FROM contacts
|
||||
WHERE type != 2 AND last_advert IS NOT NULL AND length(public_key) = 64
|
||||
ORDER BY last_advert DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(limit,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT * FROM contacts
|
||||
WHERE type != 2 AND last_advert IS NOT NULL AND length(public_key) = 64
|
||||
ORDER BY last_advert DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(limit,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
@@ -317,27 +354,44 @@ class ContactRepository:
|
||||
path_hash_mode: int | None = None,
|
||||
updated_at: int | None = None,
|
||||
) -> None:
|
||||
"""Persist a learned direct route for a contact.
|
||||
|
||||
Both callers (the RF PATH packet processor and the firmware PATH_UPDATE
|
||||
event handler) are RF-backed: firmware ``onContactPathUpdated`` only
|
||||
fires from ``onContactPathRecv`` during RF PATH packet reception. So
|
||||
this method also advances ``last_seen`` monotonically. Never moves
|
||||
``last_seen`` backwards if an out-of-order arrival lands with an older
|
||||
timestamp.
|
||||
"""
|
||||
normalized_path, normalized_path_len, normalized_hash_mode = normalize_contact_route(
|
||||
path,
|
||||
path_len,
|
||||
path_hash_mode,
|
||||
)
|
||||
ts = updated_at if updated_at is not None else int(time.time())
|
||||
await db.conn.execute(
|
||||
"""UPDATE contacts SET direct_path = ?, direct_path_len = ?,
|
||||
direct_path_hash_mode = COALESCE(?, direct_path_hash_mode),
|
||||
direct_path_updated_at = ?,
|
||||
last_seen = ? WHERE public_key = ?""",
|
||||
(
|
||||
normalized_path,
|
||||
normalized_path_len,
|
||||
normalized_hash_mode,
|
||||
ts,
|
||||
ts,
|
||||
public_key.lower(),
|
||||
),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""UPDATE contacts SET direct_path = ?, direct_path_len = ?,
|
||||
direct_path_hash_mode = COALESCE(?, direct_path_hash_mode),
|
||||
direct_path_updated_at = ?,
|
||||
last_seen = CASE
|
||||
WHEN last_seen IS NULL THEN ?
|
||||
WHEN ? > last_seen THEN ?
|
||||
ELSE last_seen
|
||||
END
|
||||
WHERE public_key = ?""",
|
||||
(
|
||||
normalized_path,
|
||||
normalized_path_len,
|
||||
normalized_hash_mode,
|
||||
ts,
|
||||
ts,
|
||||
ts,
|
||||
ts,
|
||||
public_key.lower(),
|
||||
),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def set_routing_override(
|
||||
@@ -351,65 +405,71 @@ class ContactRepository:
|
||||
path_len,
|
||||
path_hash_mode,
|
||||
)
|
||||
await db.conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET route_override_path = ?, route_override_len = ?, route_override_hash_mode = ?
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(
|
||||
normalized_path,
|
||||
normalized_len,
|
||||
normalized_hash_mode,
|
||||
public_key.lower(),
|
||||
),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET route_override_path = ?, route_override_len = ?, route_override_hash_mode = ?
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(
|
||||
normalized_path,
|
||||
normalized_len,
|
||||
normalized_hash_mode,
|
||||
public_key.lower(),
|
||||
),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def clear_routing_override(public_key: str) -> None:
|
||||
await db.conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET route_override_path = NULL,
|
||||
route_override_len = NULL,
|
||||
route_override_hash_mode = NULL
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(public_key.lower(),),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET route_override_path = NULL,
|
||||
route_override_len = NULL,
|
||||
route_override_hash_mode = NULL
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(public_key.lower(),),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def clear_on_radio_except(keep_keys: list[str]) -> None:
|
||||
"""Set on_radio=False for all contacts NOT in keep_keys."""
|
||||
if not keep_keys:
|
||||
await db.conn.execute("UPDATE contacts SET on_radio = 0 WHERE on_radio = 1")
|
||||
else:
|
||||
placeholders = ",".join("?" * len(keep_keys))
|
||||
await db.conn.execute(
|
||||
f"UPDATE contacts SET on_radio = 0 WHERE on_radio = 1 AND public_key NOT IN ({placeholders})",
|
||||
keep_keys,
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
if not keep_keys:
|
||||
async with conn.execute("UPDATE contacts SET on_radio = 0 WHERE on_radio = 1"):
|
||||
pass
|
||||
else:
|
||||
placeholders = ",".join("?" * len(keep_keys))
|
||||
async with conn.execute(
|
||||
f"UPDATE contacts SET on_radio = 0 WHERE on_radio = 1 AND public_key NOT IN ({placeholders})",
|
||||
keep_keys,
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_favorites() -> list[Contact]:
|
||||
"""Return all contacts marked as favorite."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts WHERE favorite = 1 AND LENGTH(public_key) = 64"
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts WHERE favorite = 1 AND LENGTH(public_key) = 64"
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def set_favorite(public_key: str, value: bool) -> None:
|
||||
"""Set or clear the favorite flag for a contact."""
|
||||
await db.conn.execute(
|
||||
"UPDATE contacts SET favorite = ? WHERE public_key = ?",
|
||||
(1 if value else 0, public_key.lower()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE contacts SET favorite = ? WHERE public_key = ?",
|
||||
(1 if value else 0, public_key.lower()),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def delete(public_key: str) -> None:
|
||||
@@ -417,18 +477,53 @@ class ContactRepository:
|
||||
# contact_name_history and contact_advert_paths cascade via FK.
|
||||
# Messages are intentionally preserved so history re-surfaces
|
||||
# if the contact is re-added later.
|
||||
await db.conn.execute("DELETE FROM contacts WHERE public_key = ?", (normalized,))
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute("DELETE FROM contacts WHERE public_key = ?", (normalized,)):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def update_last_contacted(public_key: str, timestamp: int | None = None) -> None:
|
||||
"""Update the last_contacted timestamp for a contact."""
|
||||
"""Update the last_contacted timestamp for a contact.
|
||||
|
||||
``last_contacted`` tracks the most recent direct-conversation activity
|
||||
with this contact in either direction (incoming or outgoing DM). It is
|
||||
the field that powers "recent conversations" ordering on the frontend.
|
||||
|
||||
It deliberately does not touch ``last_seen``: ``last_seen`` is reserved
|
||||
for actual RF reception from the contact, and outgoing sends are not
|
||||
evidence that we heard from them. RF observations from DM ingest update
|
||||
``last_seen`` via :meth:`touch_last_seen` on incoming DMs only.
|
||||
"""
|
||||
ts = timestamp if timestamp is not None else int(time.time())
|
||||
await db.conn.execute(
|
||||
"UPDATE contacts SET last_contacted = ?, last_seen = ? WHERE public_key = ?",
|
||||
(ts, ts, public_key.lower()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE contacts SET last_contacted = ? WHERE public_key = ?",
|
||||
(ts, public_key.lower()),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def touch_last_seen(public_key: str, timestamp: int) -> None:
|
||||
"""Monotonically bump last_seen for a contact from an RF observation.
|
||||
|
||||
Never moves last_seen backwards; a no-op if the contact row does not
|
||||
exist. Use this from packet-ingest paths that have attributed a packet
|
||||
to a specific contact pubkey (advert, incoming DM, decrypted PATH, etc.).
|
||||
"""
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET last_seen = CASE
|
||||
WHEN last_seen IS NULL THEN ?
|
||||
WHEN ? > last_seen THEN ?
|
||||
ELSE last_seen
|
||||
END
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(timestamp, timestamp, timestamp, public_key.lower()),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def update_last_read_at(public_key: str, timestamp: int | None = None) -> bool:
|
||||
@@ -437,22 +532,25 @@ class ContactRepository:
|
||||
Returns True if a row was updated, False if contact not found.
|
||||
"""
|
||||
ts = timestamp if timestamp is not None else int(time.time())
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE contacts SET last_read_at = ? WHERE public_key = ?",
|
||||
(ts, public_key.lower()),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount > 0
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE contacts SET last_read_at = ? WHERE public_key = ?",
|
||||
(ts, public_key.lower()),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def promote_prefix_placeholders(full_key: str) -> list[str]:
|
||||
"""Promote prefix-only placeholder contacts to a resolved full key.
|
||||
|
||||
Returns the placeholder public keys that were merged into the full key.
|
||||
All operations for the promotion happen inside one ``db.tx()`` so
|
||||
partial promotions never leak to readers between steps.
|
||||
"""
|
||||
|
||||
async def migrate_child_rows(old_key: str, new_key: str) -> None:
|
||||
await db.conn.execute(
|
||||
async def migrate_child_rows(conn, old_key: str, new_key: str) -> None:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
||||
SELECT ?, name, first_seen, last_seen
|
||||
@@ -463,8 +561,9 @@ class ContactRepository:
|
||||
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
||||
""",
|
||||
(new_key, old_key),
|
||||
)
|
||||
await db.conn.execute(
|
||||
):
|
||||
pass
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_advert_paths
|
||||
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
||||
@@ -477,132 +576,138 @@ class ContactRepository:
|
||||
heard_count = contact_advert_paths.heard_count + excluded.heard_count
|
||||
""",
|
||||
(new_key, old_key),
|
||||
)
|
||||
await db.conn.execute(
|
||||
):
|
||||
pass
|
||||
async with conn.execute(
|
||||
"DELETE FROM contact_name_history WHERE public_key = ?",
|
||||
(old_key,),
|
||||
)
|
||||
await db.conn.execute(
|
||||
):
|
||||
pass
|
||||
async with conn.execute(
|
||||
"DELETE FROM contact_advert_paths WHERE public_key = ?",
|
||||
(old_key,),
|
||||
)
|
||||
):
|
||||
pass
|
||||
|
||||
normalized_full_key = full_key.lower()
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT public_key, last_seen, last_contacted, first_seen, last_read_at
|
||||
FROM contacts
|
||||
WHERE length(public_key) < 64
|
||||
AND ? LIKE public_key || '%'
|
||||
ORDER BY length(public_key) DESC, public_key
|
||||
""",
|
||||
(normalized_full_key,),
|
||||
)
|
||||
rows = list(await cursor.fetchall())
|
||||
if not rows:
|
||||
return []
|
||||
|
||||
promoted_keys: list[str] = []
|
||||
|
||||
for row in rows:
|
||||
old_key = row["public_key"]
|
||||
if old_key == normalized_full_key:
|
||||
continue
|
||||
|
||||
match_cursor = await db.conn.execute(
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS match_count
|
||||
SELECT public_key, last_seen, last_contacted, first_seen, last_read_at
|
||||
FROM contacts
|
||||
WHERE length(public_key) = 64
|
||||
AND public_key LIKE ? || '%'
|
||||
WHERE length(public_key) < 64
|
||||
AND ? LIKE public_key || '%'
|
||||
ORDER BY length(public_key) DESC, public_key
|
||||
""",
|
||||
(old_key,),
|
||||
)
|
||||
match_row = await match_cursor.fetchone()
|
||||
match_count = match_row["match_count"] if match_row is not None else 0
|
||||
if match_count != 1:
|
||||
logger.warning(
|
||||
"Skipping prefix promotion for %s: %d full-key contacts match (expected 1)",
|
||||
old_key,
|
||||
match_count,
|
||||
)
|
||||
continue
|
||||
(normalized_full_key,),
|
||||
) as cursor:
|
||||
rows = list(await cursor.fetchall())
|
||||
if not rows:
|
||||
return []
|
||||
|
||||
await migrate_child_rows(old_key, normalized_full_key)
|
||||
for row in rows:
|
||||
old_key = row["public_key"]
|
||||
if old_key == normalized_full_key:
|
||||
continue
|
||||
|
||||
# Merge timestamp metadata from the old prefix contact into the
|
||||
# full-key contact (which all callers guarantee already exists),
|
||||
# then delete the prefix placeholder.
|
||||
await db.conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET last_seen = CASE
|
||||
WHEN contacts.last_seen IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_seen
|
||||
WHEN ? > contacts.last_seen THEN ?
|
||||
ELSE contacts.last_seen
|
||||
END,
|
||||
last_contacted = CASE
|
||||
WHEN contacts.last_contacted IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_contacted
|
||||
WHEN ? > contacts.last_contacted THEN ?
|
||||
ELSE contacts.last_contacted
|
||||
END,
|
||||
first_seen = CASE
|
||||
WHEN contacts.first_seen IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.first_seen
|
||||
WHEN ? < contacts.first_seen THEN ?
|
||||
ELSE contacts.first_seen
|
||||
END,
|
||||
last_read_at = CASE
|
||||
WHEN contacts.last_read_at IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_read_at
|
||||
WHEN ? > contacts.last_read_at THEN ?
|
||||
ELSE contacts.last_read_at
|
||||
END
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
normalized_full_key,
|
||||
),
|
||||
)
|
||||
await db.conn.execute("DELETE FROM contacts WHERE public_key = ?", (old_key,))
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS match_count
|
||||
FROM contacts
|
||||
WHERE length(public_key) = 64
|
||||
AND public_key LIKE ? || '%'
|
||||
""",
|
||||
(old_key,),
|
||||
) as match_cursor:
|
||||
match_row = await match_cursor.fetchone()
|
||||
match_count = match_row["match_count"] if match_row is not None else 0
|
||||
if match_count != 1:
|
||||
logger.warning(
|
||||
"Skipping prefix promotion for %s: %d full-key contacts match (expected 1)",
|
||||
old_key,
|
||||
match_count,
|
||||
)
|
||||
continue
|
||||
|
||||
promoted_keys.append(old_key)
|
||||
await migrate_child_rows(conn, old_key, normalized_full_key)
|
||||
|
||||
# Merge timestamp metadata from the old prefix contact into the
|
||||
# full-key contact (which all callers guarantee already exists),
|
||||
# then delete the prefix placeholder.
|
||||
async with conn.execute(
|
||||
"""
|
||||
UPDATE contacts
|
||||
SET last_seen = CASE
|
||||
WHEN contacts.last_seen IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_seen
|
||||
WHEN ? > contacts.last_seen THEN ?
|
||||
ELSE contacts.last_seen
|
||||
END,
|
||||
last_contacted = CASE
|
||||
WHEN contacts.last_contacted IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_contacted
|
||||
WHEN ? > contacts.last_contacted THEN ?
|
||||
ELSE contacts.last_contacted
|
||||
END,
|
||||
first_seen = CASE
|
||||
WHEN contacts.first_seen IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.first_seen
|
||||
WHEN ? < contacts.first_seen THEN ?
|
||||
ELSE contacts.first_seen
|
||||
END,
|
||||
last_read_at = CASE
|
||||
WHEN contacts.last_read_at IS NULL THEN ?
|
||||
WHEN ? IS NULL THEN contacts.last_read_at
|
||||
WHEN ? > contacts.last_read_at THEN ?
|
||||
ELSE contacts.last_read_at
|
||||
END
|
||||
WHERE public_key = ?
|
||||
""",
|
||||
(
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_seen"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["last_contacted"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["first_seen"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
row["last_read_at"],
|
||||
normalized_full_key,
|
||||
),
|
||||
):
|
||||
pass
|
||||
async with conn.execute("DELETE FROM contacts WHERE public_key = ?", (old_key,)):
|
||||
pass
|
||||
|
||||
promoted_keys.append(old_key)
|
||||
|
||||
await db.conn.commit()
|
||||
return promoted_keys
|
||||
|
||||
@staticmethod
|
||||
async def mark_all_read(timestamp: int) -> None:
|
||||
"""Mark all contacts as read at the given timestamp."""
|
||||
await db.conn.execute("UPDATE contacts SET last_read_at = ?", (timestamp,))
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute("UPDATE contacts SET last_read_at = ?", (timestamp,)):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_by_pubkey_first_byte(hex_byte: str) -> list[Contact]:
|
||||
"""Get contacts whose public key starts with the given hex byte (2 chars)."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM contacts WHERE substr(public_key, 1, 2) = ?",
|
||||
(hex_byte.lower(),),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM contacts WHERE substr(public_key, 1, 2) = ?",
|
||||
(hex_byte.lower(),),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||
|
||||
|
||||
@@ -641,71 +746,75 @@ class ContactAdvertPathRepository:
|
||||
normalized_path = path_hex.lower()
|
||||
path_len = hop_count if hop_count is not None else len(normalized_path) // 2
|
||||
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_advert_paths
|
||||
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
||||
VALUES (?, ?, ?, ?, ?, 1)
|
||||
ON CONFLICT(public_key, path_hex, path_len) DO UPDATE SET
|
||||
last_seen = MAX(contact_advert_paths.last_seen, excluded.last_seen),
|
||||
heard_count = contact_advert_paths.heard_count + 1
|
||||
""",
|
||||
(normalized_key, normalized_path, path_len, timestamp, timestamp),
|
||||
)
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_advert_paths
|
||||
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
||||
VALUES (?, ?, ?, ?, ?, 1)
|
||||
ON CONFLICT(public_key, path_hex, path_len) DO UPDATE SET
|
||||
last_seen = MAX(contact_advert_paths.last_seen, excluded.last_seen),
|
||||
heard_count = contact_advert_paths.heard_count + 1
|
||||
""",
|
||||
(normalized_key, normalized_path, path_len, timestamp, timestamp),
|
||||
):
|
||||
pass
|
||||
|
||||
# Keep only the N most recent unique paths per contact.
|
||||
await db.conn.execute(
|
||||
"""
|
||||
DELETE FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
AND id NOT IN (
|
||||
SELECT id
|
||||
FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
LIMIT ?
|
||||
)
|
||||
""",
|
||||
(normalized_key, normalized_key, max_paths),
|
||||
)
|
||||
await db.conn.commit()
|
||||
# Keep only the N most recent unique paths per contact.
|
||||
async with conn.execute(
|
||||
"""
|
||||
DELETE FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
AND id NOT IN (
|
||||
SELECT id
|
||||
FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
LIMIT ?
|
||||
)
|
||||
""",
|
||||
(normalized_key, normalized_key, max_paths),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_recent_for_contact(public_key: str, limit: int = 10) -> list[ContactAdvertPath]:
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT path_hex, path_len, first_seen, last_seen, heard_count
|
||||
FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
LIMIT ?
|
||||
""",
|
||||
(public_key.lower(), limit),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT path_hex, path_len, first_seen, last_seen, heard_count
|
||||
FROM contact_advert_paths
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
LIMIT ?
|
||||
""",
|
||||
(public_key.lower(), limit),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [ContactAdvertPathRepository._row_to_path(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def get_recent_for_all_contacts(
|
||||
limit_per_contact: int = 10,
|
||||
) -> list[ContactAdvertPathSummary]:
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT public_key, path_hex, path_len, first_seen, last_seen, heard_count
|
||||
FROM (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY public_key
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
) AS rn
|
||||
FROM contact_advert_paths
|
||||
)
|
||||
WHERE rn <= ?
|
||||
ORDER BY public_key ASC, last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
""",
|
||||
(limit_per_contact,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT public_key, path_hex, path_len, first_seen, last_seen, heard_count
|
||||
FROM (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY public_key
|
||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
) AS rn
|
||||
FROM contact_advert_paths
|
||||
)
|
||||
WHERE rn <= ?
|
||||
ORDER BY public_key ASC, last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||
""",
|
||||
(limit_per_contact,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
|
||||
grouped: dict[str, list[ContactAdvertPath]] = {}
|
||||
for row in rows:
|
||||
@@ -727,29 +836,31 @@ class ContactNameHistoryRepository:
|
||||
@staticmethod
|
||||
async def record_name(public_key: str, name: str, timestamp: int) -> None:
|
||||
"""Record a name observation. Upserts: updates last_seen if name already known."""
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(public_key, name) DO UPDATE SET
|
||||
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
||||
""",
|
||||
(public_key.lower(), name, timestamp, timestamp),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(public_key, name) DO UPDATE SET
|
||||
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
||||
""",
|
||||
(public_key.lower(), name, timestamp, timestamp),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_history(public_key: str) -> list[ContactNameHistory]:
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT name, first_seen, last_seen
|
||||
FROM contact_name_history
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC
|
||||
""",
|
||||
(public_key.lower(),),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT name, first_seen, last_seen
|
||||
FROM contact_name_history
|
||||
WHERE public_key = ?
|
||||
ORDER BY last_seen DESC
|
||||
""",
|
||||
(public_key.lower(),),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [
|
||||
ContactNameHistory(
|
||||
name=row["name"], first_seen=row["first_seen"], last_seen=row["last_seen"]
|
||||
|
||||
+61 -44
@@ -6,6 +6,8 @@ import time
|
||||
import uuid
|
||||
from typing import Any
|
||||
|
||||
import aiosqlite
|
||||
|
||||
from app.database import db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -31,26 +33,37 @@ def _row_to_dict(row: Any) -> dict[str, Any]:
|
||||
return result
|
||||
|
||||
|
||||
async def _get_in_conn(conn: aiosqlite.Connection, config_id: str) -> dict[str, Any] | None:
|
||||
"""Fetch a config using an already-acquired connection.
|
||||
|
||||
Used by ``create`` and ``update`` to return the freshly-written row
|
||||
without re-entering the non-reentrant DB lock.
|
||||
"""
|
||||
async with conn.execute("SELECT * FROM fanout_configs WHERE id = ?", (config_id,)) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
return _row_to_dict(row)
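# Usage note: create() and update() below read the freshly written row back with
# _get_in_conn(conn, ...) on the connection they already hold; calling
# FanoutConfigRepository.get() there instead would try to re-acquire the (assumed
# non-reentrant) DB lock from inside an open db.tx() block and deadlock.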
|
||||
|
||||
|
||||
class FanoutConfigRepository:
|
||||
"""CRUD operations for fanout_configs table."""
|
||||
|
||||
@staticmethod
|
||||
async def get_all() -> list[dict[str, Any]]:
|
||||
"""Get all fanout configs ordered by sort_order."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM fanout_configs ORDER BY sort_order, created_at"
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM fanout_configs ORDER BY sort_order, created_at"
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [_row_to_dict(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def get(config_id: str) -> dict[str, Any] | None:
|
||||
"""Get a single fanout config by ID."""
|
||||
cursor = await db.conn.execute("SELECT * FROM fanout_configs WHERE id = ?", (config_id,))
|
||||
row = await cursor.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
return _row_to_dict(row)
|
||||
async with db.readonly() as conn:
|
||||
return await _get_in_conn(conn, config_id)
|
||||
|
||||
@staticmethod
|
||||
async def create(
|
||||
@@ -65,39 +78,41 @@ class FanoutConfigRepository:
|
||||
new_id = config_id or str(uuid.uuid4())
|
||||
now = int(time.time())
|
||||
|
||||
# Get next sort_order
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT COALESCE(MAX(sort_order), -1) + 1 FROM fanout_configs"
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
sort_order = row[0] if row else 0
|
||||
async with db.tx() as conn:
|
||||
# Determine next sort_order under the same lock as the insert,
|
||||
# so two concurrent ``create()`` calls cannot collide.
|
||||
async with conn.execute(
|
||||
"SELECT COALESCE(MAX(sort_order), -1) + 1 FROM fanout_configs"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
sort_order = row[0] if row else 0
|
||||
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO fanout_configs (id, type, name, enabled, config, scope, sort_order, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
new_id,
|
||||
config_type,
|
||||
name,
|
||||
1 if enabled else 0,
|
||||
json.dumps(config),
|
||||
json.dumps(scope),
|
||||
sort_order,
|
||||
now,
|
||||
),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO fanout_configs (id, type, name, enabled, config, scope, sort_order, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
new_id,
|
||||
config_type,
|
||||
name,
|
||||
1 if enabled else 0,
|
||||
json.dumps(config),
|
||||
json.dumps(scope),
|
||||
sort_order,
|
||||
now,
|
||||
),
|
||||
):
|
||||
pass
|
||||
|
||||
result = await FanoutConfigRepository.get(new_id)
|
||||
result = await _get_in_conn(conn, new_id)
|
||||
assert result is not None
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
async def update(config_id: str, **fields: Any) -> dict[str, Any] | None:
|
||||
"""Update a fanout config. Only provided fields are updated."""
|
||||
updates = []
|
||||
updates: list[str] = []
|
||||
params: list[Any] = []
|
||||
|
||||
for field in ("name", "enabled", "config", "scope", "sort_order"):
|
||||
@@ -115,23 +130,25 @@ class FanoutConfigRepository:
|
||||
|
||||
params.append(config_id)
|
||||
query = f"UPDATE fanout_configs SET {', '.join(updates)} WHERE id = ?"
|
||||
await db.conn.execute(query, params)
|
||||
await db.conn.commit()
|
||||
|
||||
return await FanoutConfigRepository.get(config_id)
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(query, params):
|
||||
pass
|
||||
return await _get_in_conn(conn, config_id)
|
||||
|
||||
@staticmethod
|
||||
async def delete(config_id: str) -> None:
|
||||
"""Delete a fanout config."""
|
||||
await db.conn.execute("DELETE FROM fanout_configs WHERE id = ?", (config_id,))
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute("DELETE FROM fanout_configs WHERE id = ?", (config_id,)):
|
||||
pass
|
||||
_configs_cache.pop(config_id, None)
|
||||
|
||||
@staticmethod
|
||||
async def get_enabled() -> list[dict[str, Any]]:
|
||||
"""Get all enabled fanout configs."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT * FROM fanout_configs WHERE enabled = 1 ORDER BY sort_order, created_at"
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM fanout_configs WHERE enabled = 1 ORDER BY sort_order, created_at"
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [_row_to_dict(row) for row in rows]
|
||||
|
||||
+391 -346
@@ -89,32 +89,34 @@ class MessageRepository:
|
||||
# Normalize sender_key to lowercase so queries can match without LOWER().
|
||||
normalized_sender_key = sender_key.lower() if sender_key else sender_key
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
|
||||
received_at, paths, txt_type, signature, outgoing,
|
||||
sender_name, sender_key)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
msg_type,
|
||||
conversation_key,
|
||||
text,
|
||||
sender_timestamp,
|
||||
received_at,
|
||||
paths_json,
|
||||
txt_type,
|
||||
signature,
|
||||
outgoing,
|
||||
sender_name,
|
||||
normalized_sender_key,
|
||||
),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
|
||||
received_at, paths, txt_type, signature, outgoing,
|
||||
sender_name, sender_key)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
msg_type,
|
||||
conversation_key,
|
||||
text,
|
||||
sender_timestamp,
|
||||
received_at,
|
||||
paths_json,
|
||||
txt_type,
|
||||
signature,
|
||||
outgoing,
|
||||
sender_name,
|
||||
normalized_sender_key,
|
||||
),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
lastrowid = cursor.lastrowid
|
||||
# rowcount is 0 if INSERT was ignored due to UNIQUE constraint violation
|
||||
if cursor.rowcount == 0:
|
||||
if rowcount == 0:
|
||||
return None
|
||||
return cursor.lastrowid
|
||||
return lastrowid
|
||||
|
||||
@staticmethod
|
||||
async def add_path(
|
||||
@@ -142,17 +144,20 @@ class MessageRepository:
|
||||
if snr is not None:
|
||||
entry["snr"] = snr
|
||||
new_entry = json.dumps(entry)
|
||||
await db.conn.execute(
|
||||
"""UPDATE messages SET paths = json_insert(
|
||||
COALESCE(paths, '[]'), '$[#]', json(?)
|
||||
) WHERE id = ?""",
|
||||
(new_entry, message_id),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""UPDATE messages SET paths = json_insert(
|
||||
COALESCE(paths, '[]'), '$[#]', json(?)
|
||||
) WHERE id = ?""",
|
||||
(new_entry, message_id),
|
||||
):
|
||||
pass
|
||||
|
||||
# Read back the full list for the return value
|
||||
cursor = await db.conn.execute("SELECT paths FROM messages WHERE id = ?", (message_id,))
|
||||
row = await cursor.fetchone()
|
||||
# Read back the full list for the return value, same transaction.
|
||||
async with conn.execute(
|
||||
"SELECT paths FROM messages WHERE id = ?", (message_id,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row or not row["paths"]:
|
||||
return []
|
||||
|
||||
@@ -171,23 +176,24 @@ class MessageRepository:
|
||||
only a prefix as conversation_key are updated to use the full key.
|
||||
"""
|
||||
lower_key = full_key.lower()
|
||||
cursor = await db.conn.execute(
|
||||
"""UPDATE messages SET conversation_key = ?,
|
||||
sender_key = CASE
|
||||
WHEN sender_key IS NOT NULL AND length(sender_key) < 64
|
||||
AND ? LIKE sender_key || '%'
|
||||
THEN ? ELSE sender_key END
|
||||
WHERE type = 'PRIV' AND length(conversation_key) < 64
|
||||
AND ? LIKE conversation_key || '%'
|
||||
AND (
|
||||
SELECT COUNT(*) FROM contacts
|
||||
WHERE length(public_key) = 64
|
||||
AND public_key LIKE messages.conversation_key || '%'
|
||||
) = 1""",
|
||||
(lower_key, lower_key, lower_key, lower_key),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""UPDATE messages SET conversation_key = ?,
|
||||
sender_key = CASE
|
||||
WHEN sender_key IS NOT NULL AND length(sender_key) < 64
|
||||
AND ? LIKE sender_key || '%'
|
||||
THEN ? ELSE sender_key END
|
||||
WHERE type = 'PRIV' AND length(conversation_key) < 64
|
||||
AND ? LIKE conversation_key || '%'
|
||||
AND (
|
||||
SELECT COUNT(*) FROM contacts
|
||||
WHERE length(public_key) = 64
|
||||
AND public_key LIKE messages.conversation_key || '%'
|
||||
) = 1""",
|
||||
(lower_key, lower_key, lower_key, lower_key),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount
|
||||
|
||||
@staticmethod
|
||||
async def backfill_channel_sender_key(public_key: str, name: str) -> int:
|
||||
@@ -197,21 +203,22 @@ class MessageRepository:
|
||||
any channel messages with a matching sender_name but no sender_key
|
||||
are updated to associate them with this contact's public key.
|
||||
"""
|
||||
cursor = await db.conn.execute(
|
||||
"""UPDATE messages SET sender_key = ?
|
||||
WHERE type = 'CHAN' AND sender_name = ? AND sender_key IS NULL
|
||||
AND (
|
||||
SELECT COUNT(*) FROM contacts
|
||||
WHERE name = ?
|
||||
) = 1
|
||||
AND EXISTS (
|
||||
SELECT 1 FROM contacts
|
||||
WHERE public_key = ? AND name = ?
|
||||
)""",
|
||||
(public_key.lower(), name, name, public_key.lower(), name),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""UPDATE messages SET sender_key = ?
|
||||
WHERE type = 'CHAN' AND sender_name = ? AND sender_key IS NULL
|
||||
AND (
|
||||
SELECT COUNT(*) FROM contacts
|
||||
WHERE name = ?
|
||||
) = 1
|
||||
AND EXISTS (
|
||||
SELECT 1 FROM contacts
|
||||
WHERE public_key = ? AND name = ?
|
||||
)""",
|
||||
(public_key.lower(), name, name, public_key.lower(), name),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount
|
||||
|
||||
@staticmethod
|
||||
def _normalize_conversation_key(conversation_key: str) -> tuple[str, str]:
|
||||
@@ -462,8 +469,9 @@ class MessageRepository:
|
||||
query += " OFFSET ?"
|
||||
params.append(offset)
|
||||
|
||||
cursor = await db.conn.execute(query, params)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(query, params) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [MessageRepository._row_to_message(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
@@ -501,51 +509,54 @@ class MessageRepository:
|
||||
where_sql = " AND ".join(["1=1", *where_parts])
|
||||
|
||||
# 1. Get the target message (must satisfy filters if provided)
|
||||
target_cursor = await db.conn.execute(
|
||||
f"SELECT {MessageRepository._message_select('messages')} "
|
||||
f"FROM messages WHERE id = ? AND {where_sql}",
|
||||
(message_id, *base_params),
|
||||
)
|
||||
target_row = await target_cursor.fetchone()
|
||||
if not target_row:
|
||||
return [], False, False
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
f"SELECT {MessageRepository._message_select('messages')} "
|
||||
f"FROM messages WHERE id = ? AND {where_sql}",
|
||||
(message_id, *base_params),
|
||||
) as target_cursor:
|
||||
target_row = await target_cursor.fetchone()
|
||||
if not target_row:
|
||||
return [], False, False
|
||||
|
||||
target = MessageRepository._row_to_message(target_row)
|
||||
target = MessageRepository._row_to_message(target_row)
|
||||
|
||||
# 2. Get context_size+1 messages before target (DESC)
|
||||
before_query = f"""
|
||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||
AND (received_at < ? OR (received_at = ? AND id < ?))
|
||||
ORDER BY received_at DESC, id DESC LIMIT ?
|
||||
"""
|
||||
before_params = [
|
||||
*base_params,
|
||||
target.received_at,
|
||||
target.received_at,
|
||||
target.id,
|
||||
context_size + 1,
|
||||
]
|
||||
before_cursor = await db.conn.execute(before_query, before_params)
|
||||
before_rows = list(await before_cursor.fetchall())
|
||||
# 2. Get context_size+1 messages before target (DESC)
|
||||
before_query = f"""
|
||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||
AND (received_at < ? OR (received_at = ? AND id < ?))
|
||||
ORDER BY received_at DESC, id DESC LIMIT ?
|
||||
"""
|
||||
before_params = [
|
||||
*base_params,
|
||||
target.received_at,
|
||||
target.received_at,
|
||||
target.id,
|
||||
context_size + 1,
|
||||
]
|
||||
async with conn.execute(before_query, before_params) as before_cursor:
|
||||
before_rows = list(await before_cursor.fetchall())
|
||||
|
||||
has_older = len(before_rows) > context_size
|
||||
before_messages = [MessageRepository._row_to_message(r) for r in before_rows[:context_size]]
|
||||
has_older = len(before_rows) > context_size
|
||||
before_messages = [
|
||||
MessageRepository._row_to_message(r) for r in before_rows[:context_size]
|
||||
]
|
||||
|
||||
# 3. Get context_size+1 messages after target (ASC)
|
||||
after_query = f"""
|
||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||
AND (received_at > ? OR (received_at = ? AND id > ?))
|
||||
ORDER BY received_at ASC, id ASC LIMIT ?
|
||||
"""
|
||||
after_params = [
|
||||
*base_params,
|
||||
target.received_at,
|
||||
target.received_at,
|
||||
target.id,
|
||||
context_size + 1,
|
||||
]
|
||||
after_cursor = await db.conn.execute(after_query, after_params)
|
||||
after_rows = list(await after_cursor.fetchall())
|
||||
# 3. Get context_size+1 messages after target (ASC)
|
||||
after_query = f"""
|
||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||
AND (received_at > ? OR (received_at = ? AND id > ?))
|
||||
ORDER BY received_at ASC, id ASC LIMIT ?
|
||||
"""
|
||||
after_params = [
|
||||
*base_params,
|
||||
target.received_at,
|
||||
target.received_at,
|
||||
target.id,
|
||||
context_size + 1,
|
||||
]
|
||||
async with conn.execute(after_query, after_params) as after_cursor:
|
||||
after_rows = list(await after_cursor.fetchall())
|
||||
|
||||
has_newer = len(after_rows) > context_size
|
||||
after_messages = [MessageRepository._row_to_message(r) for r in after_rows[:context_size]]
|
||||
@@ -556,21 +567,29 @@ class MessageRepository:
|
||||
|
||||
@staticmethod
|
||||
async def increment_ack_count(message_id: int) -> int:
|
||||
"""Increment ack count and return the new value."""
|
||||
cursor = await db.conn.execute(
|
||||
"UPDATE messages SET acked = acked + 1 WHERE id = ? RETURNING acked", (message_id,)
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
await db.conn.commit()
|
||||
"""Increment ack count and return the new value.
|
||||
|
||||
NOTE: ``RETURNING`` leaves the prepared statement active until the
|
||||
row is fetched, so we MUST consume it inside the ``async with``
|
||||
block. Without that, the commit at the end of ``db.tx()`` fails
|
||||
with ``cannot commit transaction - SQL statements in progress``.
|
||||
"""
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE messages SET acked = acked + 1 WHERE id = ? RETURNING acked",
|
||||
(message_id,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["acked"] if row else 1
|
||||
|
||||
@staticmethod
|
||||
async def get_ack_and_paths(message_id: int) -> tuple[int, list[MessagePath] | None]:
|
||||
"""Get the current ack count and paths for a message."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT acked, paths FROM messages WHERE id = ?", (message_id,)
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT acked, paths FROM messages WHERE id = ?", (message_id,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return 0, None
|
||||
return row["acked"], MessageRepository._parse_paths(row["paths"])
|
||||
@@ -578,11 +597,12 @@ class MessageRepository:
|
||||
@staticmethod
|
||||
async def get_by_id(message_id: int) -> "Message | None":
|
||||
"""Look up a message by its ID."""
|
||||
cursor = await db.conn.execute(
|
||||
f"SELECT {MessageRepository._message_select('messages')} FROM messages WHERE id = ?",
|
||||
(message_id,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
f"SELECT {MessageRepository._message_select('messages')} FROM messages WHERE id = ?",
|
||||
(message_id,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
|
||||
@@ -591,11 +611,14 @@ class MessageRepository:
|
||||
@staticmethod
|
||||
async def delete_by_id(message_id: int) -> None:
|
||||
"""Delete a message row by ID."""
|
||||
await db.conn.execute(
|
||||
"UPDATE raw_packets SET message_id = NULL WHERE message_id = ?", (message_id,)
|
||||
)
|
||||
await db.conn.execute("DELETE FROM messages WHERE id = ?", (message_id,))
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE raw_packets SET message_id = NULL WHERE message_id = ?",
|
||||
(message_id,),
|
||||
):
|
||||
pass
|
||||
async with conn.execute("DELETE FROM messages WHERE id = ?", (message_id,)):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_by_content(
|
||||
@@ -618,8 +641,9 @@ class MessageRepository:
|
||||
query += " AND outgoing = ?"
|
||||
params.append(1 if outgoing else 0)
|
||||
query += " ORDER BY id ASC"
|
||||
cursor = await db.conn.execute(query, params)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(query, params) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
|
||||
@@ -653,76 +677,6 @@ class MessageRepository:
|
||||
)
|
||||
blocked_sql = f" AND {blocked_clause}" if blocked_clause else ""
|
||||
|
||||
# Channel unreads
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT m.conversation_key,
|
||||
COUNT(*) as unread_count,
|
||||
SUM(CASE
|
||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||
ELSE 0
|
||||
END) > 0 as has_mention
|
||||
FROM messages m
|
||||
JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.outgoing = 0
|
||||
AND m.received_at > COALESCE(c.last_read_at, 0)
|
||||
{blocked_sql}
|
||||
GROUP BY m.conversation_key
|
||||
""",
|
||||
(mention_token or "", mention_token or "", *blocked_params),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
state_key = f"channel-{row['conversation_key']}"
|
||||
counts[state_key] = row["unread_count"]
|
||||
if mention_token and row["has_mention"]:
|
||||
mention_flags[state_key] = True
|
||||
|
||||
# Contact unreads
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT m.conversation_key,
|
||||
COUNT(*) as unread_count,
|
||||
SUM(CASE
|
||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||
ELSE 0
|
||||
END) > 0 as has_mention
|
||||
FROM messages m
|
||||
LEFT JOIN contacts ct ON m.conversation_key = ct.public_key
|
||||
WHERE m.type = 'PRIV' AND m.outgoing = 0
|
||||
AND m.received_at > COALESCE(ct.last_read_at, 0)
|
||||
{blocked_sql}
|
||||
GROUP BY m.conversation_key
|
||||
""",
|
||||
(mention_token or "", mention_token or "", *blocked_params),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
state_key = f"contact-{row['conversation_key']}"
|
||||
counts[state_key] = row["unread_count"]
|
||||
if mention_token and row["has_mention"]:
|
||||
mention_flags[state_key] = True
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT key, last_read_at
|
||||
FROM channels
|
||||
"""
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
last_read_ats[f"channel-{row['key']}"] = row["last_read_at"]
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT public_key, last_read_at
|
||||
FROM contacts
|
||||
"""
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
last_read_ats[f"contact-{row['public_key']}"] = row["last_read_at"]
|
||||
|
||||
# Last message times for all conversations (including read ones),
|
||||
# excluding blocked incoming traffic so refresh matches live WS behavior.
|
||||
last_time_clause, last_time_params = MessageRepository._build_blocked_incoming_clause(
|
||||
@@ -730,20 +684,94 @@ class MessageRepository:
|
||||
)
|
||||
last_time_where_sql = f"WHERE {last_time_clause}" if last_time_clause else ""
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT type, conversation_key, MAX(received_at) as last_message_time
|
||||
FROM messages
|
||||
{last_time_where_sql}
|
||||
GROUP BY type, conversation_key
|
||||
""",
|
||||
last_time_params,
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
prefix = "channel" if row["type"] == "CHAN" else "contact"
|
||||
state_key = f"{prefix}-{row['conversation_key']}"
|
||||
last_message_times[state_key] = row["last_message_time"]
|
||||
# Single readonly acquisition for all 5 queries — they form one logical
|
||||
# snapshot, and holding the lock for the batch is cheaper than acquiring
|
||||
# it 5 times.
|
||||
async with db.readonly() as conn:
|
||||
# Channel unreads
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT m.conversation_key,
|
||||
COUNT(*) as unread_count,
|
||||
SUM(CASE
|
||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||
ELSE 0
|
||||
END) > 0 as has_mention
|
||||
FROM messages m
|
||||
JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.outgoing = 0
|
||||
AND m.received_at > COALESCE(c.last_read_at, 0)
|
||||
{blocked_sql}
|
||||
GROUP BY m.conversation_key
|
||||
""",
|
||||
(mention_token or "", mention_token or "", *blocked_params),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
state_key = f"channel-{row['conversation_key']}"
|
||||
counts[state_key] = row["unread_count"]
|
||||
if mention_token and row["has_mention"]:
|
||||
mention_flags[state_key] = True
|
||||
|
||||
# Contact unreads
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT m.conversation_key,
|
||||
COUNT(*) as unread_count,
|
||||
SUM(CASE
|
||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||
ELSE 0
|
||||
END) > 0 as has_mention
|
||||
FROM messages m
|
||||
LEFT JOIN contacts ct ON m.conversation_key = ct.public_key
|
||||
WHERE m.type = 'PRIV' AND m.outgoing = 0
|
||||
AND m.received_at > COALESCE(ct.last_read_at, 0)
|
||||
{blocked_sql}
|
||||
GROUP BY m.conversation_key
|
||||
""",
|
||||
(mention_token or "", mention_token or "", *blocked_params),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
state_key = f"contact-{row['conversation_key']}"
|
||||
counts[state_key] = row["unread_count"]
|
||||
if mention_token and row["has_mention"]:
|
||||
mention_flags[state_key] = True
|
||||
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT key, last_read_at
|
||||
FROM channels
|
||||
"""
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
last_read_ats[f"channel-{row['key']}"] = row["last_read_at"]
|
||||
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT public_key, last_read_at
|
||||
FROM contacts
|
||||
"""
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
last_read_ats[f"contact-{row['public_key']}"] = row["last_read_at"]
|
||||
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT type, conversation_key, MAX(received_at) as last_message_time
|
||||
FROM messages
|
||||
{last_time_where_sql}
|
||||
GROUP BY type, conversation_key
|
||||
""",
|
||||
last_time_params,
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
for row in rows:
|
||||
prefix = "channel" if row["type"] == "CHAN" else "contact"
|
||||
state_key = f"{prefix}-{row['conversation_key']}"
|
||||
last_message_times[state_key] = row["last_message_time"]
|
||||
|
||||
# Only include last_read_ats for conversations that actually have messages.
|
||||
# Without this filter, every contact heard via advertisement (even without
|
||||
@@ -760,41 +788,45 @@ class MessageRepository:
|
||||
@staticmethod
|
||||
async def count_dm_messages(contact_key: str) -> int:
|
||||
"""Count total DM messages for a contact."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'PRIV' AND conversation_key = ?",
|
||||
(contact_key.lower(),),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'PRIV' AND conversation_key = ?",
|
||||
(contact_key.lower(),),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["cnt"] if row else 0
|
||||
|
||||
@staticmethod
|
||||
async def count_channel_messages_by_sender(sender_key: str) -> int:
|
||||
"""Count channel messages sent by a specific contact."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_key = ?",
|
||||
(sender_key.lower(),),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_key = ?",
|
||||
(sender_key.lower(),),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["cnt"] if row else 0
|
||||
|
||||
@staticmethod
|
||||
async def count_channel_messages_by_sender_name(sender_name: str) -> int:
|
||||
"""Count channel messages attributed to a display name."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||
(sender_name,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||
(sender_name,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["cnt"] if row else 0
|
||||
|
||||
@staticmethod
|
||||
async def get_first_channel_message_by_sender_name(sender_name: str) -> int | None:
|
||||
"""Get the earliest stored channel message timestamp for a display name."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT MIN(received_at) AS first_seen FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||
(sender_name,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT MIN(received_at) AS first_seen FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||
(sender_name,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["first_seen"] if row and row["first_seen"] is not None else None
|
||||
|
||||
@staticmethod
|
||||
@@ -813,68 +845,76 @@ class MessageRepository:
|
||||
t_48h = now - 172800
|
||||
t_7d = now - 604800
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS all_time,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_1h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_24h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_48h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_7d,
|
||||
MIN(received_at) AS first_message_at,
|
||||
COUNT(DISTINCT sender_key) AS unique_sender_count
|
||||
FROM messages WHERE type = 'CHAN' AND conversation_key = ?
|
||||
""",
|
||||
(t_1h, t_24h, t_48h, t_7d, conversation_key),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None # Aggregate query always returns a row
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS all_time,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_1h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_24h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_48h,
|
||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_7d,
|
||||
MIN(received_at) AS first_message_at,
|
||||
COUNT(DISTINCT sender_key) AS unique_sender_count
|
||||
FROM messages WHERE type = 'CHAN' AND conversation_key = ?
|
||||
""",
|
||||
(t_1h, t_24h, t_48h, t_7d, conversation_key),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None # Aggregate query always returns a row
|
||||
|
||||
message_counts = {
|
||||
"last_1h": row["last_1h"] or 0,
|
||||
"last_24h": row["last_24h"] or 0,
|
||||
"last_48h": row["last_48h"] or 0,
|
||||
"last_7d": row["last_7d"] or 0,
|
||||
"all_time": row["all_time"] or 0,
|
||||
}
|
||||
|
||||
cursor2 = await db.conn.execute(
|
||||
"""
|
||||
SELECT COALESCE(sender_name, sender_key, 'Unknown') AS display_name,
|
||||
sender_key, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE type = 'CHAN' AND conversation_key = ?
|
||||
AND received_at >= ? AND sender_key IS NOT NULL
|
||||
GROUP BY sender_key ORDER BY cnt DESC LIMIT 5
|
||||
""",
|
||||
(conversation_key, t_24h),
|
||||
)
|
||||
top_rows = await cursor2.fetchall()
|
||||
top_senders = [
|
||||
{
|
||||
"sender_name": r["display_name"],
|
||||
"sender_key": r["sender_key"],
|
||||
"message_count": r["cnt"],
|
||||
message_counts = {
|
||||
"last_1h": row["last_1h"] or 0,
|
||||
"last_24h": row["last_24h"] or 0,
|
||||
"last_48h": row["last_48h"] or 0,
|
||||
"last_7d": row["last_7d"] or 0,
|
||||
"all_time": row["all_time"] or 0,
|
||||
}
|
||||
for r in top_rows
|
||||
]
|
||||
|
||||
# Path hash width distribution for last 24h (in-Python parse of raw packet envelopes)
|
||||
cursor3 = await db.conn.execute(
|
||||
"""
|
||||
SELECT rp.data FROM raw_packets rp
|
||||
JOIN messages m ON rp.message_id = m.id
|
||||
WHERE m.type = 'CHAN' AND m.conversation_key = ?
|
||||
AND rp.timestamp >= ?
|
||||
""",
|
||||
(conversation_key, t_24h),
|
||||
)
|
||||
rows3 = await cursor3.fetchall()
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COALESCE(sender_name, sender_key, 'Unknown') AS display_name,
|
||||
sender_key, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE type = 'CHAN' AND conversation_key = ?
|
||||
AND received_at >= ? AND sender_key IS NOT NULL
|
||||
GROUP BY sender_key ORDER BY cnt DESC LIMIT 5
|
||||
""",
|
||||
(conversation_key, t_24h),
|
||||
) as cursor:
|
||||
top_rows = await cursor.fetchall()
|
||||
top_senders = [
|
||||
{
|
||||
"sender_name": r["display_name"],
|
||||
"sender_key": r["sender_key"],
|
||||
"message_count": r["cnt"],
|
||||
}
|
||||
for r in top_rows
|
||||
]
|
||||
|
||||
# Path hash width distribution for last 24h: fetch raw rows under
|
||||
# the lock, then release BEFORE the CPU-bound in-Python envelope
|
||||
# parse. Parsing can iterate thousands of rows and previously held
|
||||
# the DB lock for the whole traversal — blocking every other repo
|
||||
# caller on a Pi. Keep the lock only for the fetch.
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT rp.data FROM raw_packets rp
|
||||
JOIN messages m ON rp.message_id = m.id
|
||||
WHERE m.type = 'CHAN' AND m.conversation_key = ?
|
||||
AND rp.timestamp >= ?
|
||||
""",
|
||||
(conversation_key, t_24h),
|
||||
) as cursor:
|
||||
rows3 = await cursor.fetchall()
|
||||
first_message_at = row["first_message_at"]
|
||||
unique_sender_count = row["unique_sender_count"] or 0
|
||||
|
||||
path_hash_width_24h = bucket_path_hash_widths(rows3)
|
||||
|
||||
return {
|
||||
"message_counts": message_counts,
|
||||
"first_message_at": row["first_message_at"],
|
||||
"unique_sender_count": row["unique_sender_count"] or 0,
|
||||
"first_message_at": first_message_at,
|
||||
"unique_sender_count": unique_sender_count,
|
||||
"top_senders_24h": top_senders,
|
||||
"path_hash_width_24h": path_hash_width_24h,
|
||||
}
|
||||
@@ -882,14 +922,15 @@ class MessageRepository:
|
||||
@staticmethod
|
||||
async def count_channels_with_incoming_messages() -> int:
|
||||
"""Count distinct channel conversations with at least one incoming message."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT COUNT(DISTINCT conversation_key) AS cnt
|
||||
FROM messages
|
||||
WHERE type = 'CHAN' AND outgoing = 0
|
||||
"""
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COUNT(DISTINCT conversation_key) AS cnt
|
||||
FROM messages
|
||||
WHERE type = 'CHAN' AND outgoing = 0
|
||||
"""
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return int(row["cnt"]) if row and row["cnt"] is not None else 0
|
||||
|
||||
@staticmethod
|
||||
@@ -898,20 +939,21 @@ class MessageRepository:
|
||||
|
||||
Returns list of (channel_key, channel_name, message_count) tuples.
|
||||
"""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS cnt
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.sender_key = ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY cnt DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(sender_key.lower(), limit),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS cnt
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.sender_key = ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY cnt DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(sender_key.lower(), limit),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
@@ -919,34 +961,36 @@ class MessageRepository:
|
||||
sender_name: str, limit: int = 5
|
||||
) -> list[tuple[str, str, int]]:
|
||||
"""Get channels where a display name has sent the most messages."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS cnt
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.sender_name = ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY cnt DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(sender_name, limit),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS cnt
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.sender_name = ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY cnt DESC
|
||||
LIMIT ?
|
||||
""",
|
||||
(sender_name, limit),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def _get_activity_hour_buckets(where_sql: str, params: list[Any]) -> dict[int, int]:
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT received_at / 3600 AS hour_bucket, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE {where_sql}
|
||||
GROUP BY hour_bucket
|
||||
""",
|
||||
params,
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT received_at / 3600 AS hour_bucket, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE {where_sql}
|
||||
GROUP BY hour_bucket
|
||||
""",
|
||||
params,
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return {int(row["hour_bucket"]): row["cnt"] for row in rows}
|
||||
|
||||
@staticmethod
|
||||
@@ -1000,16 +1044,17 @@ class MessageRepository:
|
||||
current_day_start = (now // 86400) * 86400
|
||||
start = current_day_start - (weeks - 1) * bucket_seconds
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT (received_at - ?) / ? AS bucket_idx, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE {where_sql} AND received_at >= ?
|
||||
GROUP BY bucket_idx
|
||||
""",
|
||||
[start, bucket_seconds, *params, start],
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT (received_at - ?) / ? AS bucket_idx, COUNT(*) AS cnt
|
||||
FROM messages
|
||||
WHERE {where_sql} AND received_at >= ?
|
||||
GROUP BY bucket_idx
|
||||
""",
|
||||
[start, bucket_seconds, *params, start],
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
counts = {int(row["bucket_idx"]): row["cnt"] for row in rows}
|
||||
|
||||
return [
|
||||
|
||||
@@ -0,0 +1,162 @@
|
||||
"""Repository for push_subscriptions table."""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Any
|
||||
|
||||
from app.database import db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Auto-delete subscriptions that have failed this many times consecutively
|
||||
# without any successful delivery in between.
|
||||
MAX_CONSECUTIVE_FAILURES = 15
|
||||
|
||||
|
||||
def _row_to_dict(row: Any) -> dict[str, Any]:
|
||||
return {
|
||||
"id": row["id"],
|
||||
"endpoint": row["endpoint"],
|
||||
"p256dh": row["p256dh"],
|
||||
"auth": row["auth"],
|
||||
"label": row["label"] or "",
|
||||
"created_at": row["created_at"] or 0,
|
||||
"last_success_at": row["last_success_at"],
|
||||
"failure_count": row["failure_count"] or 0,
|
||||
}
|
||||
|
||||
|
||||
class PushSubscriptionRepository:
|
||||
@staticmethod
|
||||
async def create(
|
||||
endpoint: str,
|
||||
p256dh: str,
|
||||
auth: str,
|
||||
label: str = "",
|
||||
) -> dict[str, Any]:
|
||||
"""Create or upsert a push subscription (keyed by endpoint)."""
|
||||
sub_id = str(uuid.uuid4())
|
||||
now = int(time.time())
|
||||
|
||||
async with db.tx() as conn:
|
||||
await conn.execute(
|
||||
"""
|
||||
INSERT INTO push_subscriptions
|
||||
(id, endpoint, p256dh, auth, label, created_at, failure_count)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 0)
|
||||
ON CONFLICT(endpoint) DO UPDATE SET
|
||||
p256dh = excluded.p256dh,
|
||||
auth = excluded.auth,
|
||||
label = CASE WHEN excluded.label != '' THEN excluded.label
|
||||
ELSE push_subscriptions.label END,
|
||||
failure_count = 0
|
||||
""",
|
||||
(sub_id, endpoint, p256dh, auth, label, now),
|
||||
)
|
||||
async with conn.execute(
|
||||
"SELECT * FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
|
||||
return _row_to_dict(row) if row else {"id": sub_id} # type: ignore[arg-type]
|
||||
|
||||
@staticmethod
|
||||
async def get(subscription_id: str) -> dict[str, Any] | None:
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return _row_to_dict(row) if row else None
|
||||
|
||||
@staticmethod
|
||||
async def get_by_endpoint(endpoint: str) -> dict[str, Any] | None:
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return _row_to_dict(row) if row else None
|
||||
|
||||
@staticmethod
|
||||
async def get_all() -> list[dict[str, Any]]:
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT * FROM push_subscriptions ORDER BY created_at DESC"
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [_row_to_dict(row) for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def update(subscription_id: str, **fields: Any) -> dict[str, Any] | None:
|
||||
updates: list[str] = []
|
||||
params: list[Any] = []
|
||||
|
||||
if "label" in fields:
|
||||
updates.append("label = ?")
|
||||
params.append(fields["label"])
|
||||
|
||||
if not updates:
|
||||
return await PushSubscriptionRepository.get(subscription_id)
|
||||
|
||||
params.append(subscription_id)
|
||||
async with db.tx() as conn:
|
||||
await conn.execute(
|
||||
f"UPDATE push_subscriptions SET {', '.join(updates)} WHERE id = ?",
|
||||
params,
|
||||
)
|
||||
async with conn.execute(
|
||||
"SELECT * FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return _row_to_dict(row) if row else None
|
||||
|
||||
@staticmethod
|
||||
async def delete(subscription_id: str) -> bool:
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"DELETE FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||
) as cursor:
|
||||
return cursor.rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def delete_by_endpoint(endpoint: str) -> bool:
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"DELETE FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||
) as cursor:
|
||||
return cursor.rowcount > 0
|
||||
|
||||
@staticmethod
|
||||
async def batch_record_outcomes(
|
||||
success_ids: list[str], failure_ids: list[str], remove_ids: list[str]
|
||||
) -> None:
|
||||
"""Batch-update delivery outcomes in a single transaction."""
|
||||
now = int(time.time())
|
||||
async with db.tx() as conn:
|
||||
if remove_ids:
|
||||
placeholders = ",".join("?" for _ in remove_ids)
|
||||
await conn.execute(
|
||||
f"DELETE FROM push_subscriptions WHERE id IN ({placeholders})",
|
||||
remove_ids,
|
||||
)
|
||||
if success_ids:
|
||||
placeholders = ",".join("?" for _ in success_ids)
|
||||
await conn.execute(
|
||||
f"UPDATE push_subscriptions SET last_success_at = ?, failure_count = 0 "
|
||||
f"WHERE id IN ({placeholders})",
|
||||
[now, *success_ids],
|
||||
)
|
||||
if failure_ids:
|
||||
placeholders = ",".join("?" for _ in failure_ids)
|
||||
await conn.execute(
|
||||
f"UPDATE push_subscriptions SET failure_count = failure_count + 1 "
|
||||
f"WHERE id IN ({placeholders})",
|
||||
failure_ids,
|
||||
)
|
||||
# Evict subscriptions that have exceeded the failure threshold
|
||||
await conn.execute(
|
||||
"DELETE FROM push_subscriptions WHERE failure_count >= ?",
|
||||
(MAX_CONSECUTIVE_FAILURES,),
|
||||
)
|
||||
@@ -34,65 +34,85 @@ class RawPacketRepository:
|
||||
# For malformed packets, hash the full data
|
||||
payload_hash = sha256(data).digest()
|
||||
|
||||
cursor = await db.conn.execute(
|
||||
"INSERT OR IGNORE INTO raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)",
|
||||
(ts, data, payload_hash),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"INSERT OR IGNORE INTO raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)",
|
||||
(ts, data, payload_hash),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
lastrowid = cursor.lastrowid
|
||||
|
||||
if cursor.rowcount > 0:
|
||||
assert cursor.lastrowid is not None
|
||||
return (cursor.lastrowid, True)
|
||||
if rowcount > 0:
|
||||
assert lastrowid is not None
|
||||
return (lastrowid, True)
|
||||
|
||||
# Duplicate payload — look up the existing row.
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT id FROM raw_packets WHERE payload_hash = ?", (payload_hash,)
|
||||
)
|
||||
existing = await cursor.fetchone()
|
||||
# Duplicate payload — look up the existing row (same transaction).
|
||||
async with conn.execute(
|
||||
"SELECT id FROM raw_packets WHERE payload_hash = ?", (payload_hash,)
|
||||
) as cursor:
|
||||
existing = await cursor.fetchone()
|
||||
assert existing is not None
|
||||
return (existing["id"], False)
|
||||
|
||||
@staticmethod
|
||||
async def get_undecrypted_count() -> int:
|
||||
"""Get count of undecrypted packets (those without a linked message)."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT COUNT(*) as count FROM raw_packets WHERE message_id IS NULL"
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) as count FROM raw_packets WHERE message_id IS NULL"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["count"] if row else 0
|
||||
|
||||
@staticmethod
|
||||
async def get_oldest_undecrypted() -> int | None:
|
||||
"""Get timestamp of oldest undecrypted packet, or None if none exist."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT MIN(timestamp) as oldest FROM raw_packets WHERE message_id IS NULL"
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT MIN(timestamp) as oldest FROM raw_packets WHERE message_id IS NULL"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
return row["oldest"] if row and row["oldest"] is not None else None
|
||||
|
||||
@staticmethod
|
||||
async def _stream_undecrypted_rows(
|
||||
batch_size: int,
|
||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||
"""Internal: keyset-paginated scan of every undecrypted raw packet.
|
||||
|
||||
Yields ``(id, data, timestamp)`` for each row across all batches.
|
||||
Lock is acquired per batch only — concurrent writes can interleave
|
||||
at batch boundaries rather than being blocked for the full scan.
|
||||
Each batch opens a fresh cursor and consumes it fully with
|
||||
``fetchall()`` before releasing, so no prepared statement is alive
|
||||
at a yield boundary.
|
||||
|
||||
``last_id`` advances per row, not per yield, so external filters
|
||||
(see ``stream_undecrypted_text_messages``) that drop rows do not
|
||||
cause a re-scan of skipped IDs.
|
||||
"""
|
||||
last_id = -1
|
||||
while True:
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT id, data, timestamp FROM raw_packets "
|
||||
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||
(last_id, batch_size),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
if not rows:
|
||||
return
|
||||
for row in rows:
|
||||
last_id = row["id"]
|
||||
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
||||
|
||||
@staticmethod
|
||||
async def stream_all_undecrypted(
|
||||
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches.
|
||||
|
||||
Uses keyset pagination so each batch is a fresh query with a fully
|
||||
consumed cursor — no open statement held across yield boundaries.
|
||||
"""
|
||||
last_id = -1
|
||||
while True:
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT id, data, timestamp FROM raw_packets "
|
||||
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||
(last_id, batch_size),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
await cursor.close()
|
||||
if not rows:
|
||||
break
|
||||
for row in rows:
|
||||
last_id = row["id"]
|
||||
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
||||
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches."""
|
||||
async for row in RawPacketRepository._stream_undecrypted_rows(batch_size):
|
||||
yield row
|
||||
|
||||
@staticmethod
|
||||
async def stream_undecrypted_text_messages(
|
||||
@@ -100,26 +120,15 @@ class RawPacketRepository:
|
||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||
"""Yield undecrypted TEXT_MESSAGE packets in bounded-size batches.
|
||||
|
||||
Uses keyset pagination so each batch is a fresh query with a fully
|
||||
consumed cursor — no open statement held across yield boundaries.
|
||||
Filters the shared scan to rows whose payload parses as a text
|
||||
message. Non-matching rows still advance the keyset cursor so they
|
||||
aren't re-fetched on subsequent batches.
|
||||
"""
|
||||
last_id = -1
|
||||
while True:
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT id, data, timestamp FROM raw_packets "
|
||||
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||
(last_id, batch_size),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
await cursor.close()
|
||||
if not rows:
|
||||
break
|
||||
for row in rows:
|
||||
last_id = row["id"]
|
||||
data = bytes(row["data"])
|
||||
payload_type = get_packet_payload_type(data)
|
||||
if payload_type == PayloadType.TEXT_MESSAGE:
|
||||
yield (row["id"], data, row["timestamp"])
|
||||
async for packet_id, data, timestamp in RawPacketRepository._stream_undecrypted_rows(
|
||||
batch_size
|
||||
):
|
||||
if get_packet_payload_type(data) == PayloadType.TEXT_MESSAGE:
|
||||
yield (packet_id, data, timestamp)
|
||||
|
||||
@staticmethod
|
||||
async def count_undecrypted_text_messages(
|
||||
@@ -136,20 +145,22 @@ class RawPacketRepository:
|
||||
@staticmethod
|
||||
async def mark_decrypted(packet_id: int, message_id: int) -> None:
|
||||
"""Link a raw packet to its decrypted message."""
|
||||
await db.conn.execute(
|
||||
"UPDATE raw_packets SET message_id = ? WHERE id = ?",
|
||||
(message_id, packet_id),
|
||||
)
|
||||
await db.conn.commit()
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"UPDATE raw_packets SET message_id = ? WHERE id = ?",
|
||||
(message_id, packet_id),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_linked_message_id(packet_id: int) -> int | None:
|
||||
"""Return the linked message ID for a raw packet, if any."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT message_id FROM raw_packets WHERE id = ?",
|
||||
(packet_id,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT message_id FROM raw_packets WHERE id = ?",
|
||||
(packet_id,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
return row["message_id"]
|
||||
@@ -157,11 +168,12 @@ class RawPacketRepository:
|
||||
@staticmethod
|
||||
async def get_by_id(packet_id: int) -> tuple[int, bytes, int, int | None] | None:
|
||||
"""Return a raw packet row as (id, data, timestamp, message_id)."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT id, data, timestamp, message_id FROM raw_packets WHERE id = ?",
|
||||
(packet_id,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT id, data, timestamp, message_id FROM raw_packets WHERE id = ?",
|
||||
(packet_id,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
return (row["id"], bytes(row["data"]), row["timestamp"], row["message_id"])
|
||||
@@ -170,16 +182,20 @@ class RawPacketRepository:
|
||||
async def prune_old_undecrypted(max_age_days: int) -> int:
|
||||
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
|
||||
cutoff = int(time.time()) - (max_age_days * 86400)
|
||||
cursor = await db.conn.execute(
|
||||
"DELETE FROM raw_packets WHERE message_id IS NULL AND timestamp < ?",
|
||||
(cutoff,),
|
||||
)
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"DELETE FROM raw_packets WHERE message_id IS NULL AND timestamp < ?",
|
||||
(cutoff,),
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount
|
||||
|
||||
@staticmethod
|
||||
async def purge_linked_to_messages() -> int:
|
||||
"""Delete raw packets that are already linked to a stored message."""
|
||||
cursor = await db.conn.execute("DELETE FROM raw_packets WHERE message_id IS NOT NULL")
|
||||
await db.conn.commit()
|
||||
return cursor.rowcount
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"DELETE FROM raw_packets WHERE message_id IS NOT NULL"
|
||||
) as cursor:
|
||||
rowcount = cursor.rowcount
|
||||
return rowcount
|
||||
|
||||
@@ -21,51 +21,54 @@ class RepeaterTelemetryRepository:
|
||||
data: dict,
|
||||
) -> None:
|
||||
"""Insert a telemetry history row and prune stale entries."""
|
||||
await db.conn.execute(
|
||||
"""
|
||||
INSERT INTO repeater_telemetry_history
|
||||
(public_key, timestamp, data)
|
||||
VALUES (?, ?, ?)
|
||||
""",
|
||||
(public_key, timestamp, json.dumps(data)),
|
||||
)
|
||||
|
||||
# Prune entries older than 30 days
|
||||
cutoff = int(time.time()) - _MAX_AGE_SECONDS
|
||||
await db.conn.execute(
|
||||
"DELETE FROM repeater_telemetry_history WHERE public_key = ? AND timestamp < ?",
|
||||
(public_key, cutoff),
|
||||
)
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
INSERT INTO repeater_telemetry_history
|
||||
(public_key, timestamp, data)
|
||||
VALUES (?, ?, ?)
|
||||
""",
|
||||
(public_key, timestamp, json.dumps(data)),
|
||||
):
|
||||
pass
|
||||
|
||||
# Cap at _MAX_ENTRIES_PER_REPEATER (keep newest)
|
||||
await db.conn.execute(
|
||||
"""
|
||||
DELETE FROM repeater_telemetry_history
|
||||
WHERE public_key = ? AND id NOT IN (
|
||||
SELECT id FROM repeater_telemetry_history
|
||||
WHERE public_key = ?
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ?
|
||||
)
|
||||
""",
|
||||
(public_key, public_key, _MAX_ENTRIES_PER_REPEATER),
|
||||
)
|
||||
# Prune entries older than 30 days
|
||||
async with conn.execute(
|
||||
"DELETE FROM repeater_telemetry_history WHERE public_key = ? AND timestamp < ?",
|
||||
(public_key, cutoff),
|
||||
):
|
||||
pass
|
||||
|
||||
await db.conn.commit()
|
||||
# Cap at _MAX_ENTRIES_PER_REPEATER (keep newest)
|
||||
async with conn.execute(
|
||||
"""
|
||||
DELETE FROM repeater_telemetry_history
|
||||
WHERE public_key = ? AND id NOT IN (
|
||||
SELECT id FROM repeater_telemetry_history
|
||||
WHERE public_key = ?
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ?
|
||||
)
|
||||
""",
|
||||
(public_key, public_key, _MAX_ENTRIES_PER_REPEATER),
|
||||
):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
async def get_history(public_key: str, since_timestamp: int) -> list[dict]:
|
||||
"""Return telemetry rows for a repeater since a given timestamp, ordered ASC."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT timestamp, data
|
||||
FROM repeater_telemetry_history
|
||||
WHERE public_key = ? AND timestamp >= ?
|
||||
ORDER BY timestamp ASC
|
||||
""",
|
||||
(public_key, since_timestamp),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT timestamp, data
|
||||
FROM repeater_telemetry_history
|
||||
WHERE public_key = ? AND timestamp >= ?
|
||||
ORDER BY timestamp ASC
|
||||
""",
|
||||
(public_key, since_timestamp),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [
|
||||
{
|
||||
"timestamp": row["timestamp"],
|
||||
@@ -73,3 +76,25 @@ class RepeaterTelemetryRepository:
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
async def get_latest(public_key: str) -> dict | None:
|
||||
"""Return the most recent telemetry row for a repeater, or None."""
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT timestamp, data
|
||||
FROM repeater_telemetry_history
|
||||
WHERE public_key = ?
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT 1
|
||||
""",
|
||||
(public_key,),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
return {
|
||||
"timestamp": row["timestamp"],
|
||||
"data": json.loads(row["data"]),
|
||||
}
|
||||
|
||||
+319
-137
@@ -3,9 +3,12 @@ import logging
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
import aiosqlite
|
||||
|
||||
from app.database import db
|
||||
from app.models import AppSettings
|
||||
from app.path_utils import bucket_path_hash_widths
|
||||
from app.telemetry_interval import DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -16,25 +19,34 @@ SECONDS_7D = 604800
|
||||
|
||||
|
||||
class AppSettingsRepository:
|
||||
"""Repository for app_settings table (single-row pattern)."""
|
||||
"""Repository for app_settings table (single-row pattern).
|
||||
|
||||
Public methods acquire the DB lock exactly once. ``toggle_*`` helpers that
|
||||
need a read-modify-write do so inside a single ``db.tx()`` — the internal
|
||||
``_get_in_conn`` / ``_apply_updates`` helpers run under the caller's
|
||||
already-held lock and must NEVER call ``db.tx()`` or ``db.readonly()``.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
async def get() -> AppSettings:
|
||||
"""Get the current app settings.
|
||||
async def _get_in_conn(conn: aiosqlite.Connection) -> AppSettings:
|
||||
"""Load settings using an already-acquired connection.
|
||||
|
||||
Always returns settings - creates default row if needed (migration handles initial row).
|
||||
Used by the public ``get()`` and by multi-step operations
|
||||
(``toggle_blocked_key``, ``toggle_blocked_name``) to avoid re-entering
|
||||
the non-reentrant DB lock.
|
||||
"""
|
||||
cursor = await db.conn.execute(
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT max_radio_contacts, auto_decrypt_dm_on_advert,
|
||||
last_message_times,
|
||||
advert_interval, last_advert_time, flood_scope,
|
||||
blocked_keys, blocked_names, discovery_blocked_types,
|
||||
tracked_telemetry_repeaters, auto_resend_channel
|
||||
tracked_telemetry_repeaters, auto_resend_channel,
|
||||
telemetry_interval_hours
|
||||
FROM app_settings WHERE id = 1
|
||||
"""
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
|
||||
if not row:
|
||||
# Should not happen after migration, but handle gracefully
|
||||
@@ -91,6 +103,16 @@ class AppSettingsRepository:
|
||||
except (KeyError, TypeError):
|
||||
auto_resend_channel = False
|
||||
|
||||
# Parse telemetry_interval_hours (migration adds the column with
|
||||
# default=8, but guard against older rows / partial migrations).
|
||||
try:
|
||||
raw_interval = row["telemetry_interval_hours"]
|
||||
telemetry_interval_hours = (
|
||||
int(raw_interval) if raw_interval is not None else DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
)
|
||||
except (KeyError, TypeError, ValueError):
|
||||
telemetry_interval_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
|
||||
return AppSettings(
|
||||
max_radio_contacts=row["max_radio_contacts"],
|
||||
auto_decrypt_dm_on_advert=bool(row["auto_decrypt_dm_on_advert"]),
|
||||
@@ -103,10 +125,13 @@ class AppSettingsRepository:
|
||||
discovery_blocked_types=discovery_blocked_types,
|
||||
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||
auto_resend_channel=auto_resend_channel,
|
||||
telemetry_interval_hours=telemetry_interval_hours,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def update(
|
||||
async def _apply_updates(
|
||||
conn: aiosqlite.Connection,
|
||||
*,
|
||||
max_radio_contacts: int | None = None,
|
||||
auto_decrypt_dm_on_advert: bool | None = None,
|
||||
last_message_times: dict[str, int] | None = None,
|
||||
@@ -118,9 +143,14 @@ class AppSettingsRepository:
|
||||
discovery_blocked_types: list[int] | None = None,
|
||||
tracked_telemetry_repeaters: list[str] | None = None,
|
||||
auto_resend_channel: bool | None = None,
|
||||
) -> AppSettings:
|
||||
"""Update app settings. Only provided fields are updated."""
|
||||
updates = []
|
||||
telemetry_interval_hours: int | None = None,
|
||||
) -> None:
|
||||
"""Apply field updates using an already-acquired connection.
|
||||
|
||||
Emits a single UPDATE statement inside the caller's transaction. Does
|
||||
NOT commit — the caller's ``db.tx()`` handles that.
|
||||
"""
|
||||
updates: list[str] = []
|
||||
params: list[Any] = []
|
||||
|
||||
if max_radio_contacts is not None:
|
||||
@@ -167,49 +197,186 @@ class AppSettingsRepository:
|
||||
updates.append("auto_resend_channel = ?")
|
||||
params.append(1 if auto_resend_channel else 0)
|
||||
|
||||
if telemetry_interval_hours is not None:
|
||||
updates.append("telemetry_interval_hours = ?")
|
||||
params.append(telemetry_interval_hours)
|
||||
|
||||
if updates:
|
||||
query = f"UPDATE app_settings SET {', '.join(updates)} WHERE id = 1"
|
||||
await db.conn.execute(query, params)
|
||||
await db.conn.commit()
|
||||
async with conn.execute(query, params):
|
||||
pass
|
||||
|
||||
return await AppSettingsRepository.get()
|
||||
@staticmethod
|
||||
async def get() -> AppSettings:
|
||||
"""Get the current app settings.
|
||||
|
||||
Always returns settings - creates default row if needed (migration handles initial row).
|
||||
"""
|
||||
async with db.readonly() as conn:
|
||||
return await AppSettingsRepository._get_in_conn(conn)
|
||||
|
||||
@staticmethod
|
||||
async def update(
|
||||
max_radio_contacts: int | None = None,
|
||||
auto_decrypt_dm_on_advert: bool | None = None,
|
||||
last_message_times: dict[str, int] | None = None,
|
||||
advert_interval: int | None = None,
|
||||
last_advert_time: int | None = None,
|
||||
flood_scope: str | None = None,
|
||||
blocked_keys: list[str] | None = None,
|
||||
blocked_names: list[str] | None = None,
|
||||
discovery_blocked_types: list[int] | None = None,
|
||||
tracked_telemetry_repeaters: list[str] | None = None,
|
||||
auto_resend_channel: bool | None = None,
|
||||
telemetry_interval_hours: int | None = None,
|
||||
) -> AppSettings:
|
||||
"""Update app settings. Only provided fields are updated."""
|
||||
async with db.tx() as conn:
|
||||
await AppSettingsRepository._apply_updates(
|
||||
conn,
|
||||
max_radio_contacts=max_radio_contacts,
|
||||
auto_decrypt_dm_on_advert=auto_decrypt_dm_on_advert,
|
||||
last_message_times=last_message_times,
|
||||
advert_interval=advert_interval,
|
||||
last_advert_time=last_advert_time,
|
||||
flood_scope=flood_scope,
|
||||
blocked_keys=blocked_keys,
|
||||
blocked_names=blocked_names,
|
||||
discovery_blocked_types=discovery_blocked_types,
|
||||
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||
auto_resend_channel=auto_resend_channel,
|
||||
telemetry_interval_hours=telemetry_interval_hours,
|
||||
)
|
||||
return await AppSettingsRepository._get_in_conn(conn)
|
||||
|
||||
@staticmethod
|
||||
async def toggle_blocked_key(key: str) -> AppSettings:
|
||||
"""Toggle a public key in the blocked list. Keys are normalized to lowercase."""
|
||||
"""Toggle a public key in the blocked list. Keys are normalized to lowercase.
|
||||
|
||||
Read-modify-write is atomic under a single ``db.tx()`` lock — two
|
||||
concurrent toggles for the same key cannot produce an inconsistent
|
||||
intermediate state.
|
||||
"""
|
||||
normalized = key.lower()
|
||||
settings = await AppSettingsRepository.get()
|
||||
if normalized in settings.blocked_keys:
|
||||
new_keys = [k for k in settings.blocked_keys if k != normalized]
|
||||
else:
|
||||
new_keys = settings.blocked_keys + [normalized]
|
||||
return await AppSettingsRepository.update(blocked_keys=new_keys)
|
||||
async with db.tx() as conn:
|
||||
settings = await AppSettingsRepository._get_in_conn(conn)
|
||||
if normalized in settings.blocked_keys:
|
||||
new_keys = [k for k in settings.blocked_keys if k != normalized]
|
||||
else:
|
||||
new_keys = settings.blocked_keys + [normalized]
|
||||
await AppSettingsRepository._apply_updates(conn, blocked_keys=new_keys)
|
||||
return await AppSettingsRepository._get_in_conn(conn)
|
||||
|
||||
@staticmethod
|
||||
async def toggle_blocked_name(name: str) -> AppSettings:
|
||||
"""Toggle a display name in the blocked list."""
|
||||
settings = await AppSettingsRepository.get()
|
||||
if name in settings.blocked_names:
|
||||
new_names = [n for n in settings.blocked_names if n != name]
|
||||
else:
|
||||
new_names = settings.blocked_names + [name]
|
||||
return await AppSettingsRepository.update(blocked_names=new_names)
|
||||
"""Toggle a display name in the blocked list.
|
||||
|
||||
Same atomicity guarantee as ``toggle_blocked_key``.
|
||||
"""
|
||||
async with db.tx() as conn:
|
||||
settings = await AppSettingsRepository._get_in_conn(conn)
|
||||
if name in settings.blocked_names:
|
||||
new_names = [n for n in settings.blocked_names if n != name]
|
||||
else:
|
||||
new_names = settings.blocked_names + [name]
|
||||
await AppSettingsRepository._apply_updates(conn, blocked_names=new_names)
|
||||
return await AppSettingsRepository._get_in_conn(conn)
|
||||
|
||||
@staticmethod
|
||||
async def get_vapid_keys() -> tuple[str, str]:
|
||||
"""Return (private_key_pem, public_key_b64url) from app_settings.
|
||||
|
||||
These are internal-only columns not exposed via the AppSettings model.
|
||||
"""
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT vapid_private_key, vapid_public_key FROM app_settings WHERE id = 1"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if row and row["vapid_private_key"] and row["vapid_public_key"]:
|
||||
return row["vapid_private_key"], row["vapid_public_key"]
|
||||
return "", ""
|
||||
|
||||
@staticmethod
|
||||
async def set_vapid_keys(private_key: str, public_key: str) -> None:
|
||||
"""Persist auto-generated VAPID key pair to app_settings."""
|
||||
async with db.tx() as conn:
|
||||
await conn.execute(
|
||||
"UPDATE app_settings SET vapid_private_key = ?, vapid_public_key = ? WHERE id = 1",
|
||||
(private_key, public_key),
|
||||
)
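
A minimal sketch of how the auto-generated key pair could be produced on first startup and persisted via `set_vapid_keys()`. The helper name `ensure_vapid_keys` and the use of the `cryptography` package are assumptions; the actual generation code lives in `app/push/` and may differ.

```python
import base64

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

from app.repository.settings import AppSettingsRepository


async def ensure_vapid_keys() -> tuple[str, str]:
    """Return the stored VAPID pair, generating and persisting one if missing."""
    private_pem, public_b64 = await AppSettingsRepository.get_vapid_keys()
    if private_pem and public_b64:
        return private_pem, public_b64

    # P-256 key pair; the private half is stored as an unencrypted PKCS#8 PEM.
    key = ec.generate_private_key(ec.SECP256R1())
    private_pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode()

    # Web Push expects the public key as a base64url-encoded uncompressed
    # P-256 point (the value handed to the browser's PushManager.subscribe()).
    point = key.public_key().public_bytes(
        encoding=serialization.Encoding.X962,
        format=serialization.PublicFormat.UncompressedPoint,
    )
    public_b64 = base64.urlsafe_b64encode(point).rstrip(b"=").decode()

    await AppSettingsRepository.set_vapid_keys(private_pem, public_b64)
    return private_pem, public_b64
```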
|
||||
|
||||
@staticmethod
|
||||
async def get_push_conversations() -> list[str]:
|
||||
"""Return the global list of push-enabled conversation state keys.
|
||||
|
||||
Internal-only column, not exposed via the AppSettings model.
|
||||
"""
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT push_conversations FROM app_settings WHERE id = 1"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
if row and row["push_conversations"]:
|
||||
try:
|
||||
return json.loads(row["push_conversations"])
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return []
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
async def set_push_conversations(conversations: list[str]) -> list[str]:
|
||||
"""Replace the global push-enabled conversation list."""
|
||||
async with db.tx() as conn:
|
||||
await conn.execute(
|
||||
"UPDATE app_settings SET push_conversations = ? WHERE id = 1",
|
||||
(json.dumps(conversations),),
|
||||
)
|
||||
return conversations
|
||||
|
||||
@staticmethod
|
||||
async def toggle_push_conversation(key: str) -> list[str]:
|
||||
"""Add or remove a conversation state key from the global push list.
|
||||
|
||||
Atomic read-modify-write under a single ``db.tx()`` lock.
|
||||
"""
|
||||
async with db.tx() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT push_conversations FROM app_settings WHERE id = 1"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
current: list[str] = []
|
||||
if row and row["push_conversations"]:
|
||||
try:
|
||||
current = json.loads(row["push_conversations"])
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
current = []
|
||||
if key in current:
|
||||
current = [k for k in current if k != key]
|
||||
else:
|
||||
current.append(key)
|
||||
await conn.execute(
|
||||
"UPDATE app_settings SET push_conversations = ? WHERE id = 1",
|
||||
(json.dumps(current),),
|
||||
)
|
||||
return current
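
Illustrative toggle semantics for the global push-conversation list; the state-key format shown is a placeholder, not a value taken from a real deployment.

```python
# Start from an empty list, then toggle the same key twice.
await AppSettingsRepository.set_push_conversations([])

enabled = await AppSettingsRepository.toggle_push_conversation("contact:abcd1234")
# enabled == ["contact:abcd1234"]  (key added)

enabled = await AppSettingsRepository.toggle_push_conversation("contact:abcd1234")
# enabled == []  (same key removed again)
```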
|
||||
|
||||
|
||||
class StatisticsRepository:
|
||||
@staticmethod
|
||||
async def get_database_message_totals() -> dict[str, int]:
|
||||
"""Return message totals needed by lightweight debug surfaces."""
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT
|
||||
SUM(CASE WHEN type = 'PRIV' THEN 1 ELSE 0 END) AS total_dms,
|
||||
SUM(CASE WHEN type = 'CHAN' THEN 1 ELSE 0 END) AS total_channel_messages,
|
||||
SUM(CASE WHEN outgoing = 1 THEN 1 ELSE 0 END) AS total_outgoing
|
||||
FROM messages
|
||||
"""
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT
|
||||
SUM(CASE WHEN type = 'PRIV' THEN 1 ELSE 0 END) AS total_dms,
|
||||
SUM(CASE WHEN type = 'CHAN' THEN 1 ELSE 0 END) AS total_channel_messages,
|
||||
SUM(CASE WHEN outgoing = 1 THEN 1 ELSE 0 END) AS total_outgoing
|
||||
FROM messages
|
||||
"""
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
return {
|
||||
"total_dms": row["total_dms"] or 0,
|
||||
@@ -222,18 +389,19 @@ class StatisticsRepository:
|
||||
"""Get time-windowed counts for contacts/repeaters heard."""
|
||||
now = int(time.time())
|
||||
op = "!=" if exclude else "="
|
||||
cursor = await db.conn.execute(
|
||||
f"""
|
||||
SELECT
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_week
|
||||
FROM contacts
|
||||
WHERE type {op} ? AND last_seen IS NOT NULL
|
||||
""",
|
||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D, contact_type),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
f"""
|
||||
SELECT
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_week
|
||||
FROM contacts
|
||||
WHERE type {op} ? AND last_seen IS NOT NULL
|
||||
""",
|
||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D, contact_type),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None # Aggregate query always returns a row
|
||||
return {
|
||||
"last_hour": row["last_hour"] or 0,
|
||||
@@ -249,24 +417,25 @@ class StatisticsRepository:
|
||||
the old UPPER(...) join and aggregate per known channel directly.
|
||||
"""
|
||||
now = int(time.time())
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
WITH known AS (
|
||||
SELECT conversation_key, MAX(received_at) AS last_received_at
|
||||
FROM messages
|
||||
WHERE type = 'CHAN'
|
||||
AND conversation_key IN (SELECT key FROM channels)
|
||||
GROUP BY conversation_key
|
||||
)
|
||||
SELECT
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_week
|
||||
FROM known
|
||||
""",
|
||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
WITH known AS (
|
||||
SELECT conversation_key, MAX(received_at) AS last_received_at
|
||||
FROM messages
|
||||
WHERE type = 'CHAN'
|
||||
AND conversation_key IN (SELECT key FROM channels)
|
||||
GROUP BY conversation_key
|
||||
)
|
||||
SELECT
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_week
|
||||
FROM known
|
||||
""",
|
||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D),
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
return {
|
||||
"last_hour": row["last_hour"] or 0,
|
||||
@@ -280,92 +449,105 @@ class StatisticsRepository:
|
||||
now = int(time.time())
|
||||
cutoff = now - SECONDS_72H
|
||||
# Bucket timestamps to the start of each hour
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT (timestamp / 3600) * 3600 AS hour_ts, COUNT(*) AS count
|
||||
FROM raw_packets
|
||||
WHERE timestamp >= ?
|
||||
GROUP BY hour_ts
|
||||
ORDER BY hour_ts
|
||||
""",
|
||||
(cutoff,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT (timestamp / 3600) * 3600 AS hour_ts, COUNT(*) AS count
|
||||
FROM raw_packets
|
||||
WHERE timestamp >= ?
|
||||
GROUP BY hour_ts
|
||||
ORDER BY hour_ts
|
||||
""",
|
||||
(cutoff,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return [{"timestamp": row["hour_ts"], "count": row["count"]} for row in rows]
|
||||
|
||||
@staticmethod
|
||||
async def _path_hash_width_24h() -> dict[str, int | float]:
|
||||
"""Count parsed raw packets from the last 24h by hop hash width."""
|
||||
now = int(time.time())
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
||||
(now - SECONDS_24H,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
async with db.readonly() as conn:
|
||||
async with conn.execute(
|
||||
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
||||
(now - SECONDS_24H,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
return bucket_path_hash_widths(rows)
|
||||
|
||||
@staticmethod
|
||||
async def get_all() -> dict:
|
||||
"""Aggregate all statistics from existing tables."""
|
||||
"""Aggregate all statistics from existing tables.
|
||||
|
||||
Each helper acquires its own lock; there's no requirement that the
|
||||
whole snapshot be atomic. If we ever wanted a consistent snapshot
|
||||
we'd batch all queries into a single ``db.readonly()`` and use
|
||||
``_in_conn`` helpers, but statistics are intentionally approximate.
|
||||
"""
|
||||
now = int(time.time())
|
||||
|
||||
# Top 5 busiest channels in last 24h
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS message_count
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.received_at >= ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 5
|
||||
""",
|
||||
(now - SECONDS_24H,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
busiest_channels_24h = [
|
||||
{
|
||||
"channel_key": row["conversation_key"],
|
||||
"channel_name": row["channel_name"],
|
||||
"message_count": row["message_count"],
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
async with db.readonly() as conn:
|
||||
# Top 5 busiest channels in last 24h
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||
COUNT(*) AS message_count
|
||||
FROM messages m
|
||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||
WHERE m.type = 'CHAN' AND m.received_at >= ?
|
||||
GROUP BY m.conversation_key
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 5
|
||||
""",
|
||||
(now - SECONDS_24H,),
|
||||
) as cursor:
|
||||
rows = await cursor.fetchall()
|
||||
busiest_channels_24h = [
|
||||
{
|
||||
"channel_key": row["conversation_key"],
|
||||
"channel_name": row["channel_name"],
|
||||
"message_count": row["message_count"],
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
# Entity counts
|
||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM contacts WHERE type != 2")
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
contact_count: int = row["cnt"]
|
||||
# Entity counts
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) AS cnt FROM contacts WHERE type != 2"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
contact_count: int = row["cnt"]
|
||||
|
||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM contacts WHERE type = 2")
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
repeater_count: int = row["cnt"]
|
||||
async with conn.execute(
|
||||
"SELECT COUNT(*) AS cnt FROM contacts WHERE type = 2"
|
||||
) as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
repeater_count: int = row["cnt"]
|
||||
|
||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM channels")
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
channel_count: int = row["cnt"]
|
||||
async with conn.execute("SELECT COUNT(*) AS cnt FROM channels") as cursor:
|
||||
row = await cursor.fetchone()
|
||||
assert row is not None
|
||||
channel_count: int = row["cnt"]
|
||||
|
||||
# Packet split
|
||||
cursor = await db.conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS total,
|
||||
SUM(CASE WHEN message_id IS NOT NULL THEN 1 ELSE 0 END) AS decrypted
|
||||
FROM raw_packets
|
||||
"""
|
||||
)
|
||||
pkt_row = await cursor.fetchone()
|
||||
assert pkt_row is not None
|
||||
total_packets = pkt_row["total"] or 0
|
||||
decrypted_packets = pkt_row["decrypted"] or 0
|
||||
undecrypted_packets = total_packets - decrypted_packets
|
||||
# Packet split
|
||||
async with conn.execute(
|
||||
"""
|
||||
SELECT COUNT(*) AS total,
|
||||
SUM(CASE WHEN message_id IS NOT NULL THEN 1 ELSE 0 END) AS decrypted
|
||||
FROM raw_packets
|
||||
"""
|
||||
) as cursor:
|
||||
pkt_row = await cursor.fetchone()
|
||||
assert pkt_row is not None
|
||||
total_packets = pkt_row["total"] or 0
|
||||
decrypted_packets = pkt_row["decrypted"] or 0
|
||||
undecrypted_packets = total_packets - decrypted_packets
|
||||
|
||||
# These each acquire their own lock. The snapshot isn't atomic across
|
||||
# them — fine for stats, which are approximate by nature.
|
||||
message_totals = await StatisticsRepository.get_database_message_totals()
|
||||
|
||||
# Activity windows
|
||||
contacts_heard = await StatisticsRepository._activity_counts(contact_type=2, exclude=True)
|
||||
repeaters_heard = await StatisticsRepository._activity_counts(contact_type=2)
|
||||
known_channels_active = await StatisticsRepository._known_channels_active()
|
||||
|
||||
@@ -0,0 +1,164 @@
|
||||
"""Web Push subscription management endpoints."""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
from pywebpush import WebPushException
|
||||
|
||||
from app.push.send import send_push
|
||||
from app.push.vapid import get_vapid_private_key, get_vapid_public_key
|
||||
from app.repository.push_subscriptions import PushSubscriptionRepository
|
||||
from app.repository.settings import AppSettingsRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/push", tags=["push"])
|
||||
|
||||
|
||||
# ── Request/response models ─────────────────────────────────────────────
|
||||
|
||||
|
||||
class VapidPublicKeyResponse(BaseModel):
|
||||
public_key: str
|
||||
|
||||
|
||||
class PushSubscribeRequest(BaseModel):
|
||||
endpoint: str = Field(min_length=1)
|
||||
p256dh: str = Field(min_length=1)
|
||||
auth: str = Field(min_length=1)
|
||||
label: str = ""
|
||||
|
||||
|
||||
class PushSubscriptionUpdate(BaseModel):
|
||||
label: str | None = None
|
||||
|
||||
|
||||
class PushConversationToggle(BaseModel):
|
||||
key: str = Field(min_length=1)
|
||||
|
||||
|
||||
# ── Endpoints ────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@router.get("/vapid-public-key", response_model=VapidPublicKeyResponse)
|
||||
async def vapid_public_key() -> VapidPublicKeyResponse:
|
||||
"""Return the VAPID public key for browser PushManager.subscribe()."""
|
||||
key = get_vapid_public_key()
|
||||
if not key:
|
||||
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||
return VapidPublicKeyResponse(public_key=key)
|
||||
|
||||
|
||||
@router.post("/subscribe")
|
||||
async def subscribe(body: PushSubscribeRequest) -> dict:
|
||||
"""Register or update a push subscription (device). Upserts by endpoint."""
|
||||
sub = await PushSubscriptionRepository.create(
|
||||
endpoint=body.endpoint,
|
||||
p256dh=body.p256dh,
|
||||
auth=body.auth,
|
||||
label=body.label,
|
||||
)
|
||||
return sub
|
||||
|
||||
|
||||
@router.get("/subscriptions")
|
||||
async def list_subscriptions() -> list[dict]:
|
||||
"""List all push subscriptions (devices)."""
|
||||
return await PushSubscriptionRepository.get_all()
|
||||
|
||||
|
||||
@router.patch("/subscriptions/{subscription_id}")
|
||||
async def update_subscription(subscription_id: str, body: PushSubscriptionUpdate) -> dict:
|
||||
"""Update a subscription's label."""
|
||||
existing = await PushSubscriptionRepository.get(subscription_id)
|
||||
if not existing:
|
||||
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||
|
||||
updates = {}
|
||||
if body.label is not None:
|
||||
updates["label"] = body.label
|
||||
|
||||
result = await PushSubscriptionRepository.update(subscription_id, **updates)
|
||||
return result or existing
|
||||
|
||||
|
||||
@router.delete("/subscriptions/{subscription_id}")
|
||||
async def unsubscribe(subscription_id: str) -> dict:
|
||||
"""Delete a push subscription (device)."""
|
||||
deleted = await PushSubscriptionRepository.delete(subscription_id)
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||
return {"deleted": True}
|
||||
|
||||
|
||||
@router.post("/subscriptions/{subscription_id}/test")
|
||||
async def test_push(subscription_id: str) -> dict:
|
||||
"""Send a test notification to a subscription."""
|
||||
sub = await PushSubscriptionRepository.get(subscription_id)
|
||||
if not sub:
|
||||
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||
|
||||
vapid_key = get_vapid_private_key()
|
||||
if not vapid_key:
|
||||
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||
|
||||
payload = json.dumps(
|
||||
{
|
||||
"title": "RemoteTerm Test",
|
||||
"body": "Push notifications are working!",
|
||||
"tag": "meshcore-test",
|
||||
"url_hash": "",
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
async with asyncio.timeout(15):
|
||||
await send_push(
|
||||
subscription_info={
|
||||
"endpoint": sub["endpoint"],
|
||||
"keys": {"p256dh": sub["p256dh"], "auth": sub["auth"]},
|
||||
},
|
||||
payload=payload,
|
||||
vapid_private_key=vapid_key,
|
||||
vapid_claims={"sub": "mailto:noreply@meshcore.local"},
|
||||
)
|
||||
return {"status": "sent"}
|
||||
except TimeoutError:
|
||||
raise HTTPException(status_code=504, detail="Push delivery timed out") from None
|
||||
except WebPushException as e:
|
||||
status_code = getattr(getattr(e, "response", None), "status_code", 0)
|
||||
if status_code in (403, 404, 410):
|
||||
logger.info(
|
||||
"Test push: subscription stale (HTTP %d), removing %s",
|
||||
status_code,
|
||||
subscription_id,
|
||||
)
|
||||
await PushSubscriptionRepository.delete(subscription_id)
|
||||
raise HTTPException(
|
||||
status_code=410,
|
||||
detail="Subscription is stale (VAPID key mismatch or expired). "
|
||||
"Re-enable push from a conversation header.",
|
||||
) from None
|
||||
logger.warning("Test push failed: %s", e)
|
||||
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||
except Exception as e:
|
||||
logger.warning("Test push failed: %s", e)
|
||||
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||
|
||||
|
||||
# ── Global push conversation management ──────────────────────────────────
|
||||
|
||||
|
||||
@router.get("/conversations")
|
||||
async def get_push_conversations() -> list[str]:
|
||||
"""Return the global list of push-enabled conversation state keys."""
|
||||
return await AppSettingsRepository.get_push_conversations()
|
||||
|
||||
|
||||
@router.post("/conversations/toggle")
|
||||
async def toggle_push_conversation(body: PushConversationToggle) -> list[str]:
|
||||
"""Add or remove a conversation from the global push list."""
|
||||
return await AppSettingsRepository.toggle_push_conversation(body.key)
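
For reference, a hedged sketch of how a client could exercise these endpoints with `httpx`. The base URL, the `/api` mount prefix, and every payload value below are placeholder assumptions; only the paths and request shapes come from the router above.

```python
import httpx

BASE = "https://my-remoteterm.example/api"  # assumed deployment URL and prefix

# Public key handed to the browser's PushManager.subscribe()
vapid = httpx.get(f"{BASE}/push/vapid-public-key").json()["public_key"]

# Register a subscription the browser produced (all values are placeholders)
sub = httpx.post(
    f"{BASE}/push/subscribe",
    json={
        "endpoint": "https://fcm.googleapis.com/fcm/send/abc123",
        "p256dh": "<browser-generated key>",
        "auth": "<browser-generated secret>",
        "label": "Chrome on macOS",
    },
).json()

# Toggle push for a conversation; the response is the full enabled list
enabled = httpx.post(
    f"{BASE}/push/conversations/toggle",
    json={"key": "channel:0"},  # placeholder conversation state key
).json()
```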
|
||||
@@ -94,6 +94,7 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
||||
contact = await _resolve_contact_or_404(public_key)
|
||||
_require_repeater(contact)
|
||||
|
||||
lpp_raw = None
|
||||
async with radio_manager.radio_operation(
|
||||
"repeater_status", pause_polling=True, suspend_auto_fetch=True
|
||||
) as mc:
|
||||
@@ -102,6 +103,15 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
||||
|
||||
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
||||
|
||||
# Best-effort LPP sensor fetch while we still hold the lock
|
||||
if status is not None:
|
||||
try:
|
||||
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||
contact.public_key, timeout=10, min_timeout=5
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug("LPP sensor fetch failed for %s (non-fatal): %s", public_key[:12], e)
|
||||
|
||||
if status is None:
|
||||
raise HTTPException(status_code=504, detail="No status response from repeater")
|
||||
|
||||
@@ -128,6 +138,24 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
||||
# Record to telemetry history as a JSON blob (best-effort)
|
||||
now = int(time.time())
|
||||
status_dict = response.model_dump(exclude={"telemetry_history"})
|
||||
|
||||
# Attach scalar LPP sensors to the stored snapshot (same logic as auto-collect)
|
||||
if lpp_raw:
|
||||
lpp_sensors = []
|
||||
for entry in lpp_raw:
|
||||
value = entry.get("value", 0)
|
||||
if isinstance(value, dict):
|
||||
continue
|
||||
lpp_sensors.append(
|
||||
{
|
||||
"channel": entry.get("channel", 0),
|
||||
"type_name": str(entry.get("type", "unknown")),
|
||||
"value": value,
|
||||
}
|
||||
)
|
||||
if lpp_sensors:
|
||||
status_dict["lpp_sensors"] = lpp_sensors
|
||||
|
||||
try:
|
||||
await RepeaterTelemetryRepository.record(
|
||||
public_key=contact.public_key,
|
||||
|
||||
@@ -8,6 +8,13 @@ from pydantic import BaseModel, Field
|
||||
from app.models import CONTACT_TYPE_REPEATER, AppSettings
|
||||
from app.region_scope import normalize_region_scope
|
||||
from app.repository import AppSettingsRepository, ChannelRepository, ContactRepository
|
||||
from app.telemetry_interval import (
|
||||
DEFAULT_TELEMETRY_INTERVAL_HOURS,
|
||||
TELEMETRY_INTERVAL_OPTIONS_HOURS,
|
||||
clamp_telemetry_interval,
|
||||
legal_interval_options,
|
||||
next_run_timestamp_utc,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/settings", tags=["settings"])
|
||||
@@ -57,6 +64,15 @@ class AppSettingsUpdate(BaseModel):
|
||||
default=None,
|
||||
description="Auto-resend channel messages once if no echo heard within 2 seconds",
|
||||
)
|
||||
telemetry_interval_hours: int | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Preferred tracked-repeater telemetry interval in hours. "
|
||||
f"Must be one of {list(TELEMETRY_INTERVAL_OPTIONS_HOURS)}. "
|
||||
"Effective interval is clamped up to the shortest legal value "
|
||||
"based on the current tracked-repeater count."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class BlockKeyRequest(BaseModel):
|
||||
@@ -82,6 +98,29 @@ class TrackedTelemetryRequest(BaseModel):
|
||||
public_key: str = Field(description="Public key of the repeater to toggle tracking")
|
||||
|
||||
|
||||
class TelemetrySchedule(BaseModel):
|
||||
"""Surface of telemetry scheduling derivations for the UI.
|
||||
|
||||
``preferred_hours`` is the stored user choice. ``effective_hours`` is the
|
||||
value the scheduler actually uses (preferred, clamped up to the shortest
|
||||
legal interval given the current tracked-repeater count). ``options``
|
||||
lists the subset of the menu that is legal at the current count; the UI
|
||||
should hide anything not in this list. ``next_run_at`` is the Unix
|
||||
timestamp (seconds, UTC) of the next scheduled cycle, or ``None`` when
|
||||
no repeaters are tracked (nothing to schedule).
|
||||
"""
|
||||
|
||||
preferred_hours: int = Field(description="User's saved telemetry interval preference")
|
||||
effective_hours: int = Field(description="Scheduler's clamped interval")
|
||||
options: list[int] = Field(description="Legal interval choices at the current count")
|
||||
tracked_count: int = Field(description="Number of repeaters currently tracked")
|
||||
max_tracked: int = Field(description="Maximum number of repeaters that can be tracked")
|
||||
next_run_at: int | None = Field(
|
||||
default=None,
|
||||
description="Unix timestamp (UTC seconds) of the next scheduled cycle",
|
||||
)
|
||||
|
||||
|
||||
class TrackedTelemetryResponse(BaseModel):
|
||||
tracked_telemetry_repeaters: list[str] = Field(
|
||||
description="Current list of tracked repeater public keys"
|
||||
@@ -89,6 +128,24 @@ class TrackedTelemetryResponse(BaseModel):
|
||||
names: dict[str, str] = Field(
|
||||
description="Map of public key to display name for tracked repeaters"
|
||||
)
|
||||
schedule: TelemetrySchedule = Field(description="Current scheduling state")
|
||||
|
||||
|
||||
def _build_schedule(tracked_count: int, preferred_hours: int | None) -> TelemetrySchedule:
|
||||
pref = (
|
||||
preferred_hours
|
||||
if preferred_hours in TELEMETRY_INTERVAL_OPTIONS_HOURS
|
||||
else DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
)
|
||||
effective = clamp_telemetry_interval(pref, tracked_count)
|
||||
return TelemetrySchedule(
|
||||
preferred_hours=pref,
|
||||
effective_hours=effective,
|
||||
options=legal_interval_options(tracked_count),
|
||||
tracked_count=tracked_count,
|
||||
max_tracked=MAX_TRACKED_TELEMETRY_REPEATERS,
|
||||
next_run_at=next_run_timestamp_utc(effective) if tracked_count > 0 else None,
|
||||
)
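
A hypothetical walk-through of the clamping behaviour these fields encode, using assumed values (7 tracked repeaters and a saved 2-hour preference):

```python
# 24 // (24 // 7) == 8, so the 2 h preference is clamped up to 8 h.
schedule = _build_schedule(tracked_count=7, preferred_hours=2)
assert schedule.preferred_hours == 2      # stored choice is preserved
assert schedule.effective_hours == 8      # clamped to the shortest legal value
assert schedule.options == [8, 12, 24]    # menu entries legal at N == 7
assert schedule.next_run_at is not None   # something is tracked, so a run is scheduled
```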
|
||||
|
||||
|
||||
@router.get("", response_model=AppSettings)
|
||||
@@ -136,6 +193,20 @@ async def update_settings(update: AppSettingsUpdate) -> AppSettings:
|
||||
if update.auto_resend_channel is not None:
|
||||
kwargs["auto_resend_channel"] = update.auto_resend_channel
|
||||
|
||||
# Telemetry interval preference. Invalid values fall back to default
|
||||
# rather than 400-ing so a stale client can't brick settings saves.
|
||||
if update.telemetry_interval_hours is not None:
|
||||
raw_interval = update.telemetry_interval_hours
|
||||
if raw_interval not in TELEMETRY_INTERVAL_OPTIONS_HOURS:
|
||||
logger.warning(
|
||||
"telemetry_interval_hours=%r is not in the menu; defaulting to %d",
|
||||
raw_interval,
|
||||
DEFAULT_TELEMETRY_INTERVAL_HOURS,
|
||||
)
|
||||
raw_interval = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
logger.info("Updating telemetry_interval_hours to %d", raw_interval)
|
||||
kwargs["telemetry_interval_hours"] = raw_interval
|
||||
|
||||
# Flood scope
|
||||
flood_scope_changed = False
|
||||
if update.flood_scope is not None:
|
||||
@@ -229,6 +300,7 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
||||
return TrackedTelemetryResponse(
|
||||
tracked_telemetry_repeaters=new_list,
|
||||
names=await _resolve_names(new_list),
|
||||
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||
)
|
||||
|
||||
# Validate it's a repeater
|
||||
@@ -255,4 +327,20 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
||||
return TrackedTelemetryResponse(
|
||||
tracked_telemetry_repeaters=new_list,
|
||||
names=await _resolve_names(new_list),
|
||||
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||
)
|
||||
|
||||
|
||||
@router.get("/tracked-telemetry/schedule", response_model=TelemetrySchedule)
|
||||
async def get_telemetry_schedule() -> TelemetrySchedule:
|
||||
"""Return the current telemetry scheduling derivation.
|
||||
|
||||
The UI uses this to render the interval dropdown (legal options),
|
||||
surface saved-vs-effective when they differ, and show the next-run-at
|
||||
timestamp so users know when the next cycle will fire.
|
||||
"""
|
||||
app_settings = await AppSettingsRepository.get()
|
||||
return _build_schedule(
|
||||
len(app_settings.tracked_telemetry_repeaters),
|
||||
app_settings.telemetry_interval_hours,
|
||||
)
|
||||
|
||||
@@ -252,6 +252,11 @@ async def _store_direct_message(
|
||||
|
||||
if update_last_contacted_key:
|
||||
await contact_repository.update_last_contacted(update_last_contacted_key, received_at)
|
||||
# Incoming DMs are direct RF evidence that this contact transmitted;
|
||||
# outgoing DMs are our own send and must not bump the contact's
|
||||
# last_seen.
|
||||
if not outgoing:
|
||||
await contact_repository.touch_last_seen(update_last_contacted_key, received_at)
|
||||
|
||||
return message
|
||||
|
||||
|
||||
@@ -0,0 +1,88 @@
"""Shared math for the tracked-repeater telemetry scheduler.

The app enforces a ceiling of 24 repeater status checks per 24 hours across
all tracked repeaters. With N repeaters tracked, the shortest legal interval
is ``24 // floor(24 / N)`` hours. Longer intervals (``12`` or ``24``) are
always legal at any N and are offered as user choices on top of the derived
shortest-legal value.

The user picks an interval via settings. The scheduler uses
``clamp_telemetry_interval`` to push that pick up to the shortest legal
interval if the user has added repeaters that invalidated their choice.
The stored preference is *not* mutated on clamp — users get their pick back
if they later drop repeaters.
"""

from datetime import UTC, datetime, timedelta

# Daily check budget: total number of repeater status checks we allow
# across all tracked repeaters per 24-hour window.
DAILY_CHECK_CEILING = 24

# Menu of interval values shown to users. The derivation-based options
# (1..8) are filtered per current repeater count via
# ``legal_interval_options``; 12 and 24 are always legal.
TELEMETRY_INTERVAL_OPTIONS_HOURS: tuple[int, ...] = (1, 2, 3, 4, 6, 8, 12, 24)

DEFAULT_TELEMETRY_INTERVAL_HOURS = 8


def shortest_legal_interval_hours(n_tracked: int) -> int:
    """Return the shortest interval (hours) that stays within the daily ceiling.

    With ``N`` repeaters, each full cycle costs ``N`` checks. We're capped at
    ``DAILY_CHECK_CEILING`` checks/day, so the maximum cycles/day is
    ``floor(24 / N)`` and the resulting interval is ``24 // cycles_per_day``.
    For ``N == 0`` we return the default so the math still terminates, though
    the scheduler skips empty-tracked cycles regardless.
    """
    if n_tracked <= 0:
        return DEFAULT_TELEMETRY_INTERVAL_HOURS
    cycles_per_day = DAILY_CHECK_CEILING // n_tracked
    if cycles_per_day <= 0:
        # Would exceed the ceiling even at a 24h cadence; fall back to 24h.
        return 24
    return 24 // cycles_per_day


def clamp_telemetry_interval(preferred_hours: int, n_tracked: int) -> int:
    """Return the effective interval: max of user preference and shortest legal.

    Unrecognized values fall back to the default.
    """
    if preferred_hours not in TELEMETRY_INTERVAL_OPTIONS_HOURS:
        preferred_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
    shortest = shortest_legal_interval_hours(n_tracked)
    return max(preferred_hours, shortest)


def legal_interval_options(n_tracked: int) -> list[int]:
    """Return the subset of the interval menu that is legal for a given N."""
    shortest = shortest_legal_interval_hours(n_tracked)
    return [h for h in TELEMETRY_INTERVAL_OPTIONS_HOURS if h >= shortest]


def next_run_timestamp_utc(effective_hours: int, now: datetime | None = None) -> int:
    """Return Unix timestamp for the next UTC top-of-hour where
    ``hour % effective_hours == 0``.

    Returns the next matching hour strictly in the future (never ``now``
    itself, even if ``now`` lies exactly on a matching boundary).
    """
    if effective_hours <= 0:
        effective_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
    if now is None:
        now = datetime.now(UTC)
    else:
        now = now.astimezone(UTC)

    # Round down to the top of the current hour, then step forward at least
    # one full hour (so "now" never matches) until the modulo lines up.
    candidate = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    while candidate.hour % effective_hours != 0:
        candidate = candidate + timedelta(hours=1)
    return int(candidate.timestamp())
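
An illustrative check of this module's behaviour with made-up inputs (not taken from a real deployment):

```python
from datetime import UTC, datetime

# 5 tracked repeaters: 24 // (24 // 5) == 6, so anything shorter is clamped.
assert shortest_legal_interval_hours(5) == 6
assert clamp_telemetry_interval(2, 5) == 6
assert legal_interval_options(5) == [6, 8, 12, 24]

# At 13:05 UTC with a 6-hour interval, the next hour with hour % 6 == 0 is 18:00.
ts = next_run_timestamp_utc(6, now=datetime(2024, 1, 1, 13, 5, tzinfo=UTC))
assert ts == int(datetime(2024, 1, 1, 18, 0, tzinfo=UTC).timestamp())

# Exactly on a boundary still moves forward a full step ("now" never matches).
ts = next_run_timestamp_utc(6, now=datetime(2024, 1, 1, 12, 0, tzinfo=UTC))
assert ts == int(datetime(2024, 1, 1, 18, 0, tzinfo=UTC).timestamp())
```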
@@ -108,6 +108,10 @@ def broadcast_event(event_type: str, data: dict, *, realtime: bool = True) -> No
|
||||
|
||||
if event_type == "message":
|
||||
asyncio.create_task(fanout_manager.broadcast_message(data))
|
||||
|
||||
from app.push.manager import push_manager
|
||||
|
||||
asyncio.create_task(push_manager.dispatch_message(data))
|
||||
elif event_type == "raw_packet":
|
||||
asyncio.create_task(fanout_manager.broadcast_raw(data))
|
||||
elif event_type == "contact":
|
||||
|
||||
@@ -57,6 +57,7 @@ frontend/src/
│ ├── useConversationRouter.ts # URL hash → active conversation routing
│ ├── useContactsAndChannels.ts # Contact/channel loading, creation, deletion
│ ├── useBrowserNotifications.ts # Per-conversation browser notification preferences + dispatch
│ ├── usePushSubscription.ts # Web Push subscription lifecycle, per-conversation filters
│ ├── useFaviconBadge.ts # Browser tab unread badge state
│ ├── useRawPacketStatsSession.ts # Session-scoped packet-feed stats history
│ └── useRememberedServerPassword.ts # Browser-local repeater/room password persistence
@@ -429,6 +430,17 @@ The `SearchView` component (`components/SearchView.tsx`) provides full-text sear
- **Bidirectional pagination**: After jumping mid-history, `hasNewerMessages` enables forward pagination via `fetchNewerMessages`. The scroll-to-bottom button calls `jumpToBottom` (re-fetches latest page) instead of just scrolling.
- **WS message suppression**: When `hasNewerMessages` is true, incoming WS messages for the active conversation are not added to the message list (the user is viewing historical context, not the latest page).

## Web Push Notifications

Web Push allows notifications even when the browser tab is closed. Requires HTTPS (self-signed OK).

- **Service worker**: `frontend/public/sw.js` handles `push` events (show notification) and `notificationclick` (focus/open tab, navigate via `url_hash`). Registered in `main.tsx` on secure contexts only.
- **`usePushSubscription` hook**: manages the full subscription lifecycle — subscribe (register SW → `PushManager.subscribe()` → POST to backend), unsubscribe, global push-conversation toggles, device listing, and deletion.
- **ChatHeader integration**: `BellRing` icon (amber when active) appears next to the existing desktop notification `Bell` on secure contexts. First click subscribes the browser and enables push for that conversation; subsequent clicks toggle the conversation on/off.
- **Settings > Local**: `PushDeviceManagement` component shows subscription status, lists all registered devices with test/delete buttons. Uses the `usePushSubscription` hook directly.
- Auto-generates device labels from User-Agent (e.g., "Chrome on macOS").
- `PushSubscriptionInfo` type in `types.ts`; API methods in `api.ts`.

## Styling
|
||||
|
||||
UI styling is mostly utility-class driven (Tailwind-style classes in JSX) plus shared globals in `index.css` and `styles.css`.
|
||||
|
||||
+4
-1
@@ -13,8 +13,11 @@
|
||||
<link rel="icon" type="image/png" href="./favicon-96x96.png" sizes="96x96" />
|
||||
<link rel="shortcut icon" href="./favicon.ico" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="./apple-touch-icon.png" />
|
||||
<link rel="manifest" href="./site.webmanifest" />
|
||||
<link rel="manifest" href="./site.webmanifest" crossorigin="use-credentials" />
|
||||
<script>
|
||||
// Service worker registration moved to main.tsx (requires isSecureContext
|
||||
// for Web Push). Do not duplicate here.
|
||||
|
||||
// Start critical data fetches before React/Vite JS loads.
|
||||
// Must be in <head> BEFORE the module script so the browser queues these
|
||||
// fetches before it discovers and starts downloading the JS bundle.
|
||||
|
||||
Generated
+2
-2
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"version": "3.8.0",
|
||||
"version": "3.11.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"version": "3.8.0",
|
||||
"version": "3.11.3",
|
||||
"dependencies": {
|
||||
"@codemirror/lang-python": "^6.2.1",
|
||||
"@codemirror/theme-one-dark": "^6.1.3",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"private": true,
|
||||
"version": "3.11.0",
|
||||
"version": "3.11.3",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 122 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 426 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 109 KiB |
@@ -0,0 +1,60 @@
|
||||
/* Service worker for PWA installability and Web Push notifications. */
|
||||
|
||||
self.addEventListener("install", () => {
|
||||
self.skipWaiting();
|
||||
});
|
||||
|
||||
self.addEventListener("activate", (event) => {
|
||||
event.waitUntil(self.clients.claim());
|
||||
});
|
||||
|
||||
// No-op fetch handler — required for PWA installability criteria.
|
||||
// We don't cache anything; the app always fetches from the network.
|
||||
self.addEventListener("fetch", () => {});
|
||||
|
||||
self.addEventListener("push", (event) => {
|
||||
let data = {};
|
||||
try {
|
||||
data = event.data ? event.data.json() : {};
|
||||
} catch {
|
||||
data = { title: "New message", body: event.data?.text() || "" };
|
||||
}
|
||||
|
||||
const title = data.title || "New message";
|
||||
const options = {
|
||||
body: data.body || "",
|
||||
icon: "./favicon-256x256.png",
|
||||
badge: "./favicon-96x96.png",
|
||||
tag: data.tag || "meshcore-push",
|
||||
data: { url_hash: data.url_hash || "" },
|
||||
};
|
||||
|
||||
event.waitUntil(self.registration.showNotification(title, options));
|
||||
});
|
||||
|
||||
self.addEventListener("notificationclick", (event) => {
|
||||
event.notification.close();
|
||||
const urlHash = event.notification.data?.url_hash || "";
|
||||
// Use the SW registration scope as the base URL so subpath deployments
|
||||
// (e.g. archworks.co/meshcore/) navigate correctly.
|
||||
const base = self.registration.scope;
|
||||
|
||||
event.waitUntil(
|
||||
clients
|
||||
.matchAll({ type: "window", includeUncontrolled: true })
|
||||
.then((windowClients) => {
|
||||
// Focus an existing tab if one is open
|
||||
for (const client of windowClients) {
|
||||
if (client.url.startsWith(base)) {
|
||||
client.focus();
|
||||
if (urlHash) {
|
||||
client.navigate(base + urlHash);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
// Otherwise open a new tab
|
||||
return clients.openWindow(base + (urlHash || ""));
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -22,6 +22,7 @@ import { toast } from './components/ui/sonner';
|
||||
import { AppShell } from './components/AppShell';
|
||||
import type { MessageInputHandle } from './components/MessageInput';
|
||||
import { DistanceUnitProvider } from './contexts/DistanceUnitContext';
|
||||
import { usePush } from './contexts/PushSubscriptionContext';
|
||||
import { messageContainsMention } from './utils/messageParser';
|
||||
import { getStateKey } from './utils/conversationState';
|
||||
import type { BulkCreateHashtagChannelsResult, Conversation, Message, RawPacket } from './types';
|
||||
@@ -99,6 +100,7 @@ export function App() {
|
||||
toggleConversationNotifications,
|
||||
notifyIncomingMessage,
|
||||
} = useBrowserNotifications();
|
||||
const pushSubscription = usePush();
|
||||
const { rawPacketStatsSession, recordRawPacketObservation } = useRawPacketStatsSession();
|
||||
const {
|
||||
showNewMessage,
|
||||
@@ -588,6 +590,7 @@ export function App() {
|
||||
onDeleteChannel: handleDeleteChannel,
|
||||
onSetChannelFloodScopeOverride: handleSetChannelFloodScopeOverride,
|
||||
onSetChannelPathHashModeOverride: handleSetChannelPathHashModeOverride,
|
||||
onSelectConversation: handleSelectConversationWithTargetReset,
|
||||
onOpenContactInfo: handleOpenContactInfo,
|
||||
onOpenChannelInfo: handleOpenChannelInfo,
|
||||
onSenderClick: handleSenderClick,
|
||||
@@ -614,6 +617,36 @@ export function App() {
|
||||
);
|
||||
}
|
||||
},
|
||||
pushSupported: pushSubscription.isSupported,
|
||||
pushSubscribed: pushSubscription.isSubscribed,
|
||||
pushEnabledForConversation:
|
||||
activeConversation?.type === 'contact' || activeConversation?.type === 'channel'
|
||||
? pushSubscription.isConversationPushEnabled(
|
||||
getStateKey(activeConversation.type, activeConversation.id)
|
||||
)
|
||||
: false,
|
||||
onTogglePush: async () => {
|
||||
if (
|
||||
!activeConversation ||
|
||||
(activeConversation.type !== 'contact' && activeConversation.type !== 'channel')
|
||||
)
|
||||
return;
|
||||
const key = getStateKey(activeConversation.type, activeConversation.id);
|
||||
const pushEnabled = pushSubscription.isConversationPushEnabled(key);
|
||||
|
||||
if (!pushEnabled && !pushSubscription.isSubscribed) {
|
||||
const subscriptionId = await pushSubscription.subscribe();
|
||||
if (!subscriptionId) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
await pushSubscription.toggleConversation(key);
|
||||
},
|
||||
onOpenPushSettings: () => {
|
||||
setSettingsSection('local');
|
||||
if (!showSettings) handleToggleSettingsView();
|
||||
},
|
||||
trackedTelemetryRepeaters: appSettings?.tracked_telemetry_repeaters ?? [],
|
||||
onToggleTrackedTelemetry: handleToggleTrackedTelemetry,
|
||||
repeaterAutoLoginKey,
|
||||
@@ -647,6 +680,7 @@ export function App() {
|
||||
onToggleBlockedKey: handleBlockKey,
|
||||
onToggleBlockedName: handleBlockName,
|
||||
contacts,
|
||||
channels,
|
||||
onBulkDeleteContacts: (deletedKeys: string[]) => {
|
||||
const keySet = new Set(deletedKeys.map((k) => k.toLowerCase()));
|
||||
setContacts((prev) => prev.filter((c) => !keySet.has(c.public_key.toLowerCase())));
|
||||
|
||||
@@ -22,6 +22,7 @@ import type {
|
||||
RadioTraceResponse,
|
||||
RadioDiscoveryTarget,
|
||||
PathDiscoveryResponse,
|
||||
PushSubscriptionInfo,
|
||||
ResendChannelMessageResponse,
|
||||
RepeaterAclResponse,
|
||||
RepeaterAdvertIntervalsResponse,
|
||||
@@ -33,6 +34,7 @@ import type {
|
||||
RepeaterRadioSettingsResponse,
|
||||
RepeaterStatusResponse,
|
||||
TelemetryHistoryEntry,
|
||||
TelemetrySchedule,
|
||||
TrackedTelemetryResponse,
|
||||
StatisticsResponse,
|
||||
TraceResponse,
|
||||
@@ -332,6 +334,8 @@ export const api = {
|
||||
body: JSON.stringify({ public_key: publicKey }),
|
||||
}),
|
||||
|
||||
getTelemetrySchedule: () => fetchJson<TelemetrySchedule>('/settings/tracked-telemetry/schedule'),
|
||||
|
||||
// Favorites
|
||||
toggleFavorite: (type: 'channel' | 'contact', id: string) =>
|
||||
fetchJson<{ type: string; id: string; favorite: boolean }>('/settings/favorites/toggle', {
|
||||
@@ -438,4 +442,28 @@ export const api = {
|
||||
fetchJson<RepeaterLppTelemetryResponse>(`/contacts/${publicKey}/room/lpp-telemetry`, {
|
||||
method: 'POST',
|
||||
}),
|
||||
|
||||
// Push Notifications
|
||||
getVapidPublicKey: () => fetchJson<{ public_key: string }>('/push/vapid-public-key'),
|
||||
pushSubscribe: (subscription: {
|
||||
endpoint: string;
|
||||
p256dh: string;
|
||||
auth: string;
|
||||
label?: string;
|
||||
}) =>
|
||||
fetchJson<PushSubscriptionInfo>('/push/subscribe', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(subscription),
|
||||
}),
|
||||
getPushSubscriptions: () => fetchJson<PushSubscriptionInfo[]>('/push/subscriptions'),
|
||||
deletePushSubscription: (id: string) =>
|
||||
fetchJson<{ deleted: boolean }>(`/push/subscriptions/${id}`, { method: 'DELETE' }),
|
||||
testPushSubscription: (id: string) =>
|
||||
fetchJson<{ status: string }>(`/push/subscriptions/${id}/test`, { method: 'POST' }),
|
||||
getPushConversations: () => fetchJson<string[]>('/push/conversations'),
|
||||
togglePushConversation: (key: string) =>
|
||||
fetchJson<string[]>('/push/conversations/toggle', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ key }),
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { Bell, ChevronsLeftRight, Globe2, Info, Route, Star, Trash2 } from 'lucide-react';
|
||||
import { toast } from './ui/sonner';
|
||||
import { DirectTraceIcon } from './DirectTraceIcon';
|
||||
@@ -26,6 +26,11 @@ interface ChatHeaderProps {
|
||||
onTrace: () => void;
|
||||
onPathDiscovery: (publicKey: string) => Promise<PathDiscoveryResponse>;
|
||||
onToggleNotifications: () => void;
|
||||
pushSupported?: boolean;
|
||||
pushSubscribed?: boolean;
|
||||
pushEnabledForConversation?: boolean;
|
||||
onTogglePush?: () => void;
|
||||
onOpenPushSettings?: () => void;
|
||||
onToggleFavorite: (type: 'channel' | 'contact', id: string) => void;
|
||||
onSetChannelFloodScopeOverride?: (key: string, floodScopeOverride: string) => void;
|
||||
onSetChannelPathHashModeOverride?: (key: string, pathHashModeOverride: number | null) => void;
|
||||
@@ -46,6 +51,11 @@ export function ChatHeader({
|
||||
onTrace,
|
||||
onPathDiscovery,
|
||||
onToggleNotifications,
|
||||
pushSupported,
|
||||
pushSubscribed,
|
||||
pushEnabledForConversation,
|
||||
onTogglePush,
|
||||
onOpenPushSettings,
|
||||
onToggleFavorite,
|
||||
onSetChannelFloodScopeOverride,
|
||||
onSetChannelPathHashModeOverride,
|
||||
@@ -58,14 +68,29 @@ export function ChatHeader({
|
||||
const [pathDiscoveryOpen, setPathDiscoveryOpen] = useState(false);
|
||||
const [channelOverrideOpen, setChannelOverrideOpen] = useState(false);
|
||||
const [pathHashModeOverrideOpen, setPathHashModeOverrideOpen] = useState(false);
|
||||
const [notifDropdownOpen, setNotifDropdownOpen] = useState(false);
|
||||
const notifDropdownRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
setShowKey(false);
|
||||
setPathDiscoveryOpen(false);
|
||||
setChannelOverrideOpen(false);
|
||||
setPathHashModeOverrideOpen(false);
|
||||
setNotifDropdownOpen(false);
|
||||
}, [conversation.id]);
|
||||
|
||||
// Close notification dropdown on outside click
|
||||
useEffect(() => {
|
||||
if (!notifDropdownOpen) return;
|
||||
const handler = (e: MouseEvent) => {
|
||||
if (notifDropdownRef.current && !notifDropdownRef.current.contains(e.target as Node)) {
|
||||
setNotifDropdownOpen(false);
|
||||
}
|
||||
};
|
||||
document.addEventListener('mousedown', handler);
|
||||
return () => document.removeEventListener('mousedown', handler);
|
||||
}, [notifDropdownOpen]);
|
||||
|
||||
const activeChannel =
|
||||
conversation.type === 'channel'
|
||||
? channels.find((channel) => channel.key === conversation.id)
|
||||
@@ -288,34 +313,94 @@ export function ChatHeader({
|
||||
<DirectTraceIcon className="h-4 w-4 text-muted-foreground" />
|
||||
</button>
|
||||
)}
|
||||
{notificationsSupported && !activeContactIsRoomServer && (
|
||||
<button
|
||||
className="flex items-center gap-1 rounded px-1 py-1 hover:bg-accent text-lg leading-none transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||
onClick={onToggleNotifications}
|
||||
title={
|
||||
notificationsEnabled
|
||||
? 'Disable desktop notifications for this conversation'
|
||||
: notificationsPermission === 'denied'
|
||||
? 'Notifications blocked by the browser'
|
||||
: 'Enable desktop notifications for this conversation'
|
||||
}
|
||||
aria-label={
|
||||
notificationsEnabled
|
||||
? 'Disable notifications for this conversation'
|
||||
: 'Enable notifications for this conversation'
|
||||
}
|
||||
>
|
||||
<Bell
|
||||
className={`h-4 w-4 ${notificationsEnabled ? 'text-status-connected' : 'text-muted-foreground'}`}
|
||||
fill={notificationsEnabled ? 'currentColor' : 'none'}
|
||||
aria-hidden="true"
|
||||
/>
|
||||
{notificationsEnabled && (
|
||||
<span className="hidden md:inline text-[0.6875rem] font-medium text-status-connected">
|
||||
Notifications On
|
||||
</span>
|
||||
{(notificationsSupported || pushSupported) && !activeContactIsRoomServer && (
|
||||
<div className="relative" ref={notifDropdownRef}>
|
||||
<button
|
||||
className="p-1 rounded hover:bg-accent text-lg leading-none transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||
onClick={() => setNotifDropdownOpen((v) => !v)}
|
||||
title="Notification settings"
|
||||
aria-label="Notification settings"
|
||||
aria-expanded={notifDropdownOpen}
|
||||
>
|
||||
<Bell
|
||||
className={cn(
|
||||
'h-4 w-4',
|
||||
notificationsEnabled || pushEnabledForConversation
|
||||
? 'text-primary'
|
||||
: 'text-muted-foreground'
|
||||
)}
|
||||
fill={notificationsEnabled || pushEnabledForConversation ? 'currentColor' : 'none'}
|
||||
aria-hidden="true"
|
||||
/>
|
||||
</button>
|
||||
{notifDropdownOpen && (
|
||||
<div className="absolute right-[-4.5rem] sm:right-0 top-full z-50 mt-1 w-[calc(100vw-2rem)] sm:w-72 max-w-72 rounded-md border border-border bg-popover p-3 shadow-lg space-y-3">
|
||||
{notificationsSupported && (
|
||||
<label className="flex items-start gap-2.5 cursor-pointer group">
|
||||
<input
|
||||
type="checkbox"
|
||||
className="mt-0.5 accent-primary h-4 w-4 shrink-0"
|
||||
checked={notificationsEnabled}
|
||||
disabled={notificationsPermission === 'denied'}
|
||||
onChange={onToggleNotifications}
|
||||
/>
|
||||
<div className="min-w-0">
|
||||
<span className="text-sm font-medium text-foreground block leading-tight">
|
||||
Desktop notifications (legacy)
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||
{notificationsPermission === 'denied'
|
||||
? 'Blocked by browser — check site permissions'
|
||||
: 'Alerts while this tab is open'}
|
||||
</span>
|
||||
</div>
|
||||
</label>
|
||||
)}
|
||||
{pushSupported && onTogglePush && (
|
||||
<>
|
||||
<label className="flex items-start gap-2.5 cursor-pointer group">
|
||||
<input
|
||||
type="checkbox"
|
||||
className="mt-0.5 accent-primary h-4 w-4 shrink-0"
|
||||
checked={!!pushEnabledForConversation}
|
||||
onChange={onTogglePush}
|
||||
/>
|
||||
<div className="min-w-0">
|
||||
<span className="text-sm font-medium text-foreground block leading-tight">
|
||||
Web Push (beta testing)
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||
{pushSubscribed
|
||||
? 'Alerts even when the browser is closed'
|
||||
: 'Alerts even when the browser is closed. Requires HTTPS.'}
|
||||
</span>
|
||||
</div>
|
||||
</label>
|
||||
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||
All notification types require a trusted HTTPS context. Depending on your
|
||||
browser, a snakeoil certificate may not be sufficient.
|
||||
</span>
|
||||
{onOpenPushSettings && (
|
||||
<p className="text-xs text-muted-foreground leading-snug mt-1.5">
|
||||
Manage Web Push enabled devices in{' '}
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
setNotifDropdownOpen(false);
|
||||
onOpenPushSettings();
|
||||
}}
|
||||
className="text-primary hover:underline transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
>
|
||||
Settings → Local
|
||||
</button>
|
||||
.
|
||||
</p>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
{conversation.type === 'channel' && onSetChannelFloodScopeOverride && (
|
||||
<button
|
||||
|
||||
@@ -3,10 +3,9 @@ import { useMemo, useState } from 'react';
|
||||
import type { Contact, PathDiscoveryResponse, PathDiscoveryRoute } from '../types';
|
||||
import {
|
||||
findContactsByPrefix,
|
||||
formatForcedRouteSummary,
|
||||
formatLearnedRouteSummary,
|
||||
formatRouteLabel,
|
||||
getDirectContactRoute,
|
||||
getEffectiveContactRoute,
|
||||
hasRoutingOverride,
|
||||
parsePathHops,
|
||||
} from '../utils/pathUtils';
|
||||
import { Button } from './ui/button';
|
||||
@@ -99,30 +98,9 @@ export function ContactPathDiscoveryModal({
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [result, setResult] = useState<PathDiscoveryResponse | null>(null);
|
||||
|
||||
const effectiveRoute = useMemo(() => getEffectiveContactRoute(contact), [contact]);
|
||||
const directRoute = useMemo(() => getDirectContactRoute(contact), [contact]);
|
||||
const hasForcedRoute = hasRoutingOverride(contact);
|
||||
const learnedRouteSummary = useMemo(() => {
|
||||
if (!directRoute) {
|
||||
return 'Flood';
|
||||
}
|
||||
const hops = parsePathHops(directRoute.path, directRoute.path_len);
|
||||
return hops.length > 0
|
||||
? `${formatRouteLabel(directRoute.path_len, true)} (${hops.join(' -> ')})`
|
||||
: formatRouteLabel(directRoute.path_len, true);
|
||||
}, [directRoute]);
|
||||
const forcedRouteSummary = useMemo(() => {
|
||||
if (!hasForcedRoute) {
|
||||
return null;
|
||||
}
|
||||
if (effectiveRoute.pathLen === -1) {
|
||||
return 'Flood';
|
||||
}
|
||||
const hops = parsePathHops(effectiveRoute.path, effectiveRoute.pathLen);
|
||||
return hops.length > 0
|
||||
? `${formatRouteLabel(effectiveRoute.pathLen, true)} (${hops.join(' -> ')})`
|
||||
: formatRouteLabel(effectiveRoute.pathLen, true);
|
||||
}, [effectiveRoute, hasForcedRoute]);
|
||||
const learnedRouteSummary = useMemo(() => formatLearnedRouteSummary(contact), [contact]);
|
||||
const forcedRouteSummary = useMemo(() => formatForcedRouteSummary(contact), [contact]);
|
||||
const hasForcedRoute = forcedRouteSummary !== null;
|
||||
|
||||
const forwardChain = result
|
||||
? renderRouteNodes(
|
||||
|
||||
@@ -3,10 +3,9 @@ import { useEffect, useMemo, useState } from 'react';
|
||||
import { api } from '../api';
|
||||
import type { Contact } from '../types';
|
||||
import {
|
||||
formatRouteLabel,
|
||||
formatForcedRouteSummary,
|
||||
formatLearnedRouteSummary,
|
||||
formatRoutingOverrideInput,
|
||||
getDirectContactRoute,
|
||||
hasRoutingOverride,
|
||||
} from '../utils/pathUtils';
|
||||
import { Button } from './ui/button';
|
||||
import {
|
||||
@@ -28,18 +27,6 @@ interface ContactRoutingOverrideModalProps {
|
||||
onError: (message: string) => void;
|
||||
}
|
||||
|
||||
function summarizeLearnedRoute(contact: Contact): string {
|
||||
return formatRouteLabel(getDirectContactRoute(contact)?.path_len ?? -1, true);
|
||||
}
|
||||
|
||||
function summarizeForcedRoute(contact: Contact): string | null {
|
||||
if (!hasRoutingOverride(contact)) {
|
||||
return null;
|
||||
}
|
||||
const routeOverrideLen = contact.route_override_len;
|
||||
return routeOverrideLen == null ? null : formatRouteLabel(routeOverrideLen, true);
|
||||
}
|
||||
|
||||
export function ContactRoutingOverrideModal({
|
||||
open,
|
||||
onClose,
|
||||
@@ -59,7 +46,8 @@ export function ContactRoutingOverrideModal({
|
||||
setError(null);
|
||||
}, [contact, open]);
|
||||
|
||||
const forcedRouteSummary = useMemo(() => summarizeForcedRoute(contact), [contact]);
|
||||
const learnedRouteSummary = useMemo(() => formatLearnedRouteSummary(contact), [contact]);
|
||||
const forcedRouteSummary = useMemo(() => formatForcedRouteSummary(contact), [contact]);
|
||||
|
||||
const saveRoute = async (value: string) => {
|
||||
setSaving(true);
|
||||
@@ -98,7 +86,7 @@ export function ContactRoutingOverrideModal({
|
||||
<div className="rounded-md border border-border bg-muted/20 p-3 text-sm">
|
||||
<div className="font-medium">{contact.name || contact.public_key.slice(0, 12)}</div>
|
||||
<div className="mt-1 text-muted-foreground">
|
||||
Current learned route: {summarizeLearnedRoute(contact)}
|
||||
Current learned route: {learnedRouteSummary}
|
||||
</div>
|
||||
{forcedRouteSummary && (
|
||||
<div className="mt-1 text-destructive">
@@ -20,7 +20,11 @@ import type {
|
||||
} from '../types';
|
||||
import type { RawPacketStatsSessionState } from '../utils/rawPacketStats';
|
||||
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
|
||||
import { isPrefixOnlyContact, isUnknownFullKeyContact } from '../utils/pubkey';
|
||||
import {
|
||||
getContactDisplayName,
|
||||
isPrefixOnlyContact,
|
||||
isUnknownFullKeyContact,
|
||||
} from '../utils/pubkey';
|
||||
|
||||
const RepeaterDashboard = lazy(() =>
|
||||
import('./RepeaterDashboard').then((m) => ({ default: m.RepeaterDashboard }))
|
||||
@@ -65,6 +69,7 @@ interface ConversationPaneProps {
|
||||
channelKey: string,
|
||||
pathHashModeOverride: number | null
|
||||
) => Promise<void>;
|
||||
onSelectConversation: (conversation: Conversation) => void;
|
||||
onOpenContactInfo: (publicKey: string, fromChannel?: boolean) => void;
|
||||
onOpenChannelInfo: (channelKey: string) => void;
|
||||
onSenderClick: (sender: string) => void;
|
||||
@@ -77,6 +82,11 @@ interface ConversationPaneProps {
|
||||
onDismissUnreadMarker: () => void;
|
||||
onSendMessage: (text: string) => Promise<void>;
|
||||
onToggleNotifications: () => void;
|
||||
pushSupported?: boolean;
|
||||
pushSubscribed?: boolean;
|
||||
pushEnabledForConversation?: boolean;
|
||||
onTogglePush?: () => void;
|
||||
onOpenPushSettings?: () => void;
|
||||
trackedTelemetryRepeaters: string[];
|
||||
onToggleTrackedTelemetry: (publicKey: string) => Promise<void>;
|
||||
repeaterAutoLoginKey: string | null;
|
||||
@@ -137,6 +147,7 @@ export function ConversationPane({
|
||||
onDeleteChannel,
|
||||
onSetChannelFloodScopeOverride,
|
||||
onSetChannelPathHashModeOverride,
|
||||
onSelectConversation,
|
||||
onOpenContactInfo,
|
||||
onOpenChannelInfo,
|
||||
onSenderClick,
|
||||
@@ -149,6 +160,11 @@ export function ConversationPane({
|
||||
onDismissUnreadMarker,
|
||||
onSendMessage,
|
||||
onToggleNotifications,
|
||||
pushSupported,
|
||||
pushSubscribed,
|
||||
pushEnabledForConversation,
|
||||
onTogglePush,
|
||||
onOpenPushSettings,
|
||||
trackedTelemetryRepeaters,
|
||||
onToggleTrackedTelemetry,
|
||||
repeaterAutoLoginKey,
|
||||
@@ -197,6 +213,17 @@ export function ConversationPane({
|
||||
focusedKey={activeConversation.mapFocusKey}
|
||||
rawPackets={rawPackets}
|
||||
config={config}
|
||||
onSelectContact={(contact) =>
|
||||
onSelectConversation({
|
||||
type: 'contact',
|
||||
id: contact.public_key,
|
||||
name: getContactDisplayName(
|
||||
contact.name,
|
||||
contact.public_key,
|
||||
contact.last_advert
|
||||
),
|
||||
})
|
||||
}
|
||||
/>
|
||||
</Suspense>
|
||||
</div>
|
||||
@@ -271,6 +298,11 @@ export function ConversationPane({
|
||||
notificationsSupported={notificationsSupported}
|
||||
notificationsEnabled={notificationsEnabled}
|
||||
notificationsPermission={notificationsPermission}
|
||||
pushSupported={pushSupported}
|
||||
pushSubscribed={pushSubscribed}
|
||||
pushEnabledForConversation={pushEnabledForConversation}
|
||||
onTogglePush={onTogglePush}
|
||||
onOpenPushSettings={onOpenPushSettings}
|
||||
onTrace={onTrace}
|
||||
onPathDiscovery={onPathDiscovery}
|
||||
onToggleNotifications={onToggleNotifications}
@@ -1,5 +1,14 @@
|
||||
import { Fragment, useEffect, useState, useMemo, useRef, useCallback } from 'react';
|
||||
import { MapContainer, TileLayer, CircleMarker, Popup, useMap, Polyline } from 'react-leaflet';
|
||||
import {
|
||||
MapContainer,
|
||||
TileLayer,
|
||||
CircleMarker,
|
||||
Popup,
|
||||
useMap,
|
||||
useMapEvents,
|
||||
Polyline,
|
||||
LayersControl,
|
||||
} from 'react-leaflet';
|
||||
import type { LatLngBoundsExpression, CircleMarker as LeafletCircleMarker } from 'leaflet';
|
||||
import L from 'leaflet';
|
||||
import 'leaflet/dist/leaflet.css';
|
||||
@@ -21,29 +30,132 @@ interface MapViewProps {
|
||||
focusedKey?: string | null;
|
||||
rawPackets?: RawPacket[];
|
||||
config?: RadioConfig | null;
|
||||
/** When provided, the contact name in each popup becomes a clickable link
|
||||
* that opens the conversation for that contact (DM, repeater, or room). */
|
||||
onSelectContact?: (contact: Contact) => void;
|
||||
}
|
||||
|
||||
// --- Tile layer presets ---
|
||||
const TILE_LIGHT = {
|
||||
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
||||
attribution: '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a>',
|
||||
background: '#1a1a2e',
|
||||
};
|
||||
const TILE_DARK = {
|
||||
url: 'https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png',
|
||||
attribution:
|
||||
'© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="https://carto.com/">CARTO</a>',
|
||||
background: '#0d0d0d',
|
||||
};
|
||||
// Every provider here is free and works without an API key. Attribution strings
|
||||
// follow each provider's requirements; do not remove them. If you add a new
|
||||
// provider, verify its terms of service (especially for Esri / Google-style
|
||||
// satellite tiles) before committing.
|
||||
interface TileLayerPreset {
|
||||
id: string;
|
||||
label: string;
|
||||
url: string;
|
||||
attribution: string;
|
||||
background: string;
|
||||
/** Highest zoom the provider publishes tiles at. When the layer is active,
|
||||
* the map's zoom ceiling is tightened to this value via
|
||||
* `MaxZoomByActiveLayer` so the user cannot zoom into a grey void. */
|
||||
maxZoom?: number;
|
||||
}
|
||||
|
||||
function getSavedDarkMap(): boolean {
|
||||
// Global zoom bounds for the MapContainer itself. These are pinned to the
|
||||
// container so Leaflet's internal tile-range math never has to guess when
|
||||
// layers swap in/out via LayersControl. Without this, an initial-mount race
|
||||
// between MapContainer layout and LayersControl.BaseLayer addition has been
|
||||
// observed to throw "Attempted to load an infinite number of tiles".
|
||||
const MAP_MIN_ZOOM = 2;
|
||||
const MAP_MAX_ZOOM = 19;
|
||||
|
||||
const TILE_LAYERS: readonly TileLayerPreset[] = [
|
||||
{
|
||||
id: 'light',
|
||||
label: 'Light (OpenStreetMap)',
|
||||
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
||||
attribution: '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a>',
|
||||
background: '#1a1a2e',
|
||||
maxZoom: 19,
|
||||
},
|
||||
{
|
||||
id: 'dark',
|
||||
label: 'Dark (CARTO)',
|
||||
url: 'https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png',
|
||||
attribution:
|
||||
'© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="https://carto.com/">CARTO</a>',
|
||||
background: '#0d0d0d',
|
||||
maxZoom: 19,
|
||||
},
|
||||
{
|
||||
id: 'topographic',
|
||||
label: 'Topographic (OpenTopoMap)',
|
||||
url: 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png',
|
||||
attribution:
|
||||
'Map data: © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, <a href="http://viewfinderpanoramas.org">SRTM</a> | Map style: © <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)',
|
||||
background: '#a3b3bc',
|
||||
maxZoom: 17,
|
||||
},
|
||||
{
|
||||
id: 'satellite',
|
||||
label: 'Satellite (Esri)',
|
||||
url: 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
|
||||
attribution:
|
||||
'Tiles © <a href="https://www.esri.com/">Esri</a> — Source: Esri, Maxar, Earthstar Geographics, and the GIS User Community',
|
||||
background: '#1a1f2e',
|
||||
// Esri's tile service advertises LODs up to 23 and returns HTTP 200 for
|
||||
// every tile request, but the underlying imagery is only high-resolution
|
||||
// up to ~18 in most developed areas and shallower in rural regions. We
|
||||
// cap at 18 rather than 19 so users don't zoom into visibly-empty or
|
||||
// severely-upscaled tiles. Remote regions may still be sparse at 18.
|
||||
maxZoom: 18,
|
||||
},
|
||||
] as const;
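The preset array above is the extension point the comment calls out. An illustrative, purely hypothetical entry (not part of this change) showing what adding another keyless provider would look like; the URL template, attribution text, and `maxZoom` would need to be re-verified against the provider's current policy before committing one for real:

```typescript
// Hypothetical example entry only; verify the provider's tile URL,
// attribution requirements, and published max zoom before adding it.
const EXAMPLE_EXTRA_LAYER: TileLayerPreset = {
  id: 'humanitarian',
  label: 'Humanitarian (OSM HOT)',
  url: 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png',
  attribution:
    '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, tiles style by <a href="https://www.hotosm.org/">HOT</a>',
  background: '#eef3f0',
  maxZoom: 19, // tighten if the provider stops publishing tiles this deep
};
```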
|
||||
|
||||
const MAP_LAYER_STORAGE_KEY = 'remoteterm-map-layer';
|
||||
const LEGACY_DARK_MAP_STORAGE_KEY = 'remoteterm-dark-map';
|
||||
|
||||
function getSavedLayerId(): string {
|
||||
try {
|
||||
return localStorage.getItem('remoteterm-dark-map') === 'true';
|
||||
const stored = localStorage.getItem(MAP_LAYER_STORAGE_KEY);
|
||||
if (stored && TILE_LAYERS.some((l) => l.id === stored)) return stored;
|
||||
// Legacy migration: boolean dark-map flag predates multi-layer support.
|
||||
const legacyDark = localStorage.getItem(LEGACY_DARK_MAP_STORAGE_KEY) === 'true';
|
||||
return legacyDark ? 'dark' : 'light';
|
||||
} catch {
|
||||
return false;
|
||||
return 'light';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Leaflet-internal companion component: listens for base-layer changes driven
|
||||
* by Leaflet's own LayersControl UI and pipes the selection back to React.
|
||||
* Kept separate so the persistence/state logic stays out of the render tree.
|
||||
*/
|
||||
function LayerChangeWatcher({ onChange }: { onChange: (name: string) => void }) {
|
||||
useMapEvents({
|
||||
baselayerchange: (event) => {
|
||||
if (event.name) onChange(event.name);
|
||||
},
|
||||
});
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enforces the active layer's zoom ceiling on the underlying Leaflet map.
|
||||
*
|
||||
* Leaflet's `map.getMaxZoom()` prefers `options.maxZoom` (set on MapContainer)
|
||||
* over per-layer `maxZoom`, so a per-TileLayer cap is silently ignored unless
|
||||
* we push it down to the map itself. We do that here whenever the active
|
||||
* layer changes, and clamp the current zoom if the user happened to be zoomed
|
||||
* past the new cap at the moment of the switch.
|
||||
*
|
||||
* The MapContainer's fixed `minZoom`/`maxZoom` remain the absolute hull that
|
||||
* prevents the "Attempted to load an infinite number of tiles" race during
|
||||
* initial mount (see `MAP_MIN_ZOOM`/`MAP_MAX_ZOOM` below).
|
||||
*/
|
||||
function MaxZoomByActiveLayer({ maxZoom }: { maxZoom: number }) {
|
||||
const map = useMap();
|
||||
useEffect(() => {
|
||||
map.setMaxZoom(maxZoom);
|
||||
if (map.getZoom() > maxZoom) {
|
||||
map.setZoom(maxZoom);
|
||||
}
|
||||
}, [map, maxZoom]);
|
||||
return null;
|
||||
}
|
||||
|
||||
const MAP_RECENCY_COLORS = {
|
||||
recent: '#06b6d4',
|
||||
today: '#2563eb',
|
||||
@@ -379,20 +491,43 @@ function ParticleOverlay({ particles }: { particles: MapParticle[] }) {
|
||||
|
||||
// --- Main component ---
|
||||
|
||||
export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewProps) {
|
||||
export function MapView({
|
||||
contacts,
|
||||
focusedKey,
|
||||
rawPackets,
|
||||
config,
|
||||
onSelectContact,
|
||||
}: MapViewProps) {
|
||||
const [sevenDaysAgo] = useState(() => Date.now() / 1000 - 7 * 24 * 60 * 60);
|
||||
const [darkMap, setDarkMap] = useState(getSavedDarkMap);
|
||||
const tile = darkMap ? TILE_DARK : TILE_LIGHT;
|
||||
const [selectedLayerId, setSelectedLayerId] = useState<string>(getSavedLayerId);
|
||||
const activeLayer = TILE_LAYERS.find((l) => l.id === selectedLayerId) ?? TILE_LAYERS[0];
|
||||
|
||||
// Sync with settings changes from other components
|
||||
// Sync layer selection across tabs and windows.
|
||||
useEffect(() => {
|
||||
const onStorage = (e: StorageEvent) => {
|
||||
if (e.key === 'remoteterm-dark-map') setDarkMap(e.newValue === 'true');
|
||||
if (e.key !== MAP_LAYER_STORAGE_KEY) return;
|
||||
const next = e.newValue ?? '';
|
||||
if (TILE_LAYERS.some((l) => l.id === next)) {
|
||||
setSelectedLayerId(next);
|
||||
}
|
||||
};
|
||||
window.addEventListener('storage', onStorage);
|
||||
return () => window.removeEventListener('storage', onStorage);
|
||||
}, []);
|
||||
|
||||
const handleLayerChange = useCallback((layerName: string) => {
|
||||
const match = TILE_LAYERS.find((l) => l.label === layerName);
|
||||
if (!match) return;
|
||||
setSelectedLayerId(match.id);
|
||||
try {
|
||||
localStorage.setItem(MAP_LAYER_STORAGE_KEY, match.id);
|
||||
// Clear the legacy key so a future downgrade-rollback doesn't revert us.
|
||||
localStorage.removeItem(LEGACY_DARK_MAP_STORAGE_KEY);
|
||||
} catch {
|
||||
// localStorage may be disabled; selection stays in memory only.
|
||||
}
|
||||
}, []);
|
||||
|
||||
const [showPackets, setShowPackets] = useState(false);
|
||||
const [discoveryMode, setDiscoveryMode] = useState(false);
|
||||
const [discoveredKeys, setDiscoveredKeys] = useState<Set<string>>(new Set());
|
||||
@@ -674,10 +809,12 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-full">
|
||||
{/* Info bar */}
|
||||
<div className="px-4 py-2 bg-muted/50 text-xs text-muted-foreground flex items-center justify-between">
|
||||
{/* Info bar: stacks vertically on narrow viewports (info label, legend
|
||||
row, controls row) so nothing truncates; flattens to a single row
|
||||
with right-aligned cluster at md and up. */}
|
||||
<div className="px-4 py-2 bg-muted/50 text-xs text-muted-foreground flex flex-col gap-1 md:flex-row md:items-center md:justify-between md:gap-3">
|
||||
<span>{infoLabel}</span>
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="flex flex-wrap items-center gap-x-3 gap-y-1 md:justify-end">
|
||||
{!showPackets && (
|
||||
<>
|
||||
<span className="flex items-center gap-1">
|
||||
@@ -758,7 +895,7 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
||||
/>{' '}
|
||||
repeater
|
||||
</span>
|
||||
<label className="flex items-center gap-1.5 cursor-pointer ml-2">
|
||||
<label className="flex items-center gap-1.5 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={showPackets}
|
||||
@@ -791,10 +928,28 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
||||
<MapContainer
|
||||
center={[20, 0]}
|
||||
zoom={2}
|
||||
minZoom={MAP_MIN_ZOOM}
|
||||
maxZoom={MAP_MAX_ZOOM}
|
||||
className="h-full w-full"
|
||||
style={{ background: tile.background }}
|
||||
style={{ background: activeLayer.background }}
|
||||
>
|
||||
<TileLayer key={tile.url} attribution={tile.attribution} url={tile.url} />
|
||||
<LayersControl position="topright" collapsed={false}>
|
||||
{TILE_LAYERS.map((layer) => (
|
||||
<LayersControl.BaseLayer
|
||||
key={layer.id}
|
||||
name={layer.label}
|
||||
checked={layer.id === selectedLayerId}
|
||||
>
|
||||
<TileLayer
|
||||
url={layer.url}
|
||||
attribution={layer.attribution}
|
||||
maxZoom={layer.maxZoom}
|
||||
/>
|
||||
</LayersControl.BaseLayer>
|
||||
))}
|
||||
</LayersControl>
|
||||
<LayerChangeWatcher onChange={handleLayerChange} />
|
||||
<MaxZoomByActiveLayer maxZoom={activeLayer.maxZoom ?? MAP_MAX_ZOOM} />
|
||||
<MapBoundsHandler contacts={mappableContacts} focusedContact={focusedContact} />
|
||||
|
||||
{/* Faint route lines for active packet paths */}
|
||||
@@ -839,7 +994,21 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
||||
🛜
|
||||
</span>
|
||||
)}
|
||||
{displayName}
|
||||
{onSelectContact ? (
|
||||
<button
|
||||
type="button"
|
||||
className="p-0 bg-transparent border-0 font-inherit text-primary underline hover:text-primary/80 cursor-pointer"
|
||||
onClick={(event) => {
|
||||
event.stopPropagation();
|
||||
onSelectContact(contact);
|
||||
}}
|
||||
title={`Open conversation with ${displayName}`}
|
||||
>
|
||||
{displayName}
|
||||
</button>
|
||||
) : (
|
||||
displayName
|
||||
)}
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 mt-1">Last heard: {lastHeardLabel}</div>
|
||||
<div className="text-xs text-gray-400 mt-1 font-mono">
|
|
||||
type ReactNode,
|
||||
} from 'react';
|
||||
import type { Channel, Contact, Message, MessagePath, RadioConfig, RawPacket } from '../types';
|
||||
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
|
||||
import { CONTACT_TYPE_ROOM } from '../types';
|
||||
import { api } from '../api';
|
||||
import {
|
||||
findLinkedChannelReferences,
|
||||
@@ -808,12 +808,13 @@ export function MessageList({
|
||||
{sortedMessages.map((msg, index) => {
|
||||
// For DMs, look up contact; for channel messages, use parsed sender
|
||||
const contact = msg.type === 'PRIV' ? getContact(msg.conversation_key) : null;
|
||||
const isRepeater = contact?.type === CONTACT_TYPE_REPEATER;
|
||||
const isRoomServer = contact?.type === CONTACT_TYPE_ROOM;
|
||||
|
||||
// Skip sender parsing for repeater messages (CLI responses often have colons)
|
||||
// Only parse "sender: text" prefix for channel messages — DMs never carry
|
||||
// an in-text sender prefix, so parsing them would incorrectly strip
|
||||
// user text that happens to contain a colon (e.g. "TEST1: TEST2").
|
||||
const { sender, content } =
|
||||
isRepeater || (isRoomServer && msg.type === 'PRIV')
|
||||
msg.type === 'PRIV'
|
||||
? { sender: null, content: msg.text }
|
||||
: parseSenderFromText(msg.text);
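A hypothetical stand-in for `parseSenderFromText` (the real helper lives in `frontend/src/utils/messageParser.ts` and may behave differently) makes the reasoning in the comment above concrete: colon-prefix parsing is only safe on channel traffic.

```typescript
// Hypothetical sketch, not the real implementation: split "sender: text".
function parseSenderFromText(text: string): { sender: string | null; content: string } {
  const idx = text.indexOf(': ');
  if (idx <= 0) return { sender: null, content: text };
  return { sender: text.slice(0, idx), content: text.slice(idx + 2) };
}

parseSenderFromText('alice: see you at the node');
// { sender: 'alice', content: 'see you at the node' }  (correct for channel traffic)
parseSenderFromText('TEST1: TEST2');
// { sender: 'TEST1', content: 'TEST2' }  (wrong for a DM, which is why PRIV
// messages now skip the parse entirely)
```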
|
||||
const directSenderName =
|
||||
@@ -845,7 +846,8 @@ export function MessageList({
|
||||
isCorruptChannelMessage
|
||||
);
|
||||
const prevMsg = sortedMessages[index - 1];
|
||||
const prevParsedSender = prevMsg ? parseSenderFromText(prevMsg.text).sender : null;
|
||||
const prevParsedSender =
|
||||
prevMsg && prevMsg.type === 'CHAN' ? parseSenderFromText(prevMsg.text).sender : null;
|
||||
const prevSenderKey = prevMsg
|
||||
? getSenderKey(
|
||||
prevMsg,
@@ -2,6 +2,7 @@ import { useState, useEffect, type ReactNode } from 'react';
|
||||
import type {
|
||||
AppSettings,
|
||||
AppSettingsUpdate,
|
||||
Channel,
|
||||
Contact,
|
||||
HealthStatus,
|
||||
RadioAdvertMode,
|
||||
@@ -49,6 +50,7 @@ interface SettingsModalBaseProps {
|
||||
onToggleBlockedKey?: (key: string) => void;
|
||||
onToggleBlockedName?: (name: string) => void;
|
||||
contacts?: Contact[];
|
||||
channels?: Channel[];
|
||||
onBulkDeleteContacts?: (deletedKeys: string[]) => void;
|
||||
trackedTelemetryRepeaters?: string[];
|
||||
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
||||
@@ -86,6 +88,7 @@ export function SettingsModal(props: SettingsModalProps) {
|
||||
onToggleBlockedKey,
|
||||
onToggleBlockedName,
|
||||
contacts,
|
||||
channels,
|
||||
onBulkDeleteContacts,
|
||||
trackedTelemetryRepeaters,
|
||||
onToggleTrackedTelemetry,
|
||||
@@ -228,6 +231,8 @@ export function SettingsModal(props: SettingsModalProps) {
|
||||
{isSectionVisible('local') && (
|
||||
<SettingsLocalSection
|
||||
onLocalLabelChange={onLocalLabelChange}
|
||||
contacts={contacts}
|
||||
channels={channels}
|
||||
className={sectionContentClass}
|
||||
/>
|
||||
)}
@@ -265,6 +265,12 @@ export function Sidebar({
|
||||
const sortContactsByOrder = useCallback(
|
||||
(items: Contact[], order: SortOrder) =>
|
||||
[...items].sort((a, b) => {
|
||||
// Unread DM contacts always float to the top
|
||||
const unreadA = unreadCounts[getStateKey('contact', a.public_key)] || 0;
|
||||
const unreadB = unreadCounts[getStateKey('contact', b.public_key)] || 0;
|
||||
if (unreadA > 0 && unreadB === 0) return -1;
|
||||
if (unreadA === 0 && unreadB > 0) return 1;
|
||||
|
||||
if (order === 'recent') {
|
||||
const timeA = getContactRecentTime(a);
|
||||
const timeB = getContactRecentTime(b);
|
||||
@@ -274,7 +280,7 @@ export function Sidebar({
|
||||
}
|
||||
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
||||
}),
|
||||
[getContactRecentTime]
|
||||
[getContactRecentTime, unreadCounts]
|
||||
);
|
||||
|
||||
const sortRepeatersByOrder = useCallback(
@@ -12,13 +12,21 @@ import type { HealthStatus, RadioConfig } from '../types';
|
||||
import { api } from '../api';
|
||||
import { toast } from './ui/sonner';
|
||||
import { handleKeyboardActivate } from '../utils/a11y';
|
||||
import { applyTheme, getSavedTheme, THEME_CHANGE_EVENT } from '../utils/theme';
|
||||
import { applyTheme, getEffectiveTheme, THEME_CHANGE_EVENT } from '../utils/theme';
|
||||
import {
|
||||
BATTERY_DISPLAY_CHANGE_EVENT,
|
||||
getShowBatteryPercent,
|
||||
getShowBatteryVoltage,
|
||||
mvToPercent,
|
||||
} from '../utils/batteryDisplay';
|
||||
import {
|
||||
STATUS_DOT_PULSE_CHANGE_EVENT,
|
||||
STATUS_DOT_PULSE_DURATION_MS,
|
||||
STATUS_DOT_PULSE_PACKET_EVENT,
|
||||
getStatusDotPulseEnabled,
|
||||
pulseColorFor,
|
||||
type StatusDotPulseKind,
|
||||
} from '../utils/statusDotPulse';
|
||||
import { cn } from '@/lib/utils';
|
||||
|
||||
interface StatusBarProps {
|
||||
@@ -84,17 +92,71 @@ export function StatusBar({
|
||||
? 'Radio OK'
|
||||
: 'Radio Disconnected';
|
||||
const [reconnecting, setReconnecting] = useState(false);
|
||||
const [currentTheme, setCurrentTheme] = useState(getSavedTheme);
|
||||
// Track the *effective* theme (follow-os is resolved to original/light) so the
|
||||
// toggle icon and action match what the user currently sees rendered.
|
||||
const [currentTheme, setCurrentTheme] = useState(getEffectiveTheme);
|
||||
const [pulseEnabled, setPulseEnabled] = useState(getStatusDotPulseEnabled);
|
||||
const [pulseKind, setPulseKind] = useState<StatusDotPulseKind | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const handleThemeChange = (event: Event) => {
|
||||
const themeId = (event as CustomEvent<string>).detail;
|
||||
setCurrentTheme(typeof themeId === 'string' && themeId ? themeId : getSavedTheme());
|
||||
};
|
||||
const handler = () => setPulseEnabled(getStatusDotPulseEnabled());
|
||||
window.addEventListener(STATUS_DOT_PULSE_CHANGE_EVENT, handler);
|
||||
return () => window.removeEventListener(STATUS_DOT_PULSE_CHANGE_EVENT, handler);
|
||||
}, []);
|
||||
|
||||
window.addEventListener(THEME_CHANGE_EVENT, handleThemeChange as EventListener);
|
||||
useEffect(() => {
|
||||
if (!pulseEnabled) {
|
||||
setPulseKind(null);
|
||||
return;
|
||||
}
|
||||
let timer: number | null = null;
|
||||
const handler = (event: Event) => {
|
||||
const kind = (event as CustomEvent<StatusDotPulseKind>).detail;
|
||||
setPulseKind(kind);
|
||||
if (timer !== null) {
|
||||
window.clearTimeout(timer);
|
||||
}
|
||||
timer = window.setTimeout(() => {
|
||||
setPulseKind(null);
|
||||
timer = null;
|
||||
}, STATUS_DOT_PULSE_DURATION_MS);
|
||||
};
|
||||
window.addEventListener(STATUS_DOT_PULSE_PACKET_EVENT, handler);
|
||||
return () => {
|
||||
window.removeEventListener(THEME_CHANGE_EVENT, handleThemeChange as EventListener);
|
||||
window.removeEventListener(STATUS_DOT_PULSE_PACKET_EVENT, handler);
|
||||
if (timer !== null) {
|
||||
window.clearTimeout(timer);
|
||||
}
|
||||
};
|
||||
}, [pulseEnabled]);
|
||||
|
||||
useEffect(() => {
|
||||
const syncEffective = () => setCurrentTheme(getEffectiveTheme());
|
||||
window.addEventListener(THEME_CHANGE_EVENT, syncEffective);
|
||||
|
||||
// When saved theme is "follow-os", OS appearance changes alter the effective
|
||||
// theme without firing a THEME_CHANGE_EVENT, so also watch matchMedia.
|
||||
const mql =
|
||||
typeof window.matchMedia === 'function'
|
||||
? window.matchMedia('(prefers-color-scheme: light)')
|
||||
: null;
|
||||
if (mql) {
|
||||
if (typeof mql.addEventListener === 'function') {
|
||||
mql.addEventListener('change', syncEffective);
|
||||
} else if (typeof (mql as MediaQueryList).addListener === 'function') {
|
||||
(mql as MediaQueryList).addListener(syncEffective);
|
||||
}
|
||||
}
|
||||
|
||||
return () => {
|
||||
window.removeEventListener(THEME_CHANGE_EVENT, syncEffective);
|
||||
if (mql) {
|
||||
if (typeof mql.removeEventListener === 'function') {
|
||||
mql.removeEventListener('change', syncEffective);
|
||||
} else if (typeof (mql as MediaQueryList).removeListener === 'function') {
|
||||
(mql as MediaQueryList).removeListener(syncEffective);
|
||||
}
|
||||
}
|
||||
};
|
||||
}, []);
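`getEffectiveTheme` replaces `getSavedTheme` in the import above but is not shown in this diff. Going only by the comments here ("follow-os is resolved to original/light" plus the `prefers-color-scheme: light` watcher), a plausible sketch of the resolver; the actual code in `frontend/src/utils/theme.ts` may differ:

```typescript
// Plausible sketch only; would live in utils/theme.ts beside getSavedTheme.
export function getEffectiveTheme(): string {
  const saved = getSavedTheme(); // assumed to return ids like 'original' | 'light' | 'follow-os'
  if (saved !== 'follow-os') return saved;
  const prefersLight =
    typeof window.matchMedia === 'function' &&
    window.matchMedia('(prefers-color-scheme: light)').matches;
  return prefersLight ? 'light' : 'original';
}
```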
|
||||
|
||||
@@ -154,9 +216,12 @@ export function StatusBar({
|
||||
radioState === 'initializing' || radioState === 'connecting'
|
||||
? 'bg-warning'
|
||||
: connected
|
||||
? 'bg-status-connected shadow-[0_0_6px_hsl(var(--status-connected)/0.5)]'
|
||||
? pulseKind
|
||||
? ''
|
||||
: 'bg-status-connected shadow-[0_0_6px_hsl(var(--status-connected)/0.5)]'
|
||||
: 'bg-status-disconnected'
|
||||
)}
|
||||
style={connected && pulseKind ? { backgroundColor: pulseColorFor(pulseKind) } : undefined}
|
||||
aria-hidden="true"
|
||||
/>
|
||||
<span className="hidden lg:inline text-muted-foreground">{statusLabel}</span>
|
|
||||
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
||||
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
||||
|
||||
export function LppTelemetryPane({
|
||||
@@ -12,6 +13,7 @@ export function LppTelemetryPane({
|
||||
onRefresh: () => void;
|
||||
disabled?: boolean;
|
||||
}) {
|
||||
const { distanceUnit } = useDistanceUnit();
|
||||
return (
|
||||
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||
{!data ? (
|
||||
@@ -21,7 +23,7 @@ export function LppTelemetryPane({
|
||||
) : (
|
||||
<div className="space-y-0.5">
|
||||
{data.sensors.map((sensor, i) => (
|
||||
<LppSensorRow key={i} sensor={sensor} />
|
||||
<LppSensorRow key={i} sensor={sensor} unitPref={distanceUnit} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
|
||||
import { RepeaterPane, NotFetched, KvRow } from './repeaterPaneShared';
|
||||
import type { RepeaterOwnerInfoResponse, PaneState } from '../../types';
|
||||
|
||||
function LabeledBlock({ label, value }: { label: string; value: string }) {
|
||||
return (
|
||||
<div className="py-0.5">
|
||||
<span className="text-sm text-muted-foreground whitespace-nowrap">{label}</span>
|
||||
<p className="text-sm font-medium mt-0.5 break-words">{value}</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function OwnerInfoPane({
|
||||
data,
|
||||
state,
|
||||
@@ -17,8 +26,8 @@ export function OwnerInfoPane({
|
||||
{!data ? (
|
||||
<NotFetched />
|
||||
) : (
|
||||
<div className="break-all">
|
||||
<KvRow label="Owner Info" value={data.owner_info ?? '—'} />
|
||||
<div className="space-y-1">
|
||||
<LabeledBlock label="Owner Info" value={data.owner_info ?? '—'} />
|
||||
<KvRow label="Guest Password" value={data.guest_password ?? '—'} />
|
||||
</div>
|
||||
)}
|
|
||||
import { cn } from '@/lib/utils';
|
||||
import { Button } from '../ui/button';
|
||||
import { Separator } from '../ui/separator';
|
||||
import type { TelemetryHistoryEntry, Contact } from '../../types';
|
||||
import { lppDisplayUnit } from './repeaterPaneShared';
|
||||
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||
import type { TelemetryHistoryEntry, TelemetryLppSensor, Contact } from '../../types';
|
||||
|
||||
const MAX_TRACKED = 8;
|
||||
|
||||
type Metric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
||||
type BuiltinMetric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
||||
|
||||
const METRIC_CONFIG: Record<Metric, { label: string; unit: string; color: string }> = {
|
||||
interface MetricConfig {
|
||||
label: string;
|
||||
unit: string;
|
||||
color: string;
|
||||
}
|
||||
|
||||
const BUILTIN_METRIC_CONFIG: Record<BuiltinMetric, MetricConfig> = {
|
||||
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
||||
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
||||
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
||||
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
||||
};
|
||||
|
||||
const BUILTIN_METRICS: BuiltinMetric[] = Object.keys(BUILTIN_METRIC_CONFIG) as BuiltinMetric[];
|
||||
|
||||
// Stable color rotation for dynamic LPP sensors
|
||||
const LPP_COLORS = ['#ec4899', '#14b8a6', '#f97316', '#6366f1', '#84cc16', '#e11d48'];
|
||||
|
||||
/** Build a flat data key for an LPP sensor: lpp_{type_name}_ch{channel} */
|
||||
function lppKey(s: TelemetryLppSensor): string {
|
||||
return `lpp_${s.type_name}_ch${s.channel}`;
|
||||
}
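A quick illustration of the flat key built above; it is the name the chart data and `dataKeys` further down use to address an LPP series:

```typescript
// Illustrative only: a temperature reading on channel 1 charts under this dataKey.
const exampleSensor = { type_name: 'temperature', channel: 1, value: 21.5 } as TelemetryLppSensor;
lppKey(exampleSensor); // → 'lpp_temperature_ch1'
// (the cast keeps the example short; the real type may carry more fields)
```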
|
||||
|
||||
const TOOLTIP_STYLE = {
|
||||
contentStyle: {
|
||||
backgroundColor: 'hsl(var(--popover))',
|
||||
@@ -66,18 +84,62 @@ export function TelemetryHistoryPane({
|
||||
trackedTelemetryRepeaters,
|
||||
onToggleTrackedTelemetry,
|
||||
}: TelemetryHistoryPaneProps) {
|
||||
const [metric, setMetric] = useState<Metric>('battery_volts');
|
||||
const { distanceUnit } = useDistanceUnit();
|
||||
const [metric, setMetric] = useState<string>('battery_volts');
|
||||
const [toggling, setToggling] = useState(false);
|
||||
|
||||
const isTracked = trackedTelemetryRepeaters.includes(publicKey);
|
||||
const slotsFull = trackedTelemetryRepeaters.length >= MAX_TRACKED && !isTracked;
|
||||
|
||||
const config = METRIC_CONFIG[metric];
|
||||
// Discover unique LPP sensors across all history entries
|
||||
const lppMetrics = useMemo(() => {
|
||||
const seen = new Map<string, { type_name: string; channel: number }>();
|
||||
for (const e of entries) {
|
||||
for (const s of e.data.lpp_sensors ?? []) {
|
||||
const k = lppKey(s);
|
||||
if (!seen.has(k)) seen.set(k, { type_name: s.type_name, channel: s.channel });
|
||||
}
|
||||
}
|
||||
const result: { key: string; config: MetricConfig; type_name: string; channel: number }[] = [];
|
||||
let colorIdx = 0;
|
||||
for (const [k, info] of seen) {
|
||||
const label =
|
||||
info.type_name.charAt(0).toUpperCase() +
|
||||
info.type_name.slice(1).replace(/_/g, ' ') +
|
||||
` Ch${info.channel}`;
|
||||
const { unit } = lppDisplayUnit(info.type_name, 0, distanceUnit);
|
||||
result.push({
|
||||
key: k,
|
||||
config: { label, unit, color: LPP_COLORS[colorIdx % LPP_COLORS.length] },
|
||||
type_name: info.type_name,
|
||||
channel: info.channel,
|
||||
});
|
||||
colorIdx++;
|
||||
}
|
||||
return result;
|
||||
}, [entries, distanceUnit]);
|
||||
|
||||
const allMetricKeys = useMemo(
|
||||
() => [...BUILTIN_METRICS, ...lppMetrics.map((m) => m.key)],
|
||||
[lppMetrics]
|
||||
);
|
||||
|
||||
// If the selected metric disappears (e.g. different repeater), reset to default
|
||||
const activeMetric = allMetricKeys.includes(metric) ? metric : 'battery_volts';
|
||||
|
||||
const isBuiltin = BUILTIN_METRICS.includes(activeMetric as BuiltinMetric);
|
||||
const activeConfig: MetricConfig = isBuiltin
|
||||
? BUILTIN_METRIC_CONFIG[activeMetric as BuiltinMetric]
|
||||
: (lppMetrics.find((m) => m.key === activeMetric)?.config ?? {
|
||||
label: activeMetric,
|
||||
unit: '',
|
||||
color: '#888',
|
||||
});
|
||||
|
||||
const chartData = useMemo(() => {
|
||||
return entries.map((e) => {
|
||||
const d = e.data;
|
||||
return {
|
||||
const point: Record<string, number | undefined> = {
|
||||
timestamp: e.timestamp,
|
||||
battery_volts: d.battery_volts,
|
||||
noise_floor_dbm: d.noise_floor_dbm,
|
||||
@@ -85,19 +147,27 @@ export function TelemetryHistoryPane({
|
||||
packets_sent: d.packets_sent,
|
||||
uptime_seconds: d.uptime_seconds,
|
||||
};
|
||||
// Flatten LPP sensors into the point, converting units as needed
|
||||
for (const s of d.lpp_sensors ?? []) {
|
||||
if (typeof s.value === 'number') {
|
||||
point[lppKey(s)] = lppDisplayUnit(s.type_name, s.value, distanceUnit).value;
|
||||
}
|
||||
}
|
||||
return point;
|
||||
});
|
||||
}, [entries]);
|
||||
}, [entries, distanceUnit]);
|
||||
|
||||
const dataKeys = metric === 'packets' ? ['packets_received', 'packets_sent'] : [metric];
|
||||
const dataKeys =
|
||||
activeMetric === 'packets' ? ['packets_received', 'packets_sent'] : [activeMetric];
|
||||
|
||||
const yDomain = useMemo<[number, number] | undefined>(() => {
|
||||
if (metric !== 'battery_volts' || chartData.length === 0) return undefined;
|
||||
if (activeMetric !== 'battery_volts' || chartData.length === 0) return undefined;
|
||||
const values = chartData.map((d) => d.battery_volts).filter((v) => v != null) as number[];
|
||||
if (values.length === 0) return [3, 5];
|
||||
const lo = Math.min(...values);
|
||||
const hi = Math.max(...values);
|
||||
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
||||
}, [metric, chartData]);
|
||||
}, [activeMetric, chartData]);
|
||||
|
||||
const handleToggle = async () => {
|
||||
setToggling(true);
|
||||
@@ -181,20 +251,35 @@ export function TelemetryHistoryPane({
|
||||
<Separator className="mb-3" />
|
||||
|
||||
{/* Metric selector */}
|
||||
<div className="flex gap-1 mb-2">
|
||||
{(Object.keys(METRIC_CONFIG) as Metric[]).map((m) => (
|
||||
<div className="flex flex-wrap gap-1 mb-2">
|
||||
{BUILTIN_METRICS.map((m) => (
|
||||
<button
|
||||
key={m}
|
||||
type="button"
|
||||
onClick={() => setMetric(m)}
|
||||
className={cn(
|
||||
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||
metric === m
|
||||
activeMetric === m
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||
)}
|
||||
>
|
||||
{METRIC_CONFIG[m].label}
|
||||
{BUILTIN_METRIC_CONFIG[m].label}
|
||||
</button>
|
||||
))}
|
||||
{lppMetrics.map((m) => (
|
||||
<button
|
||||
key={m.key}
|
||||
type="button"
|
||||
onClick={() => setMetric(m.key)}
|
||||
className={cn(
|
||||
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||
activeMetric === m.key
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||
)}
|
||||
>
|
||||
{m.config.label}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
@@ -221,7 +306,9 @@ export function TelemetryHistoryPane({
|
||||
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
tickFormatter={(v) => (metric === 'uptime_seconds' ? formatUptime(v) : `${v}`)}
|
||||
tickFormatter={(v) =>
|
||||
activeMetric === 'uptime_seconds' ? formatUptime(v) : `${v}`
|
||||
}
|
||||
/>
|
||||
<RechartsTooltip
|
||||
{...TOOLTIP_STYLE}
|
||||
@@ -234,15 +321,20 @@ export function TelemetryHistoryPane({
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
formatter={(value: any, name: any) => {
|
||||
const numVal = typeof value === 'number' ? value : Number(value);
|
||||
const display = metric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
||||
const display =
|
||||
activeMetric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
||||
const suffix =
|
||||
metric === 'uptime_seconds' ? '' : config.unit ? ` ${config.unit}` : '';
|
||||
activeMetric === 'uptime_seconds'
|
||||
? ''
|
||||
: activeConfig.unit
|
||||
? ` ${activeConfig.unit}`
|
||||
: '';
|
||||
const label =
|
||||
metric === 'packets'
|
||||
activeMetric === 'packets'
|
||||
? name === 'packets_received'
|
||||
? 'Received'
|
||||
: 'Sent'
|
||||
: config.label;
|
||||
: activeConfig.label;
|
||||
return [`${display}${suffix}`, label];
|
||||
}}
|
||||
/>
|
||||
@@ -251,19 +343,41 @@ export function TelemetryHistoryPane({
|
||||
key={key}
|
||||
type="linear"
|
||||
dataKey={key}
|
||||
stroke={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
||||
fill={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
||||
stroke={
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color
|
||||
}
|
||||
fill={
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color
|
||||
}
|
||||
fillOpacity={0.15}
|
||||
strokeWidth={1.5}
|
||||
dot={{
|
||||
r: 4,
|
||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
||||
fill:
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color,
|
||||
strokeWidth: 1.5,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
||||
activeDot={{
|
||||
r: 6,
|
||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
||||
fill:
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color,
|
||||
strokeWidth: 2,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
|
||||
import type { ReactNode } from 'react';
|
||||
import { Separator } from '../ui/separator';
|
||||
import { RepeaterPane, NotFetched, KvRow, formatDuration } from './repeaterPaneShared';
|
||||
import type { RepeaterStatusResponse, PaneState } from '../../types';
|
||||
|
||||
function Secondary({ children }: { children: ReactNode }) {
|
||||
return <span className="ml-1.5 font-normal text-muted-foreground">{children}</span>;
|
||||
}
|
||||
|
||||
function formatAirtimePercent(airtimeSec: number, uptimeSec: number): string | null {
|
||||
if (uptimeSec <= 0) return null;
|
||||
return `${((airtimeSec / uptimeSec) * 100).toFixed(2)}%`;
|
||||
}
|
||||
|
||||
function formatPerMinute(count: number, uptimeSec: number): string | null {
|
||||
if (uptimeSec <= 0) return null;
|
||||
const rate = (count * 60) / uptimeSec;
|
||||
return rate >= 10 ? rate.toFixed(0) : rate.toFixed(1);
|
||||
}
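Worked examples for the two helpers above, with illustrative numbers:

```typescript
formatAirtimePercent(864, 86_400);  // "1.00%"  (864 s of TX airtime over 24 h of uptime)
formatAirtimePercent(100, 0);       // null     (guards a divide-by-zero on a fresh boot)
formatPerMinute(4_320, 86_400);     // "3.0"    (4,320 packets over 24 h is about 3 per minute)
formatPerMinute(20_000, 86_400);    // "14"     (rates of 10 or more drop the decimal)
```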
|
||||
|
||||
export function TelemetryPane({
|
||||
data,
|
||||
state,
|
||||
@@ -13,6 +29,11 @@ export function TelemetryPane({
|
||||
onRefresh: () => void;
|
||||
disabled?: boolean;
|
||||
}) {
|
||||
const txPct = data ? formatAirtimePercent(data.airtime_seconds, data.uptime_seconds) : null;
|
||||
const rxPct = data ? formatAirtimePercent(data.rx_airtime_seconds, data.uptime_seconds) : null;
|
||||
const rxPerMin = data ? formatPerMinute(data.packets_received, data.uptime_seconds) : null;
|
||||
const txPerMin = data ? formatPerMinute(data.packets_sent, data.uptime_seconds) : null;
|
||||
|
||||
return (
|
||||
<RepeaterPane title="Telemetry" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||
{!data ? (
|
||||
@@ -21,8 +42,24 @@ export function TelemetryPane({
|
||||
<div className="space-y-2">
|
||||
<KvRow label="Battery" value={`${data.battery_volts.toFixed(3)}V`} />
|
||||
<KvRow label="Uptime" value={formatDuration(data.uptime_seconds)} />
|
||||
<KvRow label="TX Airtime" value={formatDuration(data.airtime_seconds)} />
|
||||
<KvRow label="RX Airtime" value={formatDuration(data.rx_airtime_seconds)} />
|
||||
<KvRow
|
||||
label="TX Airtime"
|
||||
value={
|
||||
<>
|
||||
{formatDuration(data.airtime_seconds)}
|
||||
{txPct && <Secondary>({txPct})</Secondary>}
|
||||
</>
|
||||
}
|
||||
/>
|
||||
<KvRow
|
||||
label="RX Airtime"
|
||||
value={
|
||||
<>
|
||||
{formatDuration(data.rx_airtime_seconds)}
|
||||
{rxPct && <Secondary>({rxPct})</Secondary>}
|
||||
</>
|
||||
}
|
||||
/>
|
||||
<Separator className="my-1" />
|
||||
<KvRow label="Noise Floor" value={`${data.noise_floor_dbm} dBm`} />
|
||||
<KvRow label="Last RSSI" value={`${data.last_rssi_dbm} dBm`} />
|
||||
@@ -30,7 +67,17 @@ export function TelemetryPane({
|
||||
<Separator className="my-1" />
|
||||
<KvRow
|
||||
label="Packets"
|
||||
value={`${data.packets_received.toLocaleString()} rx / ${data.packets_sent.toLocaleString()} tx`}
|
||||
value={
|
||||
<>
|
||||
{data.packets_received.toLocaleString()} rx / {data.packets_sent.toLocaleString()}{' '}
|
||||
tx
|
||||
{rxPerMin && txPerMin && (
|
||||
<Secondary>
|
||||
(avg {rxPerMin} rx/min / {txPerMin} tx/min)
|
||||
</Secondary>
|
||||
)}
|
||||
</>
|
||||
}
|
||||
/>
|
||||
<KvRow
|
||||
label="Flood"
|
|
||||
colour: '',
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the display unit and converted value for an LPP sensor,
|
||||
* respecting the user's unit preference for temperature.
|
||||
*/
|
||||
export function lppDisplayUnit(
|
||||
typeName: string,
|
||||
value: number,
|
||||
unitPref: 'metric' | 'imperial' | string
|
||||
): { unit: string; value: number } {
|
||||
if (typeName === 'temperature' && unitPref === 'imperial') {
|
||||
return { unit: '°F', value: (value * 9) / 5 + 32 };
|
||||
}
|
||||
return { unit: LPP_UNIT_MAP[typeName] ?? '', value };
|
||||
}
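Quick usage check for the conversion above; the `LPP_UNIT_MAP` entries shown in the comments are assumptions about that map's contents:

```typescript
// Imperial preference converts temperature only: 21.5 °C becomes 70.7 °F.
lppDisplayUnit('temperature', 21.5, 'imperial'); // { unit: '°F', value: 70.7 }
// Metric (or any other) preference passes the value straight through.
lppDisplayUnit('temperature', 21.5, 'metric');   // { unit: '°C', value: 21.5 }  (assuming the map has '°C')
// Non-temperature sensors never convert, regardless of preference.
lppDisplayUnit('humidity', 40, 'imperial');      // { unit: '%', value: 40 }     (assuming the map has '%')
```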
|
||||
|
||||
export function formatLppLabel(typeName: string): string {
|
||||
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
||||
}
|
||||
|
||||
export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
||||
export function LppSensorRow({ sensor, unitPref }: { sensor: LppSensor; unitPref?: string }) {
|
||||
const label = formatLppLabel(sensor.type_name);
|
||||
|
||||
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
||||
@@ -248,10 +263,10 @@ export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
||||
);
|
||||
}
|
||||
|
||||
const unit = LPP_UNIT_MAP[sensor.type_name] ?? '';
|
||||
const display = lppDisplayUnit(sensor.type_name, sensor.value as number, unitPref ?? 'metric');
|
||||
const formatted =
|
||||
typeof sensor.value === 'number'
|
||||
? `${sensor.value % 1 === 0 ? sensor.value : sensor.value.toFixed(2)}${unit ? ` ${unit}` : ''}`
|
||||
? `${display.value % 1 === 0 ? display.value : display.value.toFixed(2)}${display.unit ? ` ${display.unit}` : ''}`
|
||||
: String(sensor.value);
|
||||
|
||||
return <KvRow label={label} value={formatted} />;
|
|
||||
import { toast } from '../ui/sonner';
|
||||
import { api } from '../../api';
|
||||
import { formatTime } from '../../utils/messageParser';
|
||||
import { lppDisplayUnit } from '../repeater/repeaterPaneShared';
|
||||
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||
import { BulkDeleteContactsModal } from './BulkDeleteContactsModal';
|
||||
import type {
|
||||
AppSettings,
|
||||
@@ -13,6 +15,7 @@ import type {
|
||||
Contact,
|
||||
HealthStatus,
|
||||
TelemetryHistoryEntry,
|
||||
TelemetrySchedule,
|
||||
} from '../../types';
|
||||
|
||||
export function SettingsDatabaseSection({
|
||||
@@ -44,6 +47,7 @@ export function SettingsDatabaseSection({
|
||||
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
||||
className?: string;
|
||||
}) {
|
||||
const { distanceUnit } = useDistanceUnit();
|
||||
const [retentionDays, setRetentionDays] = useState('14');
|
||||
const [cleaning, setCleaning] = useState(false);
|
||||
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
|
||||
@@ -51,19 +55,45 @@ export function SettingsDatabaseSection({
|
||||
const [discoveryBlockedTypes, setDiscoveryBlockedTypes] = useState<number[]>([]);
|
||||
const [bulkDeleteOpen, setBulkDeleteOpen] = useState(false);
|
||||
|
||||
const [busy, setBusy] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const [latestTelemetry, setLatestTelemetry] = useState<
|
||||
Record<string, TelemetryHistoryEntry | null>
|
||||
>({});
|
||||
const telemetryFetchedRef = useRef(false);
|
||||
|
||||
const [schedule, setSchedule] = useState<TelemetrySchedule | null>(null);
|
||||
const [intervalDraft, setIntervalDraft] = useState<number>(appSettings.telemetry_interval_hours);
|
||||
|
||||
// Serialization chain for every auto-persisted control on this page.
|
||||
// Without this, rapid successive toggles (or mixed dropdown + checkbox
|
||||
// interactions) can dispatch overlapping PATCHes that land out of order
|
||||
// on HTTP/2 — a stale write then wins, reverting the user's last click.
|
||||
// Each call awaits the previous one before sending its request, so the
|
||||
// server sees updates in the order the user made them.
|
||||
const saveChainRef = useRef<Promise<void>>(Promise.resolve());
|
||||
|
||||
useEffect(() => {
|
||||
setAutoDecryptOnAdvert(appSettings.auto_decrypt_dm_on_advert);
|
||||
setDiscoveryBlockedTypes(appSettings.discovery_blocked_types ?? []);
|
||||
setIntervalDraft(appSettings.telemetry_interval_hours);
|
||||
}, [appSettings]);
|
||||
|
||||
// Re-fetch the scheduler derivation whenever the tracked list changes or
|
||||
// the stored preference changes. Cheap: single GET, no radio lock.
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
api
|
||||
.getTelemetrySchedule()
|
||||
.then((s) => {
|
||||
if (!cancelled) setSchedule(s);
|
||||
})
|
||||
.catch(() => {
|
||||
// Non-critical: dropdown falls back to the unfiltered menu.
|
||||
});
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [trackedTelemetryRepeaters.length, appSettings.telemetry_interval_hours]);
|
||||
|
||||
useEffect(() => {
|
||||
if (trackedTelemetryRepeaters.length === 0 || telemetryFetchedRef.current) return;
|
||||
telemetryFetchedRef.current = true;
|
||||
@@ -129,28 +159,26 @@ export function SettingsDatabaseSection({
|
||||
}
|
||||
};
|
||||
|
||||
const handleSave = async () => {
|
||||
setBusy(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const update: AppSettingsUpdate = { auto_decrypt_dm_on_advert: autoDecryptOnAdvert };
|
||||
const currentBlocked = appSettings.discovery_blocked_types ?? [];
|
||||
if (
|
||||
discoveryBlockedTypes.length !== currentBlocked.length ||
|
||||
discoveryBlockedTypes.some((t) => !currentBlocked.includes(t))
|
||||
) {
|
||||
update.discovery_blocked_types = discoveryBlockedTypes;
|
||||
/**
|
||||
* Apply an AppSettings PATCH after any already-queued saves finish, and
|
||||
* revert local state if the save fails. Every auto-persist control on
|
||||
* this page routes through here so the user-visible order of clicks is
|
||||
* the order the backend sees, regardless of network reordering.
|
||||
*/
|
||||
const persistAppSettings = (update: AppSettingsUpdate, revert: () => void): Promise<void> => {
|
||||
const chained = saveChainRef.current.then(async () => {
|
||||
try {
|
||||
await onSaveAppSettings(update);
|
||||
} catch (err) {
|
||||
console.error('Failed to save database settings:', err);
|
||||
revert();
|
||||
toast.error('Failed to save setting', {
|
||||
description: err instanceof Error ? err.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
await onSaveAppSettings(update);
|
||||
toast.success('Database settings saved');
|
||||
} catch (err) {
|
||||
console.error('Failed to save database settings:', err);
|
||||
setError(err instanceof Error ? err.message : 'Failed to save');
|
||||
toast.error('Failed to save settings');
|
||||
} finally {
|
||||
setBusy(false);
|
||||
}
|
||||
});
|
||||
saveChainRef.current = chained;
|
||||
return chained;
|
||||
};
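The same click-order guarantee could be lifted into a tiny framework-free helper if other settings pages grow the same overlapping-PATCH problem. A sketch only, not part of this change:

```typescript
// Runs queued tasks strictly one after another, in submission order.
// A failed task still rejects for its own caller but does not block the queue.
export function createSerialQueue() {
  let tail: Promise<void> = Promise.resolve();
  return function enqueue(task: () => Promise<void>): Promise<void> {
    const run = tail.then(() => task());
    tail = run.catch(() => {}); // keep the chain alive past a rejection
    return run;
  };
}

// const enqueueSave = createSerialQueue();
// enqueueSave(() => onSaveAppSettings({ auto_decrypt_dm_on_advert: true }));
```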
|
||||
|
||||
return (
|
||||
@@ -246,7 +274,14 @@ export function SettingsDatabaseSection({
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={autoDecryptOnAdvert}
|
||||
onChange={(e) => setAutoDecryptOnAdvert(e.target.checked)}
|
||||
onChange={(e) => {
|
||||
const next = e.target.checked;
|
||||
const prev = autoDecryptOnAdvert;
|
||||
setAutoDecryptOnAdvert(next);
|
||||
void persistAppSettings({ auto_decrypt_dm_on_advert: next }, () =>
|
||||
setAutoDecryptOnAdvert(prev)
|
||||
);
|
||||
}}
|
||||
className="w-4 h-4 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="text-sm">Auto-decrypt historical DMs when new contact advertises</span>
|
||||
@@ -263,10 +298,61 @@ export function SettingsDatabaseSection({
|
||||
<div className="space-y-3">
|
||||
<Label className="text-base">Tracked Repeater Telemetry</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Repeaters opted into automatic telemetry collection are polled every 8 hours. Up to 8
|
||||
repeaters may be tracked at a time ({trackedTelemetryRepeaters.length} / 8 slots used).
|
||||
Repeaters opted into automatic telemetry collection are polled on a scheduled interval. To
|
||||
limit mesh traffic, the app caps telemetry at 24 checks per day across all tracked
|
||||
repeaters — so fewer tracked repeaters allow shorter intervals, and more tracked
|
||||
repeaters force longer ones. Up to {schedule?.max_tracked ?? 8} repeaters may be tracked
|
||||
at once ({trackedTelemetryRepeaters.length} / {schedule?.max_tracked ?? 8} slots used).
|
||||
</p>
|
||||
|
||||
{/* Interval picker. Legal options depend on current tracked count;
|
||||
we list only those. If the saved preference is no longer legal,
|
||||
the effective interval is shown below so the user knows what the
|
||||
scheduler is actually using. */}
|
||||
<div className="space-y-1.5">
|
||||
<Label htmlFor="telemetry-interval" className="text-sm">
|
||||
Collection interval
|
||||
</Label>
|
||||
<div className="flex items-center gap-2">
|
||||
<select
|
||||
id="telemetry-interval"
|
||||
value={intervalDraft}
|
||||
onChange={(e) => {
|
||||
const nextValue = Number(e.target.value);
|
||||
if (!Number.isFinite(nextValue) || nextValue === intervalDraft) return;
|
||||
const prevValue = intervalDraft;
|
||||
setIntervalDraft(nextValue);
|
||||
void persistAppSettings({ telemetry_interval_hours: nextValue }, () =>
|
||||
setIntervalDraft(prevValue)
|
||||
);
|
||||
}}
|
||||
className="h-9 px-3 rounded-md border border-input bg-background text-sm ring-offset-background focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2"
|
||||
>
|
||||
{(schedule?.options ?? [1, 2, 3, 4, 6, 8, 12, 24]).map((hrs) => (
|
||||
<option key={hrs} value={hrs}>
|
||||
Every {hrs} hour{hrs === 1 ? '' : 's'} ({Math.floor(24 / hrs)} check
|
||||
{Math.floor(24 / hrs) === 1 ? '' : 's'}/day)
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
{schedule && schedule.effective_hours !== schedule.preferred_hours && (
|
||||
<p className="text-xs text-warning">
|
||||
Saved preference is {schedule.preferred_hours} hour
|
||||
{schedule.preferred_hours === 1 ? '' : 's'}, but the scheduler is using{' '}
|
||||
{schedule.effective_hours} hours because {schedule.tracked_count} repeater
|
||||
{schedule.tracked_count === 1 ? '' : 's'}{' '}
|
||||
{schedule.tracked_count === 1 ? 'is' : 'are'} tracked. Your preference will be
|
||||
restored if you drop back to a supported count.
|
||||
</p>
|
||||
)}
|
||||
{schedule?.next_run_at != null && (
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Next run at {formatTime(schedule.next_run_at)} (UTC top of hour).
|
||||
</p>
|
||||
)}
|
||||
</div>
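The options listed in the `<select>` above come back from `api.getTelemetrySchedule()`, so the real rule lives server-side. Under the 24-checks-per-day cap described in the paragraph above, one plausible derivation, assuming one poll per tracked repeater per run (an inference from the UI copy, not the backend's code):

```typescript
const ALL_INTERVAL_HOURS = [1, 2, 3, 4, 6, 8, 12, 24];

function legalIntervalHours(trackedCount: number, dailyCheckCap = 24): number[] {
  return ALL_INTERVAL_HOURS.filter((hours) => {
    const runsPerDay = 24 / hours;                        // scheduler fires at the UTC top of hour
    const checksPerDay = runsPerDay * Math.max(trackedCount, 1);
    return checksPerDay <= dailyCheckCap;
  });
}

legalIntervalHours(1); // [1, 2, 3, 4, 6, 8, 12, 24]: one repeater can be polled hourly
legalIntervalHours(8); // [8, 12, 24]: eight tracked repeaters force at least 8-hour runs
```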
|
||||
|
||||
{trackedTelemetryRepeaters.length === 0 ? (
|
||||
<p className="text-sm text-muted-foreground italic">
|
||||
No repeaters are being tracked. Enable tracking from a repeater's dashboard.
|
||||
@@ -308,6 +394,22 @@ export function SettingsDatabaseSection({
|
||||
<span>
|
||||
tx {d.packets_sent != null ? d.packets_sent.toLocaleString() : '?'}
|
||||
</span>
|
||||
{d.lpp_sensors?.map((s) => {
|
||||
const display = lppDisplayUnit(s.type_name, s.value, distanceUnit);
|
||||
const val =
|
||||
typeof display.value === 'number'
|
||||
? display.value % 1 === 0
|
||||
? display.value
|
||||
: display.value.toFixed(1)
|
||||
: display.value;
|
||||
const label = s.type_name.charAt(0).toUpperCase() + s.type_name.slice(1);
|
||||
return (
|
||||
<span key={`${s.type_name}-${s.channel}`}>
|
||||
{label} {val}
|
||||
{display.unit ? ` ${display.unit}` : ''}
|
||||
</span>
|
||||
);
|
||||
})}
|
||||
<span className="ml-auto">checked {formatTime(snap.timestamp)}</span>
|
||||
</div>
|
||||
) : snap === null ? (
|
||||
@@ -322,16 +424,6 @@ export function SettingsDatabaseSection({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="text-sm text-destructive" role="alert">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<Button onClick={handleSave} disabled={busy} className="w-full">
|
||||
{busy ? 'Saving...' : 'Save Settings'}
|
||||
</Button>
|
||||
|
||||
<Separator />
|
||||
|
||||
{/* ── Contact Management ── */}
|
||||
@@ -361,11 +453,14 @@ export function SettingsDatabaseSection({
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={checked}
|
||||
onChange={() =>
|
||||
setDiscoveryBlockedTypes((prev) =>
|
||||
checked ? prev.filter((t) => t !== typeCode) : [...prev, typeCode]
|
||||
)
|
||||
}
|
||||
onChange={() => {
|
||||
const prev = discoveryBlockedTypes;
|
||||
const next = checked ? prev.filter((t) => t !== typeCode) : [...prev, typeCode];
|
||||
setDiscoveryBlockedTypes(next);
|
||||
void persistAppSettings({ discovery_blocked_types: next }, () =>
|
||||
setDiscoveryBlockedTypes(prev)
|
||||
);
|
||||
}}
|
||||
className="rounded border-input"
|
||||
/>
|
||||
{label}
|
|
||||
label: 'Map Upload',
|
||||
section: 'Community Sharing',
|
||||
description:
|
||||
'Upload repeaters and room servers to map.meshcore.dev or a compatible map API endpoint.',
|
||||
'Upload repeaters and room servers to map.meshcore.io or a compatible map API endpoint.',
|
||||
defaultName: 'Map Upload',
|
||||
nameMode: 'counted',
|
||||
defaults: {
|
||||
@@ -1004,6 +1004,11 @@ function MqttHaConfigEditor({
|
||||
<li>
|
||||
<code className="text-[0.6875rem]">sensor.meshcore_*_uptime</code> (seconds)
|
||||
</li>
|
||||
<li>
|
||||
<code className="text-[0.6875rem]">sensor.meshcore_*_lpp_temperature_ch*</code>,{' '}
|
||||
<code className="text-[0.6875rem]">*_lpp_humidity_ch*</code>, etc. —
|
||||
CayenneLPP sensors (auto-detected from repeater)
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -1663,12 +1668,12 @@ function MapUploadConfigEditor({
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Automatically upload heard repeater and room server advertisements to{' '}
|
||||
<a
|
||||
href="https://map.meshcore.dev"
|
||||
href="https://map.meshcore.io"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="underline hover:text-foreground"
|
||||
>
|
||||
map.meshcore.dev
|
||||
map.meshcore.io
|
||||
</a>
|
||||
. Requires the radio's private key to be available (firmware must have{' '}
|
||||
<code>ENABLE_PRIVATE_KEY_EXPORT=1</code>). Only raw RF packets are shared — never
|
||||
@@ -1705,12 +1710,12 @@ function MapUploadConfigEditor({
|
||||
<Input
|
||||
id="fanout-map-api-url"
|
||||
type="url"
|
||||
placeholder="https://map.meshcore.dev/api/v1/uploader/node"
|
||||
placeholder="https://map.meshcore.io/api/v1/uploader/node"
|
||||
value={(config.api_url as string) || ''}
|
||||
onChange={(e) => onChange({ ...config, api_url: e.target.value })}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Leave blank to use the default <code>map.meshcore.dev</code> endpoint.
|
||||
Leave blank to use the default <code>map.meshcore.io</code> endpoint.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -1806,6 +1811,162 @@ function getFilterKeys(filter: unknown): string[] {
  return [];
}

const MAX_SCOPE_PILL_DISPLAY = 32;

interface PillsSearchListItem {
  key: string;
  label: string;
  /** Optional trailing monospace hint (e.g. pubkey prefix) */
  trailing?: string;
}

/**
 * Search-and-pills picker for the generic fanout scope selector.
 * Shows selected items as removable pills (up to MAX_SCOPE_PILL_DISPLAY),
 * a search input, and a scrollable list of filtered items with checkboxes.
 * When more than MAX_SCOPE_PILL_DISPLAY items are selected, the pill row
 * collapses to a single informational badge to keep the interface clean.
 */
function PillsSearchList({
|
||||
label,
|
||||
labelSuffix,
|
||||
items,
|
||||
selectedKeys,
|
||||
onToggle,
|
||||
onAll,
|
||||
onNone,
|
||||
searchPlaceholder,
|
||||
emptyItemsMessage,
|
||||
}: {
|
||||
label: string;
|
||||
labelSuffix: string;
|
||||
items: PillsSearchListItem[];
|
||||
selectedKeys: string[];
|
||||
onToggle: (key: string) => void;
|
||||
onAll: () => void;
|
||||
onNone: () => void;
|
||||
searchPlaceholder: string;
|
||||
emptyItemsMessage: string;
|
||||
}) {
|
||||
const [search, setSearch] = useState('');
|
||||
const searchLower = search.toLowerCase().trim();
|
||||
|
||||
const filtered = useMemo(() => {
|
||||
const matches = items.filter((it) => {
|
||||
if (!searchLower) return true;
|
||||
return (
|
||||
it.label.toLowerCase().includes(searchLower) || it.key.toLowerCase().startsWith(searchLower)
|
||||
);
|
||||
});
|
||||
// Selected items sort to top (mirrors the Home Assistant tracked-contacts picker)
|
||||
return matches.sort((a, b) => {
|
||||
const aSel = selectedKeys.includes(a.key) ? 0 : 1;
|
||||
const bSel = selectedKeys.includes(b.key) ? 0 : 1;
|
||||
if (aSel !== bSel) return aSel - bSel;
|
||||
return a.label.localeCompare(b.label);
|
||||
});
|
||||
}, [items, searchLower, selectedKeys]);
|
||||
|
||||
const selectedDetails = useMemo(
|
||||
() => items.filter((it) => selectedKeys.includes(it.key)),
|
||||
[items, selectedKeys]
|
||||
);
|
||||
const overPillLimit = selectedDetails.length > MAX_SCOPE_PILL_DISPLAY;
|
||||
|
||||
return (
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label className="text-xs">
|
||||
{label} <span className="text-muted-foreground font-normal">({labelSuffix})</span>
|
||||
</Label>
|
||||
<span className="flex gap-1">
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={onAll}
|
||||
>
|
||||
All
|
||||
</button>
|
||||
<span className="text-xs text-muted-foreground">/</span>
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={onNone}
|
||||
>
|
||||
None
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{selectedDetails.length > 0 && (
|
||||
<div className="flex flex-wrap gap-1.5">
|
||||
{overPillLimit ? (
|
||||
<span className="inline-flex items-center text-[0.6875rem] px-2 py-0.5 rounded-full bg-muted text-muted-foreground">
|
||||
>{MAX_SCOPE_PILL_DISPLAY} selections made; hiding selection preview to keep the
|
||||
interface clean
|
||||
</span>
|
||||
) : (
|
||||
selectedDetails.map((it) => (
|
||||
<span
|
||||
key={it.key}
|
||||
className="inline-flex items-center gap-1 text-[0.6875rem] px-2 py-0.5 rounded-full bg-primary/10 text-primary"
|
||||
>
|
||||
{it.label}
|
||||
<button
|
||||
type="button"
|
||||
className="ml-0.5 hover:text-destructive transition-colors"
|
||||
onClick={() => onToggle(it.key)}
|
||||
aria-label={`Remove ${it.label}`}
|
||||
>
|
||||
×
|
||||
</button>
|
||||
</span>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{items.length === 0 ? (
|
||||
<p className="text-xs text-muted-foreground italic">{emptyItemsMessage}</p>
|
||||
) : (
|
||||
<>
|
||||
<Input
|
||||
type="text"
|
||||
placeholder={searchPlaceholder}
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.target.value)}
|
||||
className="h-8 text-sm"
|
||||
/>
|
||||
<div className="max-h-48 overflow-y-auto space-y-1 rounded border border-border p-2">
|
||||
{filtered.length === 0 ? (
|
||||
<p className="text-xs text-muted-foreground italic py-1">
|
||||
No {label.toLowerCase()} match “{search}”
|
||||
</p>
|
||||
) : (
|
||||
filtered.map((it) => (
|
||||
<label key={it.key} className="flex items-center gap-2 cursor-pointer text-sm">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={selectedKeys.includes(it.key)}
|
||||
onChange={() => onToggle(it.key)}
|
||||
className="h-3.5 w-3.5 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="truncate">{it.label}</span>
|
||||
{it.trailing && (
|
||||
<span className="text-[0.625rem] text-muted-foreground ml-auto font-mono shrink-0">
|
||||
{it.trailing}
|
||||
</span>
|
||||
)}
|
||||
</label>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function ScopeSelector({
|
||||
scope,
|
||||
onChange,
|
||||
@@ -1915,9 +2076,6 @@ function ScopeSelector({
|
||||
selectedContacts.length >= filteredContacts.length);
|
||||
const showEmptyScopeWarning = messagesEffectivelyNone && !rawEnabled;
|
||||
|
||||
const isChannelChecked = (key: string) => selectedChannels.includes(key);
|
||||
const isContactChecked = (key: string) => selectedContacts.includes(key);
|
||||
|
||||
const listHint =
|
||||
mode === 'only'
|
||||
? 'Newly added channels or contacts will not be automatically included.'
|
||||
@@ -1971,107 +2129,51 @@ function ScopeSelector({
|
||||
<p className="text-xs text-muted-foreground">{listHint}</p>
|
||||
|
||||
{channels.length > 0 && (
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label className="text-xs">
|
||||
Channels{' '}
|
||||
<span className="text-muted-foreground font-normal">({checkboxLabel})</span>
|
||||
</Label>
|
||||
<span className="flex gap-1">
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() =>
|
||||
onChange({
|
||||
...scope,
|
||||
messages: buildMessages(
|
||||
channels.map((ch) => ch.key),
|
||||
selectedContacts
|
||||
),
|
||||
})
|
||||
}
|
||||
>
|
||||
All
|
||||
</button>
|
||||
<span className="text-xs text-muted-foreground">/</span>
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() =>
|
||||
onChange({ ...scope, messages: buildMessages([], selectedContacts) })
|
||||
}
|
||||
>
|
||||
None
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
<div className="max-h-32 overflow-y-auto border border-input rounded-md p-2 space-y-1">
|
||||
{channels.map((ch) => (
|
||||
<label key={ch.key} className="flex items-center gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={isChannelChecked(ch.key)}
|
||||
onChange={() => toggleChannel(ch.key)}
|
||||
className="h-3.5 w-3.5 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="text-sm truncate">{ch.name}</span>
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<PillsSearchList
|
||||
label="Channels"
|
||||
labelSuffix={checkboxLabel}
|
||||
items={channels.map((ch) => ({ key: ch.key, label: ch.name }))}
|
||||
selectedKeys={selectedChannels}
|
||||
onToggle={toggleChannel}
|
||||
onAll={() =>
|
||||
onChange({
|
||||
...scope,
|
||||
messages: buildMessages(
|
||||
channels.map((ch) => ch.key),
|
||||
selectedContacts
|
||||
),
|
||||
})
|
||||
}
|
||||
onNone={() => onChange({ ...scope, messages: buildMessages([], selectedContacts) })}
|
||||
searchPlaceholder={`Search ${channels.length} channel${channels.length === 1 ? '' : 's'}...`}
|
||||
emptyItemsMessage="No channels available."
|
||||
/>
|
||||
)}
|
||||
|
||||
{filteredContacts.length > 0 && (
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label className="text-xs">
|
||||
Contacts{' '}
|
||||
<span className="text-muted-foreground font-normal">({checkboxLabel})</span>
|
||||
</Label>
|
||||
<span className="flex gap-1">
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() =>
|
||||
onChange({
|
||||
...scope,
|
||||
messages: buildMessages(
|
||||
selectedChannels,
|
||||
filteredContacts.map((c) => c.public_key)
|
||||
),
|
||||
})
|
||||
}
|
||||
>
|
||||
All
|
||||
</button>
|
||||
<span className="text-xs text-muted-foreground">/</span>
|
||||
<button
|
||||
type="button"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() =>
|
||||
onChange({ ...scope, messages: buildMessages(selectedChannels, []) })
|
||||
}
|
||||
>
|
||||
None
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
<div className="max-h-32 overflow-y-auto border border-input rounded-md p-2 space-y-1">
|
||||
{filteredContacts.map((c) => (
|
||||
<label key={c.public_key} className="flex items-center gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={isContactChecked(c.public_key)}
|
||||
onChange={() => toggleContact(c.public_key)}
|
||||
className="h-3.5 w-3.5 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="text-sm truncate">
|
||||
{c.name || c.public_key.substring(0, 12) + '...'}
|
||||
</span>
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<PillsSearchList
|
||||
label="Contacts"
|
||||
labelSuffix={checkboxLabel}
|
||||
items={filteredContacts.map((c) => ({
|
||||
key: c.public_key,
|
||||
label: c.name || c.public_key.slice(0, 12),
|
||||
trailing: c.public_key.slice(0, 12),
|
||||
}))}
|
||||
selectedKeys={selectedContacts}
|
||||
onToggle={toggleContact}
|
||||
onAll={() =>
|
||||
onChange({
|
||||
...scope,
|
||||
messages: buildMessages(
|
||||
selectedChannels,
|
||||
filteredContacts.map((c) => c.public_key)
|
||||
),
|
||||
})
|
||||
}
|
||||
onNone={() => onChange({ ...scope, messages: buildMessages(selectedChannels, []) })}
|
||||
searchPlaceholder={`Search ${filteredContacts.length} contact${filteredContacts.length === 1 ? '' : 's'}...`}
|
||||
emptyItemsMessage="No contacts available."
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -1,5 +1,9 @@
-import { useState } from 'react';
-import { ChevronRight, Logs, MessageSquare, Send, Settings } from 'lucide-react';
+import { useState, useEffect } from 'react';
+import { ChevronRight, Logs, MessageSquare, Send, Settings, X } from 'lucide-react';
+import { toast } from '../ui/sonner';
+import { usePush } from '../../contexts/PushSubscriptionContext';
+import type { Channel, Contact } from '../../types';
+import { getContactDisplayName } from '../../utils/pubkey';
import { Button } from '../ui/button';
import { Input } from '../ui/input';
import { Label } from '../ui/label';

@@ -35,30 +39,198 @@ import {
|
||||
getShowBatteryVoltage,
|
||||
setShowBatteryVoltage as saveBatteryVoltage,
|
||||
} from '../../utils/batteryDisplay';
|
||||
import {
|
||||
STATUS_DOT_PULSE_CHANGE_EVENT,
|
||||
getStatusDotPulseEnabled,
|
||||
setStatusDotPulseEnabled as saveStatusDotPulse,
|
||||
} from '../../utils/statusDotPulse';
|
||||
|
||||
/** Resolve a state key like "contact-abc123" or "channel-def456" to a display name. */
|
||||
function resolveConversationName(
|
||||
stateKey: string,
|
||||
contacts: Contact[],
|
||||
channels: Channel[]
|
||||
): string {
|
||||
if (stateKey.startsWith('contact-')) {
|
||||
const pubkey = stateKey.slice('contact-'.length);
|
||||
const contact = contacts.find((c) => c.public_key === pubkey);
|
||||
return contact ? getContactDisplayName(contact.name, contact.public_key) : pubkey.slice(0, 12);
|
||||
}
|
||||
if (stateKey.startsWith('channel-')) {
|
||||
const key = stateKey.slice('channel-'.length);
|
||||
const channel = channels.find((c) => c.key === key);
|
||||
if (channel?.name) return channel.name.startsWith('#') ? channel.name : `#${channel.name}`;
|
||||
return `#${key.slice(0, 12)}`;
|
||||
}
|
||||
return stateKey;
|
||||
}
|
||||
|
||||
function PushDeviceManagement({
|
||||
contacts = [],
|
||||
channels = [],
|
||||
}: {
|
||||
contacts?: Contact[];
|
||||
channels?: Channel[];
|
||||
}) {
|
||||
const {
|
||||
isSupported,
|
||||
allSubscriptions,
|
||||
pushConversations,
|
||||
loading,
|
||||
subscribe,
|
||||
currentSubscriptionId,
|
||||
toggleConversation,
|
||||
deleteSubscription,
|
||||
testPush,
|
||||
refreshSubscriptions,
|
||||
} = usePush();
|
||||
|
||||
useEffect(() => {
|
||||
refreshSubscriptions();
|
||||
}, [refreshSubscriptions]);
|
||||
|
||||
if (!isSupported) {
|
||||
return (
|
||||
<div className="space-y-3">
|
||||
<Label>Web Push Notifications</Label>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{window.isSecureContext
|
||||
? 'Push notifications are not supported by this browser.'
|
||||
: 'Web Push requires HTTPS. Access RemoteTerm over HTTPS (self-signed certificates work) to enable push notifications.'}
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-1">
|
||||
<Label>Web Push Notifications</Label>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Receive notifications even when the browser is closed. Use the bell icon in any
|
||||
conversation header to enable push for that contact or channel, or subscribe this browser
|
||||
to receive notifications for all push-enabled conversations.
|
||||
</p>
|
||||
<p className="text-sm text-muted-foreground">
  The set of channels or DMs that trigger push notifications is global per-install (i.e.
  all devices that register for Web Push will have the same set of channels/DMs that trigger
  notifications). Subscribing or unsubscribing a particular browser only controls whether
  that browser receives notifications for the configured set of channels/DMs.
</p>
|
||||
</div>
|
||||
|
||||
{!currentSubscriptionId && (
|
||||
<Button variant="outline" size="sm" onClick={() => void subscribe()} disabled={loading}>
|
||||
{loading ? 'Subscribing...' : 'Subscribe This Browser'}
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{pushConversations.length > 0 && (
|
||||
<div className="space-y-2">
|
||||
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||
Push-enabled conversations
|
||||
</span>
|
||||
<div className="flex flex-wrap gap-1.5">
|
||||
{pushConversations.map((key) => (
|
||||
<span
|
||||
key={key}
|
||||
className="inline-flex items-center gap-1 rounded-full bg-muted px-2.5 py-1 text-sm"
|
||||
>
|
||||
{resolveConversationName(key, contacts, channels)}
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => void toggleConversation(key)}
|
||||
className="rounded-full p-0.5 hover:bg-accent transition-colors"
|
||||
title="Remove"
|
||||
aria-label={`Remove ${resolveConversationName(key, contacts, channels)} from push`}
|
||||
>
|
||||
<X className="h-3.5 w-3.5" />
|
||||
</button>
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{allSubscriptions.length > 0 && (
|
||||
<div className="space-y-2">
|
||||
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||
Registered Devices
|
||||
</span>
|
||||
<div className="mt-2 space-y-2">
|
||||
{allSubscriptions.map((sub) => (
|
||||
<div
|
||||
key={sub.id}
|
||||
className="flex items-center justify-between gap-3 rounded-md border border-border px-3 py-2"
|
||||
>
|
||||
<div className="min-w-0 flex-1">
|
||||
<div className="flex items-center gap-2 overflow-hidden">
|
||||
<span className="truncate text-sm font-medium">
|
||||
{sub.label || 'Unknown device'}
|
||||
</span>
|
||||
{sub.id === currentSubscriptionId && (
|
||||
<span className="shrink-0 rounded bg-primary/10 px-1.5 py-0.5 text-[0.625rem] font-medium text-primary">
|
||||
Current device
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{sub.last_success_at
|
||||
? `Last push: ${new Date(sub.last_success_at * 1000).toLocaleDateString()}`
|
||||
: 'Never pushed'}
|
||||
{sub.failure_count > 0 && ` · ${sub.failure_count} failures`}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-8 text-sm"
|
||||
onClick={() => void testPush(sub.id)}
|
||||
>
|
||||
Test
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-8 text-sm text-destructive hover:text-destructive"
|
||||
onClick={() => {
|
||||
void deleteSubscription(sub.id).then(() => toast.success('Device removed'));
|
||||
}}
|
||||
>
|
||||
Unsubscribe this device
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function SettingsLocalSection({
|
||||
onLocalLabelChange,
|
||||
contacts,
|
||||
channels,
|
||||
className,
|
||||
}: {
|
||||
onLocalLabelChange?: (label: LocalLabel) => void;
|
||||
contacts?: Contact[];
|
||||
channels?: Channel[];
|
||||
className?: string;
|
||||
}) {
|
||||
const { distanceUnit, setDistanceUnit } = useDistanceUnit();
|
||||
const [reopenLastConversation, setReopenLastConversation] = useState(
|
||||
getReopenLastConversationEnabled
|
||||
);
|
||||
const [darkMap, setDarkMap] = useState(() => {
|
||||
try {
|
||||
return localStorage.getItem('remoteterm-dark-map') === 'true';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
const [localLabelText, setLocalLabelText] = useState(() => getLocalLabel().text);
|
||||
const [localLabelColor, setLocalLabelColor] = useState(() => getLocalLabel().color);
|
||||
const [autoFocusInput, setAutoFocusInput] = useState(getAutoFocusInputEnabled);
|
||||
const [batteryPercent, setBatteryPercent] = useState(getShowBatteryPercent);
|
||||
const [batteryVoltage, setBatteryVoltage] = useState(getShowBatteryVoltage);
|
||||
const [statusDotPulse, setStatusDotPulse] = useState(getStatusDotPulseEnabled);
|
||||
const [fontScale, setFontScale] = useState(getSavedFontScale);
|
||||
const [fontScaleSlider, setFontScaleSlider] = useState(getSavedFontScale);
|
||||
const [fontScaleInput, setFontScaleInput] = useState(() => String(getSavedFontScale()));
|
||||
@@ -178,24 +350,6 @@ export function SettingsLocalSection({
|
||||
<span className="text-sm">Reopen to last viewed channel/conversation</span>
|
||||
</label>
|
||||
|
||||
<label className="flex items-center gap-3 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={darkMap}
|
||||
onChange={(e) => {
|
||||
const v = e.target.checked;
|
||||
setDarkMap(v);
|
||||
try {
|
||||
localStorage.setItem('remoteterm-dark-map', String(v));
|
||||
} catch {
|
||||
// localStorage may be disabled
|
||||
}
|
||||
}}
|
||||
className="w-4 h-4 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="text-sm">Dark mode map tiles</span>
|
||||
</label>
|
||||
|
||||
<label className="flex items-center gap-3 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
@@ -247,6 +401,24 @@ export function SettingsLocalSection({
|
||||
</p>
|
||||
)}
|
||||
|
||||
<label className="flex items-center gap-3 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={statusDotPulse}
|
||||
onChange={(e) => {
|
||||
const v = e.target.checked;
|
||||
setStatusDotPulse(v);
|
||||
saveStatusDotPulse(v);
|
||||
window.dispatchEvent(new Event(STATUS_DOT_PULSE_CHANGE_EVENT));
|
||||
}}
|
||||
className="w-4 h-4 rounded border-input accent-primary"
|
||||
/>
|
||||
<span className="text-sm">
|
||||
Glitter status dot as packets arrive (blue = channel, purple = DM, cyan = advert, dark
|
||||
green = other)
|
||||
</span>
|
||||
</label>
|
||||
|
||||
<div className="space-y-3">
|
||||
<Label htmlFor="font-scale-input">Relative Font Size</Label>
|
||||
<div className="flex flex-col gap-3 sm:flex-row sm:items-center">
|
||||
@@ -324,6 +496,10 @@ export function SettingsLocalSection({
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Separator />
|
||||
|
||||
<PushDeviceManagement contacts={contacts} channels={channels} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -56,15 +56,68 @@ interface SheetContentProps
  hideCloseButton?: boolean;
}

+// Safe-area insets for each sheet side. Sheets are position:fixed and escape
+// body padding, so without this they render under the iOS status bar/home
+// indicator when the app is installed as a PWA.
+//
+// NOTE: these inline styles override the matching sides of the `p-6` default
+// in sheetVariants. All current consumers pass `p-0`; future sheets that want
+// the default padding should compose explicit per-side padding in their own
+// className rather than relying on the `p-6` shorthand being preserved.
+type SheetSide = Exclude<VariantProps<typeof sheetVariants>['side'], null | undefined>;
+
+const sheetSafeAreaStyles: Record<SheetSide, React.CSSProperties> = {
+  top: {
+    paddingTop: 'var(--safe-area-top)',
+    paddingLeft: 'var(--safe-area-left)',
+    paddingRight: 'var(--safe-area-right)',
+  },
+  bottom: {
+    paddingBottom: 'var(--safe-area-bottom)',
+    paddingLeft: 'var(--safe-area-left)',
+    paddingRight: 'var(--safe-area-right)',
+  },
+  left: {
+    paddingTop: 'var(--safe-area-top)',
+    paddingLeft: 'var(--safe-area-left)',
+    paddingBottom: 'var(--safe-area-bottom)',
+  },
+  right: {
+    paddingTop: 'var(--safe-area-top)',
+    paddingRight: 'var(--safe-area-right)',
+    paddingBottom: 'var(--safe-area-bottom)',
+  },
+};
+
const SheetContent = React.forwardRef<
  React.ElementRef<typeof SheetPrimitive.Content>,
  SheetContentProps
->(({ side = 'right', className, children, hideCloseButton = false, ...props }, ref) => (
+>(({ side = 'right', className, children, hideCloseButton = false, style, ...props }, ref) => (
  <SheetPortal>
    <SheetOverlay />
-    <SheetPrimitive.Content ref={ref} className={cn(sheetVariants({ side }), className)} {...props}>
+    <SheetPrimitive.Content
+      ref={ref}
+      className={cn(sheetVariants({ side }), className)}
+      style={{ ...sheetSafeAreaStyles[side as SheetSide], ...style }}
+      {...props}
+    >
      {!hideCloseButton && (
-        <SheetPrimitive.Close className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary">
+        <SheetPrimitive.Close
+          // Absolute positioning is measured from the containing block's
+          // padding edge, so the safe-area padding on SheetContent does not
+          // push this button down. We offset `top` by safe-area-top manually
+          // for sheets that pin to the viewport top (top/left/right). Bottom
+          // sheets start mid-viewport, so no adjustment is needed there.
+          style={
+            side === 'bottom'
+              ? undefined
+              : {
+                  top: 'calc(var(--safe-area-top) + 1rem)',
+                  right: 'calc(var(--safe-area-right) + 1rem)',
+                }
+          }
+          className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary"
+        >
          <X className="h-4 w-4" />
          <span className="sr-only">Close</span>
        </SheetPrimitive.Close>

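The `--safe-area-*` custom properties referenced above are assumed to be defined elsewhere in the app's global styles from the standard `env(safe-area-inset-*)` values; a minimal sketch of that wiring (property names taken from the diff, location and fallbacks hypothetical):

```tsx
// Sketch only: the real declarations presumably live in index.css or on a root
// element; the env() fallbacks and the placement here are assumptions.
import type { CSSProperties } from 'react';

export const safeAreaRootStyle = {
  '--safe-area-top': 'env(safe-area-inset-top, 0px)',
  '--safe-area-right': 'env(safe-area-inset-right, 0px)',
  '--safe-area-bottom': 'env(safe-area-inset-bottom, 0px)',
  '--safe-area-left': 'env(safe-area-inset-left, 0px)',
} as CSSProperties;

// e.g. <div style={safeAreaRootStyle}>…</div> at the application root, or the
// equivalent :root declarations in a stylesheet.
```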
@@ -0,0 +1,35 @@
import { createContext, useContext, type ReactNode } from 'react';
import { usePushSubscription, type PushSubscriptionState } from '../hooks/usePushSubscription';

const noopAsync = async () => {};
const noopAsyncNull = async () => null;

const defaultState: PushSubscriptionState = {
  isSupported: false,
  isSubscribed: false,
  currentSubscriptionId: null,
  allSubscriptions: [],
  pushConversations: [],
  loading: false,
  subscribe: noopAsyncNull,
  unsubscribe: noopAsync,
  toggleConversation: noopAsync,
  isConversationPushEnabled: () => false,
  deleteSubscription: noopAsync,
  testPush: noopAsync,
  refreshSubscriptions: async () => [],
  refreshConversations: noopAsync,
};

const PushSubscriptionContext = createContext<PushSubscriptionState>(defaultState);

export function PushSubscriptionProvider({ children }: { children: ReactNode }) {
  const push = usePushSubscription();
  return (
    <PushSubscriptionContext.Provider value={push}>{children}</PushSubscriptionContext.Provider>
  );
}

export function usePush(): PushSubscriptionState {
  return useContext(PushSubscriptionContext);
}

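A minimal consumer sketch for the provider above. `PushStatusBadge` is a hypothetical component, not part of this change; it only illustrates that any descendant of `PushSubscriptionProvider` shares the single hook instance via `usePush()`:

```tsx
// Hypothetical usage sketch; the import path depends on where the consumer lives.
import { usePush } from '../contexts/PushSubscriptionContext';

export function PushStatusBadge() {
  const { isSupported, isSubscribed, subscribe, loading } = usePush();
  if (!isSupported) return null;
  return (
    <button disabled={loading || isSubscribed} onClick={() => void subscribe()}>
      {isSubscribed ? 'Push enabled on this browser' : 'Enable push'}
    </button>
  );
}
```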
@@ -0,0 +1,277 @@
|
||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
import { api } from '../api';
|
||||
import type { PushSubscriptionInfo } from '../types';
|
||||
|
||||
function generateLabel(): string {
|
||||
const ua = navigator.userAgent;
|
||||
if (/Firefox/i.test(ua)) {
|
||||
if (/Android/i.test(ua)) return 'Firefox on Android';
|
||||
if (/Mac/i.test(ua)) return 'Firefox on macOS';
|
||||
if (/Windows/i.test(ua)) return 'Firefox on Windows';
|
||||
if (/Linux/i.test(ua)) return 'Firefox on Linux';
|
||||
return 'Firefox';
|
||||
}
|
||||
if (/Chrome/i.test(ua) && !/Edg/i.test(ua)) {
|
||||
if (/Android/i.test(ua)) return 'Chrome on Android';
|
||||
if (/CrOS/i.test(ua)) return 'Chrome on ChromeOS';
|
||||
if (/Mac/i.test(ua)) return 'Chrome on macOS';
|
||||
if (/Windows/i.test(ua)) return 'Chrome on Windows';
|
||||
if (/Linux/i.test(ua)) return 'Chrome on Linux';
|
||||
return 'Chrome';
|
||||
}
|
||||
if (/Edg/i.test(ua)) return 'Edge';
|
||||
if (/Safari/i.test(ua)) {
|
||||
if (/iPhone|iPad/i.test(ua)) return 'Safari on iOS';
|
||||
return 'Safari on macOS';
|
||||
}
|
||||
return 'Browser';
|
||||
}
|
||||
|
||||
function urlBase64ToUint8Array(base64String: string): Uint8Array {
|
||||
const padding = '='.repeat((4 - (base64String.length % 4)) % 4);
|
||||
const base64 = (base64String + padding).replace(/-/g, '+').replace(/_/g, '/');
|
||||
const raw = atob(base64);
|
||||
const arr = new Uint8Array(raw.length);
|
||||
for (let i = 0; i < raw.length; i++) arr[i] = raw.charCodeAt(i);
|
||||
return arr;
|
||||
}
|
||||
|
||||
function uint8ArraysEqual(a: Uint8Array | null, b: Uint8Array): boolean {
|
||||
if (!a || a.length !== b.length) return false;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function getApplicationServerKeyBytes(
|
||||
key: ArrayBuffer | ArrayBufferView | null | undefined
|
||||
): Uint8Array | null {
|
||||
if (!key) return null;
|
||||
if (ArrayBuffer.isView(key)) {
|
||||
return new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
|
||||
}
|
||||
return new Uint8Array(key);
|
||||
}
|
||||
|
||||
export interface PushSubscriptionState {
|
||||
isSupported: boolean;
|
||||
isSubscribed: boolean;
|
||||
currentSubscriptionId: string | null;
|
||||
allSubscriptions: PushSubscriptionInfo[];
|
||||
/** Global list of push-enabled conversation state keys (device-independent). */
|
||||
pushConversations: string[];
|
||||
loading: boolean;
|
||||
subscribe: () => Promise<string | null>;
|
||||
unsubscribe: () => Promise<void>;
|
||||
/** Toggle a conversation in the global push list (device-independent). */
|
||||
toggleConversation: (conversationKey: string) => Promise<void>;
|
||||
isConversationPushEnabled: (conversationKey: string) => boolean;
|
||||
deleteSubscription: (subscriptionId: string) => Promise<void>;
|
||||
testPush: (subscriptionId: string) => Promise<void>;
|
||||
refreshSubscriptions: () => Promise<PushSubscriptionInfo[]>;
|
||||
refreshConversations: () => Promise<void>;
|
||||
}
|
||||
|
||||
export function usePushSubscription(): PushSubscriptionState {
|
||||
const [isSupported, setIsSupported] = useState(false);
|
||||
const [currentSubscriptionId, setCurrentSubscriptionId] = useState<string | null>(null);
|
||||
const [allSubscriptions, setAllSubscriptions] = useState<PushSubscriptionInfo[]>([]);
|
||||
const [pushConversations, setPushConversations] = useState<string[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const vapidKeyRef = useRef<string | null>(null);
|
||||
|
||||
const reconcileCurrentSubscription = useCallback(
|
||||
(subs: PushSubscriptionInfo[], endpoint: string | null) => {
|
||||
setAllSubscriptions(subs);
|
||||
if (!endpoint) {
|
||||
setCurrentSubscriptionId(null);
|
||||
return;
|
||||
}
|
||||
const match = subs.find((sub) => sub.endpoint === endpoint);
|
||||
setCurrentSubscriptionId(match?.id ?? null);
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const supported =
|
||||
window.isSecureContext &&
|
||||
'serviceWorker' in navigator &&
|
||||
'PushManager' in window &&
|
||||
'Notification' in window;
|
||||
setIsSupported(supported);
|
||||
|
||||
if (supported) {
|
||||
// Always load all registered devices so Settings can manage them even
|
||||
// when this particular browser isn't subscribed.
|
||||
const subsPromise = api.getPushSubscriptions().catch(() => [] as PushSubscriptionInfo[]);
|
||||
|
||||
// Check if THIS browser has an active push subscription and match it
|
||||
// to a backend record.
|
||||
navigator.serviceWorker.ready
|
||||
.then((reg) => reg.pushManager.getSubscription())
|
||||
.then(async (sub) => {
|
||||
const existing = await subsPromise;
|
||||
reconcileCurrentSubscription(existing, sub?.endpoint ?? null);
|
||||
})
|
||||
.catch(() => {});
|
||||
|
||||
// Load global conversation list
|
||||
api
|
||||
.getPushConversations()
|
||||
.then(setPushConversations)
|
||||
.catch(() => {});
|
||||
}
|
||||
}, [reconcileCurrentSubscription]);
|
||||
|
||||
const refreshSubscriptions = useCallback(async () => {
|
||||
try {
|
||||
const subs = await api.getPushSubscriptions();
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
const sub = await reg.pushManager.getSubscription();
|
||||
reconcileCurrentSubscription(subs, sub?.endpoint ?? null);
|
||||
return subs;
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}, [reconcileCurrentSubscription]);
|
||||
|
||||
const refreshConversations = useCallback(async () => {
|
||||
try {
|
||||
const convos = await api.getPushConversations();
|
||||
setPushConversations(convos);
|
||||
} catch {
|
||||
// best effort
|
||||
}
|
||||
}, []);
|
||||
|
||||
const subscribe = useCallback(async (): Promise<string | null> => {
|
||||
if (!isSupported) return null;
|
||||
setLoading(true);
|
||||
try {
|
||||
const resp = await api.getVapidPublicKey();
|
||||
vapidKeyRef.current = resp.public_key;
|
||||
const vapidKeyBytes = urlBase64ToUint8Array(resp.public_key);
|
||||
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
let pushSub = await reg.pushManager.getSubscription();
|
||||
const existingKeyBytes = getApplicationServerKeyBytes(pushSub?.options?.applicationServerKey);
|
||||
const requiresRecreate =
|
||||
pushSub !== null && !uint8ArraysEqual(existingKeyBytes, vapidKeyBytes);
|
||||
|
||||
if (requiresRecreate) {
|
||||
await pushSub!.unsubscribe();
|
||||
pushSub = null;
|
||||
}
|
||||
|
||||
if (!pushSub) {
|
||||
pushSub = await reg.pushManager.subscribe({
|
||||
userVisibleOnly: true,
|
||||
applicationServerKey: vapidKeyBytes.buffer as ArrayBuffer,
|
||||
});
|
||||
}
|
||||
|
||||
const json = pushSub.toJSON();
|
||||
const result = await api.pushSubscribe({
|
||||
endpoint: json.endpoint!,
|
||||
p256dh: json.keys!.p256dh!,
|
||||
auth: json.keys!.auth!,
|
||||
label: generateLabel(),
|
||||
});
|
||||
|
||||
setCurrentSubscriptionId(result.id);
|
||||
await refreshSubscriptions();
|
||||
return result.id;
|
||||
} catch (err) {
|
||||
console.error('Push subscribe failed:', err);
|
||||
toast.error('Failed to enable push notifications', {
|
||||
description: err instanceof Error ? err.message : 'Check that notifications are allowed',
|
||||
});
|
||||
return null;
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, [isSupported, refreshSubscriptions]);
|
||||
|
||||
const unsubscribe = useCallback(async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
const pushSub = await reg.pushManager.getSubscription();
|
||||
if (pushSub) await pushSub.unsubscribe();
|
||||
|
||||
if (currentSubscriptionId) {
|
||||
await api.deletePushSubscription(currentSubscriptionId).catch(() => {});
|
||||
}
|
||||
|
||||
setCurrentSubscriptionId(null);
|
||||
await refreshSubscriptions();
|
||||
} catch (err) {
|
||||
console.error('Push unsubscribe failed:', err);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, [currentSubscriptionId, refreshSubscriptions]);
|
||||
|
||||
const toggleConversation = useCallback(async (conversationKey: string) => {
|
||||
try {
|
||||
const updated = await api.togglePushConversation(conversationKey);
|
||||
setPushConversations(updated);
|
||||
} catch {
|
||||
toast.error('Failed to update push preferences');
|
||||
}
|
||||
}, []);
|
||||
|
||||
const isConversationPushEnabled = useCallback(
|
||||
(conversationKey: string): boolean => {
|
||||
return pushConversations.includes(conversationKey);
|
||||
},
|
||||
[pushConversations]
|
||||
);
|
||||
|
||||
const deleteSubscription = useCallback(
|
||||
async (subscriptionId: string) => {
|
||||
await api.deletePushSubscription(subscriptionId);
|
||||
if (subscriptionId === currentSubscriptionId) {
|
||||
setCurrentSubscriptionId(null);
|
||||
try {
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
const pushSub = await reg.pushManager.getSubscription();
|
||||
if (pushSub) await pushSub.unsubscribe();
|
||||
} catch {
|
||||
// best effort
|
||||
}
|
||||
}
|
||||
await refreshSubscriptions();
|
||||
},
|
||||
[currentSubscriptionId, refreshSubscriptions]
|
||||
);
|
||||
|
||||
const testPush = useCallback(async (subscriptionId: string) => {
|
||||
try {
|
||||
await api.testPushSubscription(subscriptionId);
|
||||
toast.success('Test notification sent');
|
||||
} catch {
|
||||
toast.error('Test notification failed');
|
||||
}
|
||||
}, []);
|
||||
|
||||
return {
|
||||
isSupported,
|
||||
isSubscribed: !!currentSubscriptionId,
|
||||
currentSubscriptionId,
|
||||
allSubscriptions,
|
||||
pushConversations,
|
||||
loading,
|
||||
subscribe,
|
||||
unsubscribe,
|
||||
toggleConversation,
|
||||
isConversationPushEnabled,
|
||||
deleteSubscription,
|
||||
testPush,
|
||||
refreshSubscriptions,
|
||||
refreshConversations,
|
||||
};
|
||||
}
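The backend client surface this hook relies on, inferred from its call sites above and the `/api/push/*` routes; the actual signatures in `../api` may differ slightly:

```ts
// Inferred sketch of the push-related api client methods used by the hook.
import type { PushSubscriptionInfo } from '../types';

interface PushApiClient {
  getVapidPublicKey(): Promise<{ public_key: string }>;
  pushSubscribe(body: {
    endpoint: string;
    p256dh: string;
    auth: string;
    label: string;
  }): Promise<{ id: string }>;
  getPushSubscriptions(): Promise<PushSubscriptionInfo[]>;
  deletePushSubscription(subscriptionId: string): Promise<void>;
  testPushSubscription(subscriptionId: string): Promise<void>;
  getPushConversations(): Promise<string[]>;
  togglePushConversation(conversationKey: string): Promise<string[]>;
}
```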
|
||||
@@ -12,6 +12,7 @@ import { getStateKey } from '../utils/conversationState';
|
||||
import { mergeContactIntoList } from '../utils/contactMerge';
|
||||
import { getContactDisplayName } from '../utils/pubkey';
|
||||
import { appendRawPacketUnique } from '../utils/rawPacketIdentity';
|
||||
import { emitStatusDotPulse } from '../utils/statusDotPulse';
|
||||
import type {
|
||||
Channel,
|
||||
Contact,
|
||||
@@ -253,6 +254,7 @@ export function useRealtimeAppState({
|
||||
},
|
||||
onRawPacket: (packet: RawPacket) => {
|
||||
recordRawPacketObservation?.(packet);
|
||||
emitStatusDotPulse(packet.payload_type);
|
||||
setRawPackets((prev) => appendRawPacketUnique(prev, packet, maxRawPackets));
|
||||
},
|
||||
onMessageAcked: (
|
||||
|
||||
@@ -4,15 +4,25 @@ import { App } from './App';
import './index.css';
import './themes.css';
import './styles.css';
-import { getSavedTheme, applyTheme } from './utils/theme';
+import { getSavedTheme, applyTheme, initFollowOSListener } from './utils/theme';
import { applyFontScale, getSavedFontScale } from './utils/fontScale';
+import { PushSubscriptionProvider } from './contexts/PushSubscriptionContext';

// Apply saved theme before first render
applyTheme(getSavedTheme());
+// Re-apply when the OS color-scheme preference changes, if on "Follow OS".
+initFollowOSListener();
applyFontScale(getSavedFontScale());

createRoot(document.getElementById('root')!).render(
  <StrictMode>
-    <App />
+    <PushSubscriptionProvider>
+      <App />
+    </PushSubscriptionProvider>
  </StrictMode>
);

+// Register service worker for Web Push (requires secure context)
+if ('serviceWorker' in navigator && window.isSecureContext) {
+  navigator.serviceWorker.register('./sw.js').catch(() => {});
+}

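The registered `./sw.js` is not part of this diff; a minimal push handler for it might look like the sketch below. The payload fields (`title`, `body`, `conversation_key`) are assumptions, not the server's confirmed format:

```ts
// sw.js (hypothetical sketch): the general shape of a Web Push service worker.
/// <reference lib="webworker" />
declare const self: ServiceWorkerGlobalScope;

self.addEventListener('push', (event) => {
  // Parse the (assumed) JSON payload and show a notification for it.
  const data = event.data?.json() ?? {};
  event.waitUntil(
    self.registration.showNotification(data.title ?? 'RemoteTerm', {
      body: data.body ?? 'New message',
      data: { conversationKey: data.conversation_key }, // assumed field name
    })
  );
});

self.addEventListener('notificationclick', (event) => {
  // Focus or open the app when the user taps the notification.
  event.notification.close();
  event.waitUntil(self.clients.openWindow('./'));
});
```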
@@ -29,6 +29,13 @@ const mocks = vi.hoisted(() => ({
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
push: {
|
||||
isSupported: false,
|
||||
isSubscribed: false,
|
||||
subscribe: vi.fn<() => Promise<string | null>>(async () => null),
|
||||
toggleConversation: vi.fn(async () => {}),
|
||||
isConversationPushEnabled: vi.fn(() => false),
|
||||
},
|
||||
hookFns: {
|
||||
fetchOlderMessages: vi.fn(async () => {}),
|
||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||
@@ -51,6 +58,25 @@ vi.mock('../useWebSocket', () => ({
|
||||
useWebSocket: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../contexts/PushSubscriptionContext', () => ({
|
||||
usePush: () => ({
|
||||
isSupported: mocks.push.isSupported,
|
||||
isSubscribed: mocks.push.isSubscribed,
|
||||
currentSubscriptionId: mocks.push.isSubscribed ? 'sub-1' : null,
|
||||
allSubscriptions: [],
|
||||
pushConversations: [],
|
||||
loading: false,
|
||||
subscribe: mocks.push.subscribe,
|
||||
unsubscribe: vi.fn(async () => {}),
|
||||
toggleConversation: mocks.push.toggleConversation,
|
||||
isConversationPushEnabled: mocks.push.isConversationPushEnabled,
|
||||
deleteSubscription: vi.fn(async () => {}),
|
||||
testPush: vi.fn(async () => {}),
|
||||
refreshSubscriptions: vi.fn(async () => []),
|
||||
refreshConversations: vi.fn(async () => {}),
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('../hooks', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../hooks')>();
|
||||
return {
|
||||
@@ -209,6 +235,10 @@ const publicChannel = {
|
||||
describe('App favorite toggle flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mocks.push.isSupported = false;
|
||||
mocks.push.isSubscribed = false;
|
||||
mocks.push.subscribe.mockResolvedValue(null);
|
||||
mocks.push.isConversationPushEnabled.mockReturnValue(false);
|
||||
|
||||
mocks.api.getRadioConfig.mockResolvedValue(baseConfig);
|
||||
mocks.api.getSettings.mockResolvedValue({ ...baseSettings });
|
||||
@@ -313,4 +343,44 @@ describe('App favorite toggle flow', () => {
|
||||
expect(screen.queryByTestId('settings-modal-section')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('subscribes this browser before enabling web push for a conversation', async () => {
|
||||
mocks.push.isSupported = true;
|
||||
mocks.push.isSubscribed = false;
|
||||
mocks.push.subscribe.mockResolvedValue('sub-1');
|
||||
|
||||
render(<App />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: 'Notification settings' })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||
fireEvent.click(screen.getByRole('checkbox', { name: /web push/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mocks.push.subscribe).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.push.toggleConversation).toHaveBeenCalledWith(`channel-${publicChannel.key}`);
|
||||
});
|
||||
});
|
||||
|
||||
it('does not enable web push when subscription setup fails', async () => {
|
||||
mocks.push.isSupported = true;
|
||||
mocks.push.isSubscribed = false;
|
||||
mocks.push.subscribe.mockResolvedValue(null);
|
||||
|
||||
render(<App />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: 'Notification settings' })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||
fireEvent.click(screen.getByRole('checkbox', { name: /web push/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mocks.push.subscribe).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
expect(mocks.push.toggleConversation).not.toHaveBeenCalled();
|
||||
});
|
||||
});
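The two push tests above imply a toggle handler that subscribes the current browser first and only then flips the conversation into the global push list; a sketch of that behaviour (names are illustrative, the real handler lives in the App/ChatHeader wiring):

```ts
// Sketch of the behaviour the tests assert; not the literal implementation.
// currentSubscriptionId, subscribe, and toggleConversation come from usePush().
async function handleWebPushToggle(conversationKey: string): Promise<void> {
  let subscriptionId = currentSubscriptionId;
  if (!subscriptionId) {
    subscriptionId = await subscribe(); // registers this browser with the server
    if (!subscriptionId) return; // permission denied or setup failed: do not toggle
  }
  await toggleConversation(conversationKey);
}
```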
|
||||
|
||||
@@ -150,7 +150,7 @@ describe('ChatHeader key visibility', () => {
|
||||
expect(screen.getAllByText('#Esperance')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('shows enabled notification state and toggles when clicked', () => {
|
||||
it('shows filled bell when notifications are enabled and toggles via dropdown', () => {
|
||||
const conversation: Conversation = { type: 'contact', id: '11'.repeat(32), name: 'Alice' };
|
||||
const onToggleNotifications = vi.fn();
|
||||
|
||||
@@ -164,12 +164,40 @@ describe('ChatHeader key visibility', () => {
|
||||
/>
|
||||
);
|
||||
|
||||
fireEvent.click(screen.getByText('Notifications On'));
|
||||
// Bell button should be present; open the dropdown
|
||||
const bellBtn = screen.getByRole('button', { name: 'Notification settings' });
|
||||
fireEvent.click(bellBtn);
|
||||
|
||||
expect(screen.getByText('Notifications On')).toBeInTheDocument();
|
||||
// Desktop notifications checkbox should be checked
|
||||
const checkbox = screen.getByRole('checkbox', { name: /desktop notifications/i });
|
||||
expect(checkbox).toBeChecked();
|
||||
|
||||
// Toggling calls the handler
|
||||
fireEvent.click(checkbox);
|
||||
expect(onToggleNotifications).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('keeps desktop notifications available when web push is also supported', () => {
|
||||
const conversation: Conversation = { type: 'contact', id: '13'.repeat(32), name: 'Alice' };
|
||||
|
||||
render(
|
||||
<ChatHeader
|
||||
{...baseProps}
|
||||
conversation={conversation}
|
||||
channels={[]}
|
||||
pushSupported
|
||||
pushSubscribed
|
||||
pushEnabledForConversation
|
||||
onTogglePush={vi.fn()}
|
||||
/>
|
||||
);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||
|
||||
expect(screen.getByRole('checkbox', { name: /desktop notifications/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('checkbox', { name: /web push/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides trace and notification controls for room-server contacts', () => {
|
||||
const pubKey = '41'.repeat(32);
|
||||
const contact: Contact = {
|
||||
@@ -198,9 +226,7 @@ describe('ChatHeader key visibility', () => {
|
||||
|
||||
expect(screen.queryByRole('button', { name: 'Path Discovery' })).not.toBeInTheDocument();
|
||||
expect(screen.queryByRole('button', { name: 'Direct Trace' })).not.toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByRole('button', { name: 'Enable notifications for this conversation' })
|
||||
).not.toBeInTheDocument();
|
||||
expect(screen.queryByRole('button', { name: 'Notification settings' })).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides the delete button for the canonical Public channel', () => {
|
||||
|
||||
@@ -145,6 +145,7 @@ function createProps(overrides: Partial<React.ComponentProps<typeof Conversation
|
||||
onDeleteContact: vi.fn(async () => {}),
|
||||
onDeleteChannel: vi.fn(async () => {}),
|
||||
onSetChannelFloodScopeOverride: vi.fn(async () => {}),
|
||||
onSelectConversation: vi.fn(),
|
||||
onOpenContactInfo: vi.fn(),
|
||||
onOpenChannelInfo: vi.fn(),
|
||||
onSenderClick: vi.fn(),
|
||||
|
||||
@@ -1,26 +1,43 @@
|
||||
import { forwardRef } from 'react';
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
import { MapView } from '../components/MapView';
|
||||
import type { Contact } from '../types';
|
||||
|
||||
vi.mock('react-leaflet', () => ({
|
||||
MapContainer: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||
TileLayer: () => null,
|
||||
CircleMarker: forwardRef<
|
||||
HTMLDivElement,
|
||||
{ children: React.ReactNode; pathOptions?: { fillColor?: string } }
|
||||
>(({ children, pathOptions }, ref) => (
|
||||
<div ref={ref} data-fill-color={pathOptions?.fillColor}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
Popup: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||
useMap: () => ({
|
||||
setView: vi.fn(),
|
||||
fitBounds: vi.fn(),
|
||||
}),
|
||||
}));
|
||||
vi.mock('react-leaflet', () => {
|
||||
const BaseLayer = ({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
name: string;
|
||||
checked?: boolean;
|
||||
}) => <div>{children}</div>;
|
||||
const LayersControlMock = ({ children }: { children: React.ReactNode }) => <div>{children}</div>;
|
||||
(LayersControlMock as unknown as { BaseLayer: typeof BaseLayer }).BaseLayer = BaseLayer;
|
||||
return {
|
||||
MapContainer: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||
TileLayer: () => null,
|
||||
CircleMarker: forwardRef<
|
||||
HTMLDivElement,
|
||||
{ children: React.ReactNode; pathOptions?: { fillColor?: string } }
|
||||
>(({ children, pathOptions }, ref) => (
|
||||
<div ref={ref} data-fill-color={pathOptions?.fillColor}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
Popup: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||
Polyline: () => null,
|
||||
LayersControl: LayersControlMock,
|
||||
useMap: () => ({
|
||||
setView: vi.fn(),
|
||||
fitBounds: vi.fn(),
|
||||
setMaxZoom: vi.fn(),
|
||||
setZoom: vi.fn(),
|
||||
getZoom: vi.fn(() => 2),
|
||||
}),
|
||||
useMapEvents: () => null,
|
||||
};
|
||||
});
|
||||
|
||||
describe('MapView', () => {
|
||||
it('renders a never-heard fallback for a focused contact without last_seen', () => {
|
||||
@@ -54,6 +71,68 @@ describe('MapView', () => {
|
||||
expect(screen.getByText('Last heard: Never heard by this server')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('invokes onSelectContact when the popup name is clicked', () => {
|
||||
const contact: Contact = {
|
||||
public_key: 'cc'.repeat(32),
|
||||
name: 'Clickable',
|
||||
type: 1,
|
||||
flags: 0,
|
||||
direct_path: null,
|
||||
direct_path_len: -1,
|
||||
direct_path_hash_mode: -1,
|
||||
route_override_path: null,
|
||||
route_override_len: null,
|
||||
route_override_hash_mode: null,
|
||||
last_advert: null,
|
||||
lat: 42,
|
||||
lon: -72,
|
||||
last_seen: Math.floor(Date.now() / 1000),
|
||||
on_radio: false,
|
||||
favorite: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
};
|
||||
const onSelectContact = vi.fn();
|
||||
|
||||
render(<MapView contacts={[contact]} onSelectContact={onSelectContact} />);
|
||||
|
||||
const link = screen.getByRole('button', { name: 'Clickable' });
|
||||
expect(link).toHaveAttribute('title', 'Open conversation with Clickable');
|
||||
fireEvent.click(link);
|
||||
|
||||
expect(onSelectContact).toHaveBeenCalledWith(contact);
|
||||
});
|
||||
|
||||
it('renders the popup name as plain text when no onSelectContact is provided', () => {
|
||||
const contact: Contact = {
|
||||
public_key: 'dd'.repeat(32),
|
||||
name: 'Static',
|
||||
type: 1,
|
||||
flags: 0,
|
||||
direct_path: null,
|
||||
direct_path_len: -1,
|
||||
direct_path_hash_mode: -1,
|
||||
route_override_path: null,
|
||||
route_override_len: null,
|
||||
route_override_hash_mode: null,
|
||||
last_advert: null,
|
||||
lat: 42,
|
||||
lon: -72,
|
||||
last_seen: Math.floor(Date.now() / 1000),
|
||||
on_radio: false,
|
||||
favorite: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
};
|
||||
|
||||
render(<MapView contacts={[contact]} />);
|
||||
|
||||
expect(screen.queryByRole('button', { name: /open conversation with static/i })).toBeNull();
|
||||
expect(screen.getByText('Static')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('keeps the 7-day cutoff stable for the lifetime of the mounted map', () => {
|
||||
vi.useFakeTimers();
|
||||
try {
|
||||
|
||||
@@ -220,6 +220,24 @@ describe('MessageList channel sender rendering', () => {
|
||||
expect(onChannelReferenceClick).toHaveBeenCalledWith('#ops-room');
|
||||
});
|
||||
|
||||
it('does not strip colon-prefixed text in direct messages (issue #198)', () => {
|
||||
render(
|
||||
<MessageList
|
||||
messages={[
|
||||
createMessage({
|
||||
type: 'PRIV',
|
||||
conversation_key: 'ab'.repeat(32),
|
||||
text: 'TEST1: TEST2',
|
||||
}),
|
||||
]}
|
||||
contacts={[]}
|
||||
loading={false}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(screen.getByText('TEST1: TEST2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders and dismisses an unread marker at the first unread message boundary', async () => {
|
||||
const user = userEvent.setup();
|
||||
const messages = [
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { SettingsModal } from '../components/SettingsModal';
|
||||
@@ -70,6 +70,7 @@ const baseSettings: AppSettings = {
|
||||
discovery_blocked_types: [],
|
||||
tracked_telemetry_repeaters: [],
|
||||
auto_resend_channel: false,
|
||||
telemetry_interval_hours: 8,
|
||||
};
|
||||
|
||||
function renderModal(overrides?: {
|
||||
@@ -442,52 +443,86 @@ describe('SettingsModal', () => {
|
||||
expect(screen.getByText('iPhone')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('clears stale errors when switching external desktop sections', async () => {
|
||||
it('reverts checkbox state when auto-persist fails on the database section', async () => {
|
||||
// Auto-persist replaced the old "Save Settings" button on this section.
|
||||
// The risk is now: a toggle gets applied optimistically, the PATCH fails,
|
||||
// and we're left with the UI out of sync with saved state. Verify the
|
||||
// revert-on-error path keeps the checkbox consistent with the server.
|
||||
const onSaveAppSettings = vi.fn(async () => {
|
||||
throw new Error('Save failed');
|
||||
});
|
||||
|
||||
const { view } = renderModal({
|
||||
renderModal({
|
||||
externalSidebarNav: true,
|
||||
desktopSection: 'database',
|
||||
onSaveAppSettings,
|
||||
});
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Save Settings' }));
|
||||
const checkbox = screen.getByRole('checkbox', {
|
||||
name: /Auto-decrypt historical DMs/i,
|
||||
}) as HTMLInputElement;
|
||||
const initialChecked = checkbox.checked;
|
||||
|
||||
fireEvent.click(checkbox);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Save failed')).toBeInTheDocument();
|
||||
expect(onSaveAppSettings).toHaveBeenCalled();
|
||||
});
|
||||
await waitFor(() => {
|
||||
expect(checkbox.checked).toBe(initialChecked);
|
||||
});
|
||||
});
|
||||
|
||||
it('serializes rapid auto-persist clicks so stale writes cannot win', async () => {
|
||||
// Regression test for a race where rapid consecutive checkbox toggles
|
||||
// fire overlapping PATCHes that can land out of order. The page now
|
||||
// chains saves through a single promise, so the server sees them in
|
||||
// the order the user clicked. This test hand-controls resolution
|
||||
// order to force the "stale write" scenario if serialization were off.
|
||||
|
||||
const deferred: { resolve: () => void }[] = [];
|
||||
const callOrder: number[] = [];
|
||||
|
||||
const onSaveAppSettings = vi.fn(async (_update: unknown) => {
|
||||
const index = deferred.length;
|
||||
callOrder.push(index);
|
||||
await new Promise<void>((res) => {
|
||||
deferred.push({ resolve: res });
|
||||
});
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
view.rerender(
|
||||
<SettingsModal
|
||||
open
|
||||
externalSidebarNav
|
||||
desktopSection="fanout"
|
||||
config={baseConfig}
|
||||
health={baseHealth}
|
||||
appSettings={baseSettings}
|
||||
onClose={vi.fn()}
|
||||
onSave={vi.fn(async () => {})}
|
||||
onSaveAppSettings={onSaveAppSettings}
|
||||
onSetPrivateKey={vi.fn(async () => {})}
|
||||
onReboot={vi.fn(async () => {})}
|
||||
onDisconnect={vi.fn(async () => {})}
|
||||
onReconnect={vi.fn(async () => {})}
|
||||
onAdvertise={vi.fn(async () => {})}
|
||||
meshDiscovery={null}
|
||||
meshDiscoveryLoadingTarget={null}
|
||||
onDiscoverMesh={vi.fn(async () => {})}
|
||||
onHealthRefresh={vi.fn(async () => {})}
|
||||
onRefreshAppSettings={vi.fn(async () => {})}
|
||||
/>
|
||||
);
|
||||
await Promise.resolve();
|
||||
renderModal({
|
||||
externalSidebarNav: true,
|
||||
desktopSection: 'database',
|
||||
onSaveAppSettings,
|
||||
});
|
||||
|
||||
expect(api.getFanoutConfigs).toHaveBeenCalled();
|
||||
expect(screen.getByRole('button', { name: 'Add Integration' })).toBeInTheDocument();
|
||||
expect(screen.queryByText('Save failed')).not.toBeInTheDocument();
|
||||
// Two distinct checkboxes in quick succession.
|
||||
const blockClients = screen.getByRole('checkbox', { name: /Block clients/i });
|
||||
const blockRepeaters = screen.getByRole('checkbox', { name: /Block repeaters/i });
|
||||
|
||||
fireEvent.click(blockClients);
|
||||
fireEvent.click(blockRepeaters);
|
||||
|
||||
// Wait for the first PATCH to be registered. Only the first should be
|
||||
// in-flight — the second must be queued behind it.
|
||||
await waitFor(() => {
|
||||
expect(deferred.length).toBe(1);
|
||||
});
|
||||
expect(callOrder).toEqual([0]);
|
||||
|
||||
// Resolve the first PATCH. The chain should now dispatch the second.
|
||||
deferred[0].resolve();
|
||||
await waitFor(() => {
|
||||
expect(deferred.length).toBe(2);
|
||||
});
|
||||
expect(callOrder).toEqual([0, 1]);
|
||||
|
||||
// Resolve the second so the test tears down cleanly.
|
||||
deferred[1].resolve();
|
||||
await waitFor(() => {
|
||||
expect(onSaveAppSettings).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
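The serialization the test above checks can be implemented by chaining every save onto a single pending promise; a sketch under the assumption that the settings page keeps such a chain in module or ref state (names here are illustrative):

```ts
// Illustrative sketch of the save-chaining pattern the test exercises.
type AppSettings = Record<string, unknown>; // stand-in for the app's real type
declare function onSaveAppSettings(update: Partial<AppSettings>): Promise<void>;

let saveChain: Promise<void> = Promise.resolve();

function persistAppSettings(update: Partial<AppSettings>, revert: () => void): Promise<void> {
  // Queue this PATCH behind any in-flight save so writes reach the server in
  // click order; revert the optimistic UI state if this particular save fails.
  const attempt = saveChain.then(() => onSaveAppSettings(update));
  attempt.catch(() => revert());
  // A failed save must not block later queued saves.
  saveChain = attempt.catch(() => {});
  return attempt;
}
```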
|
||||
|
||||
it('does not call onClose after save/reboot flows in page mode', async () => {
|
||||
|
||||
@@ -8,9 +8,12 @@ class ResizeObserver {

globalThis.ResizeObserver = ResizeObserver;

-// Several components call matchMedia at import time for responsive detection
+// Several components call matchMedia at import time for responsive detection.
+// Use a configurable descriptor so individual tests can override the stub.
if (typeof globalThis.matchMedia === 'undefined') {
  Object.defineProperty(globalThis, 'matchMedia', {
+   configurable: true,
+   writable: true,
    value: (query: string) => ({
      matches: false,
      media: query,

@@ -513,6 +513,42 @@ describe('Sidebar section summaries', () => {
|
||||
expect(contactRows).toEqual(['DM Recent', 'Advert Only', 'No Recency']);
|
||||
});
|
||||
|
||||
it('floats contacts with unread DMs above read contacts regardless of recency', () => {
|
||||
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||
const readRecent = makeContact('11'.repeat(32), 'Read Recent', 1, { last_advert: 500 });
|
||||
const unreadOld = makeContact('22'.repeat(32), 'Unread Old', 1, { last_advert: 100 });
|
||||
|
||||
render(
|
||||
<Sidebar
|
||||
contacts={[readRecent, unreadOld]}
|
||||
channels={[publicChannel]}
|
||||
activeConversation={null}
|
||||
onSelectConversation={vi.fn()}
|
||||
onNewMessage={vi.fn()}
|
||||
lastMessageTimes={{
|
||||
[getStateKey('contact', readRecent.public_key)]: 500,
|
||||
[getStateKey('contact', unreadOld.public_key)]: 200,
|
||||
}}
|
||||
unreadCounts={{
|
||||
[getStateKey('contact', unreadOld.public_key)]: 3,
|
||||
}}
|
||||
mentions={{}}
|
||||
showCracker={false}
|
||||
crackerRunning={false}
|
||||
onToggleCracker={vi.fn()}
|
||||
onMarkAllRead={vi.fn()}
|
||||
/>
|
||||
);
|
||||
|
||||
const contactRows = screen
|
||||
.getAllByText(/^(Read Recent|Unread Old)$/)
|
||||
.map((node) => node.textContent)
|
||||
.filter((text): text is string => Boolean(text));
|
||||
|
||||
// Unread Old has unread DMs so it floats above Read Recent despite older recency
|
||||
expect(contactRows).toEqual(['Unread Old', 'Read Recent']);
|
||||
});
|
||||
|
||||
it('sorts repeaters by heard recency even when message times disagree', () => {
|
||||
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||
const staleMessageRelay = makeContact(
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { StatusBar } from '../components/StatusBar';
|
||||
import type { HealthStatus } from '../types';
|
||||
@@ -77,4 +77,57 @@ describe('StatusBar', () => {
|
||||
expect(localStorage.getItem('remoteterm-theme')).toBe('original');
|
||||
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||
});
|
||||
|
||||
describe('with Follow OS theme saved', () => {
|
||||
const originalMatchMedia = globalThis.matchMedia;
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.matchMedia = originalMatchMedia;
|
||||
});
|
||||
|
||||
// Stub matchMedia so prefers-color-scheme: light returns the desired value.
|
||||
const setPrefersLight = (isLight: boolean) => {
|
||||
Object.defineProperty(globalThis, 'matchMedia', {
|
||||
configurable: true,
|
||||
value: (query: string) => ({
|
||||
matches: query.includes('light') ? isLight : !isLight,
|
||||
media: query,
|
||||
onchange: null,
|
||||
addListener: () => {},
|
||||
removeListener: () => {},
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {},
|
||||
dispatchEvent: () => false,
|
||||
}),
|
||||
});
|
||||
};
|
||||
|
||||
it('clicking toggle while OS prefers dark overrides follow-os into explicit light', () => {
|
||||
setPrefersLight(false);
|
||||
localStorage.setItem('remoteterm-theme', 'follow-os');
|
||||
|
||||
render(<StatusBar health={baseHealth} config={null} onSettingsClick={vi.fn()} />);
|
||||
|
||||
// OS is dark → effective is original → toggle offers "Switch to light theme"
|
||||
const toggle = screen.getByRole('button', { name: 'Switch to light theme' });
|
||||
fireEvent.click(toggle);
|
||||
|
||||
expect(localStorage.getItem('remoteterm-theme')).toBe('light');
|
||||
expect(document.documentElement.dataset.theme).toBe('light');
|
||||
});
|
||||
|
||||
it('clicking toggle while OS prefers light overrides follow-os into explicit dark', () => {
|
||||
setPrefersLight(true);
|
||||
localStorage.setItem('remoteterm-theme', 'follow-os');
|
||||
|
||||
render(<StatusBar health={baseHealth} config={null} onSettingsClick={vi.fn()} />);
|
||||
|
||||
// OS is light → effective is light → toggle offers "Switch to classic theme"
|
||||
const toggle = screen.getByRole('button', { name: 'Switch to classic theme' });
|
||||
fireEvent.click(toggle);
|
||||
|
||||
expect(localStorage.getItem('remoteterm-theme')).toBe('original');
|
||||
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import {
|
||||
FOLLOW_OS_THEME_ID,
|
||||
THEMES,
|
||||
applyTheme,
|
||||
getEffectiveTheme,
|
||||
getSavedTheme,
|
||||
} from '../utils/theme';
|
||||
|
||||
const originalMatchMedia = globalThis.matchMedia;
|
||||
|
||||
function stubPrefersLight(isLight: boolean) {
|
||||
Object.defineProperty(globalThis, 'matchMedia', {
|
||||
configurable: true,
|
||||
value: (query: string) => ({
|
||||
matches: query.includes('light') ? isLight : !isLight,
|
||||
media: query,
|
||||
onchange: null,
|
||||
addListener: () => {},
|
||||
removeListener: () => {},
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {},
|
||||
dispatchEvent: () => false,
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
describe('theme module', () => {
|
||||
beforeEach(() => {
|
||||
localStorage.clear();
|
||||
delete document.documentElement.dataset.theme;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.matchMedia = originalMatchMedia;
|
||||
});
|
||||
|
||||
it('exposes an OS-following theme in the selectable list', () => {
|
||||
const followOS = THEMES.find((t) => t.id === FOLLOW_OS_THEME_ID);
|
||||
expect(followOS).toBeDefined();
|
||||
expect(followOS?.name).toBeTruthy();
|
||||
});
|
||||
|
||||
it('applyTheme("follow-os") resolves to light when OS prefers light', () => {
|
||||
stubPrefersLight(true);
|
||||
|
||||
applyTheme(FOLLOW_OS_THEME_ID);
|
||||
|
||||
// Saved value is the follow-os preference, but the DOM reflects the resolved theme.
|
||||
expect(localStorage.getItem('remoteterm-theme')).toBe(FOLLOW_OS_THEME_ID);
|
||||
expect(getSavedTheme()).toBe(FOLLOW_OS_THEME_ID);
|
||||
expect(document.documentElement.dataset.theme).toBe('light');
|
||||
expect(getEffectiveTheme()).toBe('light');
|
||||
});
|
||||
|
||||
it('applyTheme("follow-os") resolves to original (dark) when OS prefers dark', () => {
|
||||
stubPrefersLight(false);
|
||||
|
||||
applyTheme(FOLLOW_OS_THEME_ID);
|
||||
|
||||
expect(localStorage.getItem('remoteterm-theme')).toBe(FOLLOW_OS_THEME_ID);
|
||||
// Original has no data-theme attribute, it's the default.
|
||||
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||
expect(getEffectiveTheme()).toBe('original');
|
||||
});
|
||||
|
||||
it('applyTheme updates the PWA meta theme-color to match the effective theme', () => {
|
||||
// Seed the meta tag (jsdom base template has none).
|
||||
const meta = document.createElement('meta');
|
||||
meta.setAttribute('name', 'theme-color');
|
||||
meta.setAttribute('content', '#000000');
|
||||
document.head.appendChild(meta);
|
||||
|
||||
stubPrefersLight(true);
|
||||
applyTheme(FOLLOW_OS_THEME_ID);
|
||||
// Light theme's metaThemeColor
|
||||
expect(meta.getAttribute('content')).toBe('#F8F7F4');
|
||||
|
||||
stubPrefersLight(false);
|
||||
applyTheme(FOLLOW_OS_THEME_ID);
|
||||
// Original theme's metaThemeColor
|
||||
expect(meta.getAttribute('content')).toBe('#111419');
|
||||
|
||||
meta.remove();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,203 @@
|
||||
import { act, renderHook, waitFor } from '@testing-library/react';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { usePushSubscription } from '../hooks/usePushSubscription';
|
||||
|
||||
const mocks = vi.hoisted(() => ({
|
||||
api: {
|
||||
getPushSubscriptions: vi.fn(),
|
||||
getPushConversations: vi.fn(),
|
||||
getVapidPublicKey: vi.fn(),
|
||||
pushSubscribe: vi.fn(),
|
||||
deletePushSubscription: vi.fn(),
|
||||
togglePushConversation: vi.fn(),
|
||||
testPushSubscription: vi.fn(),
|
||||
},
|
||||
toast: {
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../api', () => ({
|
||||
api: mocks.api,
|
||||
}));
|
||||
|
||||
vi.mock('../components/ui/sonner', () => ({
|
||||
toast: mocks.toast,
|
||||
}));
|
||||
|
||||
function bytesToBase64Url(bytes: number[]): string {
|
||||
return btoa(String.fromCharCode(...bytes))
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=+$/g, '');
|
||||
}
|
||||
|
||||
describe('usePushSubscription', () => {
|
||||
const vapidOldBytes = [1, 2, 3, 4];
|
||||
const vapidNewBytes = [5, 6, 7, 8];
|
||||
const oldKey = new Uint8Array(vapidOldBytes).buffer;
|
||||
const newKeyBase64 = bytesToBase64Url(vapidNewBytes);
|
||||
|
||||
let activeSubscription: {
|
||||
endpoint: string;
|
||||
options: { applicationServerKey: ArrayBuffer };
|
||||
toJSON: () => { endpoint: string; keys: { p256dh: string; auth: string } };
|
||||
unsubscribe: ReturnType<typeof vi.fn>;
|
||||
} | null;
|
||||
let replacementSubscription: {
|
||||
endpoint: string;
|
||||
options: { applicationServerKey: ArrayBuffer };
|
||||
toJSON: () => { endpoint: string; keys: { p256dh: string; auth: string } };
|
||||
unsubscribe: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
let getSubscriptionMock: ReturnType<typeof vi.fn>;
|
||||
let subscribeMock: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
activeSubscription = {
|
||||
endpoint: 'https://push.example.test/sub-old',
|
||||
options: { applicationServerKey: oldKey },
|
||||
toJSON: () => ({
|
||||
endpoint: 'https://push.example.test/sub-old',
|
||||
keys: { p256dh: 'p256dh-old', auth: 'auth-old' },
|
||||
}),
|
||||
unsubscribe: vi.fn(async () => {
|
||||
activeSubscription = null;
|
||||
return true;
|
||||
}),
|
||||
};
|
||||
|
||||
replacementSubscription = {
|
||||
endpoint: 'https://push.example.test/sub-new',
|
||||
options: { applicationServerKey: new Uint8Array(vapidNewBytes).buffer },
|
||||
toJSON: () => ({
|
||||
endpoint: 'https://push.example.test/sub-new',
|
||||
keys: { p256dh: 'p256dh-new', auth: 'auth-new' },
|
||||
}),
|
||||
unsubscribe: vi.fn(async () => true),
|
||||
};
|
||||
|
||||
getSubscriptionMock = vi.fn(async () => activeSubscription);
|
||||
subscribeMock = vi.fn(async () => {
|
||||
activeSubscription = replacementSubscription;
|
||||
return replacementSubscription;
|
||||
});
|
||||
|
||||
Object.defineProperty(window, 'isSecureContext', {
|
||||
configurable: true,
|
||||
value: true,
|
||||
});
|
||||
Object.defineProperty(window, 'PushManager', {
|
||||
configurable: true,
|
||||
value: function PushManager() {},
|
||||
});
|
||||
Object.defineProperty(window, 'Notification', {
|
||||
configurable: true,
|
||||
value: function Notification() {},
|
||||
});
|
||||
Object.defineProperty(navigator, 'serviceWorker', {
|
||||
configurable: true,
|
||||
value: {
|
||||
ready: Promise.resolve({
|
||||
pushManager: {
|
||||
getSubscription: getSubscriptionMock,
|
||||
subscribe: subscribeMock,
|
||||
},
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
mocks.api.getPushConversations.mockResolvedValue([]);
|
||||
mocks.api.getPushSubscriptions.mockResolvedValue([
|
||||
{
|
||||
id: 'sub-1',
|
||||
endpoint: 'https://push.example.test/sub-old',
|
||||
p256dh: 'p256dh-old',
|
||||
auth: 'auth-old',
|
||||
label: 'Chrome on macOS',
|
||||
created_at: 1,
|
||||
last_success_at: null,
|
||||
failure_count: 0,
|
||||
},
|
||||
]);
|
||||
mocks.api.getVapidPublicKey.mockResolvedValue({ public_key: newKeyBase64 });
|
||||
mocks.api.pushSubscribe.mockResolvedValue({
|
||||
id: 'sub-2',
|
||||
endpoint: 'https://push.example.test/sub-new',
|
||||
});
|
||||
});
|
||||
|
||||
it('clears currentSubscriptionId when refresh no longer finds this browser on the backend', async () => {
|
||||
const { result } = renderHook(() => usePushSubscription());
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.currentSubscriptionId).toBe('sub-1');
|
||||
expect(result.current.isSubscribed).toBe(true);
|
||||
});
|
||||
|
||||
mocks.api.getPushSubscriptions.mockResolvedValueOnce([]);
|
||||
|
||||
await act(async () => {
|
||||
await result.current.refreshSubscriptions();
|
||||
});
|
||||
|
||||
expect(result.current.currentSubscriptionId).toBeNull();
|
||||
expect(result.current.isSubscribed).toBe(false);
|
||||
expect(result.current.allSubscriptions).toEqual([]);
|
||||
});
|
||||
|
||||
it('recreates a stale browser subscription when the server VAPID key changed', async () => {
|
||||
const oldSubscription = activeSubscription;
|
||||
mocks.api.getPushSubscriptions
|
||||
.mockReset()
|
||||
.mockResolvedValueOnce([
|
||||
{
|
||||
id: 'sub-1',
|
||||
endpoint: 'https://push.example.test/sub-old',
|
||||
p256dh: 'p256dh-old',
|
||||
auth: 'auth-old',
|
||||
label: 'Chrome on macOS',
|
||||
created_at: 1,
|
||||
last_success_at: null,
|
||||
failure_count: 0,
|
||||
},
|
||||
])
|
||||
.mockResolvedValueOnce([
|
||||
{
|
||||
id: 'sub-2',
|
||||
endpoint: 'https://push.example.test/sub-new',
|
||||
p256dh: 'p256dh-new',
|
||||
auth: 'auth-new',
|
||||
label: 'Chrome on macOS',
|
||||
created_at: 2,
|
||||
last_success_at: null,
|
||||
failure_count: 0,
|
||||
},
|
||||
]);
|
||||
|
||||
const { result } = renderHook(() => usePushSubscription());
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isSupported).toBe(true);
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
await result.current.subscribe();
|
||||
});
|
||||
|
||||
expect(oldSubscription?.unsubscribe).toHaveBeenCalledTimes(1);
|
||||
expect(activeSubscription).toBe(replacementSubscription);
|
||||
expect(subscribeMock).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.api.pushSubscribe).toHaveBeenCalledWith({
|
||||
endpoint: 'https://push.example.test/sub-new',
|
||||
p256dh: 'p256dh-new',
|
||||
auth: 'auth-new',
|
||||
label: expect.any(String),
|
||||
});
|
||||
expect(result.current.currentSubscriptionId).toBe('sub-2');
|
||||
});
|
||||
});
|
||||
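
The hook under test compares the browser subscription's `applicationServerKey` bytes against the server's base64url-encoded VAPID key (`bytesToBase64Url` above goes one direction). The reverse conversion that a `pushManager.subscribe()` call typically needs is sketched below; the helper name is illustrative and not necessarily what `usePushSubscription` uses internally.

```ts
// Illustrative helper (hypothetical name): decode a base64url VAPID public key
// into the Uint8Array that PushManager.subscribe() expects as applicationServerKey.
function base64UrlToUint8Array(base64Url: string): Uint8Array {
  const padded = base64Url + '='.repeat((4 - (base64Url.length % 4)) % 4);
  const base64 = padded.replace(/-/g, '+').replace(/_/g, '/');
  const raw = atob(base64);
  return Uint8Array.from(raw, (char) => char.charCodeAt(0));
}
```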
+30
-1
@@ -355,6 +355,7 @@ export interface AppSettings {
|
||||
discovery_blocked_types: number[];
|
||||
tracked_telemetry_repeaters: string[];
|
||||
auto_resend_channel: boolean;
|
||||
telemetry_interval_hours: number;
|
||||
}
|
||||
|
||||
export interface AppSettingsUpdate {
|
||||
@@ -366,11 +367,22 @@ export interface AppSettingsUpdate {
|
||||
blocked_keys?: string[];
|
||||
blocked_names?: string[];
|
||||
discovery_blocked_types?: number[];
|
||||
telemetry_interval_hours?: number;
|
||||
}
|
||||
|
||||
export interface TelemetrySchedule {
|
||||
preferred_hours: number;
|
||||
effective_hours: number;
|
||||
options: number[];
|
||||
tracked_count: number;
|
||||
max_tracked: number;
|
||||
next_run_at: number | null;
|
||||
}
|
||||
|
||||
export interface TrackedTelemetryResponse {
|
||||
tracked_telemetry_repeaters: string[];
|
||||
names: Record<string, string>;
|
||||
schedule: TelemetrySchedule;
|
||||
}
|
||||
|
||||
/** Contact type constants */
|
||||
@@ -487,9 +499,26 @@ export interface PaneState {
  fetched_at?: number | null;
}

export interface TelemetryLppSensor {
  channel: number;
  type_name: string;
  value: number;
}

export interface TelemetryHistoryEntry {
  timestamp: number;
  data: Record<string, number>;
  data: Record<string, number> & { lpp_sensors?: TelemetryLppSensor[] };
}

export interface PushSubscriptionInfo {
  id: string;
  endpoint: string;
  p256dh: string;
  auth: string;
  label: string;
  created_at: number;
  last_success_at: number | null;
  failure_count: number;
}

export interface TraceResponse {
@@ -209,6 +209,37 @@ export function formatRouteLabel(pathLen: number, capitalize: boolean = false):
|
||||
return capitalize ? label.charAt(0).toUpperCase() + label.slice(1) : label;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the learned direct route for display in route-editing dialogs,
|
||||
* e.g. "2 hops (AE -> F1)", "Direct", or "Flood".
|
||||
*/
|
||||
export function formatLearnedRouteSummary(contact: Contact): string {
|
||||
const directRoute = getDirectContactRoute(contact);
|
||||
if (!directRoute) {
|
||||
return formatRouteLabel(-1, true);
|
||||
}
|
||||
const hops = parsePathHops(directRoute.path, directRoute.path_len);
|
||||
const label = formatRouteLabel(directRoute.path_len, true);
|
||||
return hops.length > 0 ? `${label} (${hops.join(' -> ')})` : label;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the forced (override) route for display in route-editing dialogs,
|
||||
* matching the learned-route format. Returns null when no override is set.
|
||||
*/
|
||||
export function formatForcedRouteSummary(contact: Contact): string | null {
|
||||
if (!hasRoutingOverride(contact)) {
|
||||
return null;
|
||||
}
|
||||
const effectiveRoute = getEffectiveContactRoute(contact);
|
||||
if (effectiveRoute.pathLen === -1) {
|
||||
return formatRouteLabel(-1, true);
|
||||
}
|
||||
const hops = parsePathHops(effectiveRoute.path, effectiveRoute.pathLen);
|
||||
const label = formatRouteLabel(effectiveRoute.pathLen, true);
|
||||
return hops.length > 0 ? `${label} (${hops.join(' -> ')})` : label;
|
||||
}
|
||||
|
||||
export function formatRoutingOverrideInput(contact: Contact): string {
|
||||
const routeOverride = getRouteOverride(contact);
|
||||
if (!routeOverride) {
|
||||
|
||||
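
For orientation, the two formatters added above differ only in which route they read: the learned direct route versus the forced override, rendered in the same "N hops (…)" / "Direct" / "Flood" style. A rough usage sketch, where `contact` stands in for a real Contact object and the output strings are taken from the docstring examples rather than computed here:

```ts
// Illustrative only; `contact` is a placeholder for a real Contact.
const learned = formatLearnedRouteSummary(contact); // e.g. "2 hops (AE -> F1)" or "Flood"
const forced = formatForcedRouteSummary(contact); // null unless a routing override is set
```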
@@ -0,0 +1,61 @@
export const STATUS_DOT_PULSE_CHANGE_EVENT = 'remoteterm-status-dot-pulse-change';
export const STATUS_DOT_PULSE_PACKET_EVENT = 'remoteterm-status-dot-pulse-packet';

const STORAGE_KEY = 'remoteterm-status-dot-pulse';

export type StatusDotPulseKind = 'channel' | 'dm' | 'advert' | 'other';

export function getStatusDotPulseEnabled(): boolean {
  try {
    return localStorage.getItem(STORAGE_KEY) === 'true';
  } catch {
    return false;
  }
}

export function setStatusDotPulseEnabled(enabled: boolean): void {
  try {
    if (enabled) {
      localStorage.setItem(STORAGE_KEY, 'true');
    } else {
      localStorage.removeItem(STORAGE_KEY);
    }
  } catch {
    // localStorage may be unavailable
  }
}

export function payloadTypeToPulseKind(payloadType: string | null | undefined): StatusDotPulseKind {
  switch (payloadType) {
    case 'GROUP_TEXT':
      return 'channel';
    case 'TEXT_MESSAGE':
      return 'dm';
    case 'ADVERT':
      return 'advert';
    default:
      return 'other';
  }
}

const PULSE_COLORS: Record<StatusDotPulseKind, string> = {
  channel: 'hsl(210, 90%, 55%)', // blue
  dm: 'hsl(270, 75%, 60%)', // purple
  advert: 'hsl(185, 85%, 55%)', // cyan
  other: 'hsl(140, 80%, 22%)', // dark green
};

export function pulseColorFor(kind: StatusDotPulseKind): string {
  return PULSE_COLORS[kind];
}

export const STATUS_DOT_PULSE_DURATION_MS = 250;

export function emitStatusDotPulse(payloadType: string | null | undefined): void {
  const kind = payloadTypeToPulseKind(payloadType);
  window.dispatchEvent(
    new CustomEvent<StatusDotPulseKind>(STATUS_DOT_PULSE_PACKET_EVENT, {
      detail: kind,
    })
  );
}
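
`emitStatusDotPulse` only broadcasts a window event; something in the UI still has to listen, check the opt-in flag, and paint the dot for `STATUS_DOT_PULSE_DURATION_MS`. A minimal consumer sketch is below. This is not the repository's actual StatusBar wiring, and the hook name and import path are assumptions.

```ts
// Hypothetical consumer sketch (not the actual StatusBar implementation;
// the '../utils/statusDotPulse' path is assumed).
import { useEffect, useState } from 'react';

import {
  STATUS_DOT_PULSE_DURATION_MS,
  STATUS_DOT_PULSE_PACKET_EVENT,
  type StatusDotPulseKind,
  getStatusDotPulseEnabled,
  pulseColorFor,
} from '../utils/statusDotPulse';

export function usePulseColor(): string | null {
  const [color, setColor] = useState<string | null>(null);

  useEffect(() => {
    let timer: number | undefined;
    const onPulse = (event: Event) => {
      if (!getStatusDotPulseEnabled()) return;
      const kind = (event as CustomEvent<StatusDotPulseKind>).detail;
      setColor(pulseColorFor(kind));
      window.clearTimeout(timer);
      // Return to the idle color once the pulse duration has elapsed.
      timer = window.setTimeout(() => setColor(null), STATUS_DOT_PULSE_DURATION_MS);
    };
    window.addEventListener(STATUS_DOT_PULSE_PACKET_EVENT, onPulse);
    return () => {
      window.clearTimeout(timer);
      window.removeEventListener(STATUS_DOT_PULSE_PACKET_EVENT, onPulse);
    };
  }, []);

  return color;
}
```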
@@ -9,6 +9,8 @@ export interface Theme {
|
||||
|
||||
export const THEME_CHANGE_EVENT = 'remoteterm-theme-change';
|
||||
|
||||
export const FOLLOW_OS_THEME_ID = 'follow-os';
|
||||
|
||||
export const THEMES: Theme[] = [
|
||||
{
|
||||
id: 'original',
|
||||
@@ -22,6 +24,13 @@ export const THEMES: Theme[] = [
|
||||
swatches: ['#F8F7F4', '#FFFFFF', '#1B7D4E', '#EDEBE7', '#D97706', '#3B82F6'],
|
||||
metaThemeColor: '#F8F7F4',
|
||||
},
|
||||
{
|
||||
id: FOLLOW_OS_THEME_ID,
|
||||
name: 'OS Light/Dark Mode',
|
||||
// Top row: light theme preview colors; bottom row: original (dark) preview colors
|
||||
swatches: ['#F8F7F4', '#FFFFFF', '#1B7D4E', '#111419', '#181b21', '#27a05c'],
|
||||
metaThemeColor: '#111419',
|
||||
},
|
||||
{
|
||||
id: 'ios',
|
||||
name: 'iPhone',
|
||||
@@ -94,6 +103,23 @@ export function getSavedTheme(): string {
|
||||
}
|
||||
}
|
||||
|
||||
/** Resolves "Follow OS" to a concrete theme id by inspecting the OS color-scheme preference. */
|
||||
function resolveFollowOS(): 'original' | 'light' {
|
||||
if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') {
|
||||
return 'original';
|
||||
}
|
||||
return window.matchMedia('(prefers-color-scheme: light)').matches ? 'light' : 'original';
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the concrete theme id currently applied to the document.
|
||||
* Unlike getSavedTheme, this resolves 'follow-os' to 'original' or 'light'.
|
||||
*/
|
||||
export function getEffectiveTheme(): string {
|
||||
const saved = getSavedTheme();
|
||||
return saved === FOLLOW_OS_THEME_ID ? resolveFollowOS() : saved;
|
||||
}
|
||||
|
||||
export function applyTheme(themeId: string): void {
|
||||
try {
|
||||
localStorage.setItem(THEME_KEY, themeId);
|
||||
@@ -101,14 +127,16 @@ export function applyTheme(themeId: string): void {
|
||||
// localStorage may be unavailable
|
||||
}
|
||||
|
||||
if (themeId === 'original') {
|
||||
const effective = themeId === FOLLOW_OS_THEME_ID ? resolveFollowOS() : themeId;
|
||||
|
||||
if (effective === 'original') {
|
||||
delete document.documentElement.dataset.theme;
|
||||
} else {
|
||||
document.documentElement.dataset.theme = themeId;
|
||||
document.documentElement.dataset.theme = effective;
|
||||
}
|
||||
|
||||
// Update PWA theme-color meta tag
|
||||
const theme = THEMES.find((t) => t.id === themeId);
|
||||
// Update PWA theme-color meta tag — reflect the effective (rendered) theme.
|
||||
const theme = THEMES.find((t) => t.id === effective);
|
||||
if (theme) {
|
||||
const meta = document.querySelector('meta[name="theme-color"]');
|
||||
if (meta) {
|
||||
@@ -117,6 +145,33 @@ export function applyTheme(themeId: string): void {
|
||||
}
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
// Detail is the saved theme id (including 'follow-os'); listeners that need
|
||||
// the rendered appearance should call getEffectiveTheme().
|
||||
window.dispatchEvent(new CustomEvent(THEME_CHANGE_EVENT, { detail: themeId }));
|
||||
}
|
||||
}
|
||||
|
||||
let followOSInitialized = false;
|
||||
|
||||
/**
|
||||
* Installs a one-time listener on prefers-color-scheme so that when the user is
|
||||
* on "Follow OS", OS appearance changes re-apply the theme. Safe to call once
|
||||
* from app bootstrap.
|
||||
*/
|
||||
export function initFollowOSListener(): void {
|
||||
if (followOSInitialized) return;
|
||||
if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') return;
|
||||
followOSInitialized = true;
|
||||
const mql = window.matchMedia('(prefers-color-scheme: light)');
|
||||
const handler = () => {
|
||||
if (getSavedTheme() === FOLLOW_OS_THEME_ID) {
|
||||
applyTheme(FOLLOW_OS_THEME_ID);
|
||||
}
|
||||
};
|
||||
if (typeof mql.addEventListener === 'function') {
|
||||
mql.addEventListener('change', handler);
|
||||
} else if (typeof (mql as MediaQueryList).addListener === 'function') {
|
||||
// Safari < 14 fallback
|
||||
(mql as MediaQueryList).addListener(handler);
|
||||
}
|
||||
}
|
||||
|
||||
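
`initFollowOSListener` only re-applies the theme while `'follow-os'` is the saved preference, so it has to be installed once at startup alongside the initial `applyTheme` call. A sketch of that bootstrap wiring, assuming it lives in the frontend entry point (the actual call site may differ):

```ts
// Hypothetical bootstrap wiring; the real entry point may already do this elsewhere.
import { applyTheme, getSavedTheme, initFollowOSListener } from './utils/theme';

applyTheme(getSavedTheme()); // resolves 'follow-os' to 'light' or 'original' for the DOM
initFollowOSListener(); // re-applies whenever prefers-color-scheme flips while following the OS
```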
+2
-1
@@ -1,6 +1,6 @@
[project]
name = "remoteterm-meshcore"
version = "3.11.0"
version = "3.11.3"
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
readme = "README.md"
requires-python = ">=3.11"
@@ -16,6 +16,7 @@ dependencies = [
    "aiomqtt>=2.0",
    "apprise>=1.9.8",
    "boto3>=1.38.0",
    "pywebpush>=0.14.0",
]

[project.optional-dependencies]
Regular → Executable (file mode change on 8 files)
+17
-2
@@ -28,13 +28,28 @@ def cleanup_test_db_dir():
@pytest.fixture
async def test_db():
    """Create an in-memory test database with schema + migrations."""
    from app.repository import channels, contacts, messages, raw_packets, settings
    from app.repository import (
        channels,
        contacts,
        messages,
        raw_packets,
        repeater_telemetry,
        settings,
    )
    from app.repository import fanout as fanout_repo

    db = Database(":memory:")
    await db.connect()

    submodules = [contacts, channels, messages, raw_packets, settings, fanout_repo]
    submodules = [
        contacts,
        channels,
        messages,
        raw_packets,
        settings,
        fanout_repo,
        repeater_telemetry,
    ]
    originals = [(mod, mod.db) for mod in submodules]

    for mod in submodules:
@@ -105,13 +105,15 @@ class TestCreateContact:
        data = response.json()
        assert data["public_key"] == KEY_A
        assert data["name"] == "NewContact"
        assert data["last_seen"] is not None
        # Manually created contacts have no RF observation yet, so last_seen
        # stays NULL until we actually hear them on the air.
        assert data["last_seen"] is None

        # Verify in DB
        contact = await ContactRepository.get_by_key(KEY_A)
        assert contact is not None
        assert contact.name == "NewContact"
        assert data["last_seen"] == contact.last_seen
        assert contact.last_seen is None
        mock_broadcast.assert_called_once_with("contact", contact.model_dump())

    @pytest.mark.asyncio

@@ -1134,12 +1134,14 @@ class TestOnNewContact:

        await on_new_contact(MockEvent())

        # Verify contact was created in real DB
        # Verify contact was created in real DB. NEW_CONTACT is the radio's
        # stored contact DB, not an RF observation, so last_seen stays NULL
        # until we actually hear the contact on the air.
        contact = await ContactRepository.get_by_key("cc" * 32)
        assert contact is not None
        assert contact.name == "Charlie"
        assert contact.on_radio is False
        assert contact.last_seen == 1700000000
        assert contact.last_seen is None

        mock_broadcast.assert_called_once()
        event_type, contact_data = mock_broadcast.call_args[0]

@@ -69,7 +69,12 @@ def test_valid_dist_serves_static_and_spa_fallback(tmp_path):
|
||||
assert manifest["scope"] == "http://testserver/"
|
||||
assert manifest["id"] == "http://testserver/"
|
||||
assert manifest["display"] == "standalone"
|
||||
assert manifest["icons"][0]["src"] == "http://testserver/web-app-manifest-192x192.png"
|
||||
icon_srcs = {icon["src"] for icon in manifest["icons"]}
|
||||
assert "http://testserver/web-app-manifest-192x192.png" in icon_srcs
|
||||
assert "http://testserver/web-app-manifest-512x512.png" in icon_srcs
|
||||
# SVG icons cause inconsistent PWA icon rendering on iOS; the manifest
|
||||
# must be PNG-only.
|
||||
assert all(icon["type"] == "image/png" for icon in manifest["icons"])
|
||||
|
||||
file_response = client.get("/robots.txt")
|
||||
assert file_response.status_code == 200
|
||||
@@ -152,7 +157,9 @@ def test_webmanifest_includes_forwarded_prefix(tmp_path):
|
||||
assert data["start_url"] == expected_base
|
||||
assert data["scope"] == expected_base
|
||||
assert data["id"] == expected_base
|
||||
assert data["icons"][0]["src"] == f"{expected_base}web-app-manifest-192x192.png"
|
||||
icon_srcs = {icon["src"] for icon in data["icons"]}
|
||||
assert f"{expected_base}web-app-manifest-192x192.png" in icon_srcs
|
||||
assert f"{expected_base}web-app-manifest-512x512.png" in icon_srcs
|
||||
|
||||
|
||||
def test_first_available_prefers_dist_over_prebuilt(tmp_path):
|
||||
|
||||
@@ -479,7 +479,7 @@ class TestLiveSend:

    @pytest.mark.asyncio
    async def test_live_send_defaults_to_map_url(self):
        """Empty api_url should default to the map.meshcore.dev endpoint."""
        """Empty api_url should default to the map.meshcore.io endpoint."""
        mod = _make_module({"dry_run": False, "api_url": ""})
        await mod.start()

@@ -2,4 +2,4 @@
# run ``run_migrations`` to completion assert ``get_version == LATEST`` and
# ``applied == LATEST - starting_version`` so only this constant needs to
# change, not every individual assertion.
LATEST_SCHEMA_VERSION = 56
LATEST_SCHEMA_VERSION = 58

+218
-1
@@ -9,6 +9,8 @@ from app.fanout.mqtt_ha import (
|
||||
MqttHaModule,
|
||||
_contact_tracker_discovery_config,
|
||||
_device_payload,
|
||||
_lpp_discovery_configs,
|
||||
_lpp_sensor_key,
|
||||
_message_event_discovery_config,
|
||||
_node_id,
|
||||
_radio_discovery_configs,
|
||||
@@ -102,6 +104,21 @@ class TestRadioDiscovery:
|
||||
for _, cfg in configs[1:]:
|
||||
assert cfg["expire_after"] == 120
|
||||
|
||||
def test_sensor_configs_have_display_precision(self):
|
||||
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||
# All sensor configs (skip the binary_sensor at index 0)
|
||||
for _, cfg in configs[1:]:
|
||||
assert "suggested_display_precision" in cfg
|
||||
assert isinstance(cfg["suggested_display_precision"], int)
|
||||
|
||||
def test_battery_sensor_uses_volts(self):
|
||||
configs = _radio_discovery_configs("mc", "aabbccddeeff", "R")
|
||||
battery_cfgs = [(t, c) for t, c in configs if "battery" in t]
|
||||
assert len(battery_cfgs) == 1
|
||||
_, cfg = battery_cfgs[0]
|
||||
assert cfg["unit_of_measurement"] == "V"
|
||||
assert cfg["suggested_display_precision"] == 2
|
||||
|
||||
|
||||
class TestRepeaterDiscovery:
|
||||
def test_produces_sensor_per_field(self):
|
||||
@@ -122,6 +139,11 @@ class TestRepeaterDiscovery:
|
||||
for _, cfg in configs:
|
||||
assert cfg["expire_after"] == 36000
|
||||
|
||||
def test_sensors_have_display_precision(self):
|
||||
configs = _repeater_discovery_configs("mc", "ccdd", "Rep1", None)
|
||||
for _, cfg in configs:
|
||||
assert "suggested_display_precision" in cfg
|
||||
|
||||
|
||||
class TestContactTrackerDiscovery:
|
||||
def test_config_shape(self):
|
||||
@@ -261,7 +283,7 @@ class TestMqttHaHealth:
|
||||
payload = health_calls[-1][0][1]
|
||||
assert payload["connected"] is True
|
||||
assert payload["noise_floor_dbm"] == -110
|
||||
assert payload["battery_mv"] == 4150
|
||||
assert payload["battery_volts"] == 4.15
|
||||
assert payload["uptime_secs"] == 3600
|
||||
assert payload["last_rssi"] == -85
|
||||
assert payload["packets_recv"] == 500
|
||||
@@ -479,3 +501,198 @@ class TestMqttHaValidation:
|
||||
result = _enforce_scope("mqtt_ha", {"messages": "all", "raw_packets": "all"})
|
||||
assert result["raw_packets"] == "none"
|
||||
assert result["messages"] == "all"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# LPP sensor discovery and telemetry
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestLppSensorKey:
|
||||
def test_basic(self):
|
||||
assert _lpp_sensor_key("temperature", 1) == "lpp_temperature_ch1"
|
||||
|
||||
def test_zero_channel(self):
|
||||
assert _lpp_sensor_key("humidity", 0) == "lpp_humidity_ch0"
|
||||
|
||||
|
||||
class TestLppDiscoveryConfigs:
|
||||
def test_produces_config_per_sensor(self):
|
||||
nid = "ccdd11223344"
|
||||
device = _device_payload(nid, "Rep1", "Repeater")
|
||||
sensors = [
|
||||
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||
{"channel": 2, "type_name": "humidity", "value": 45.0},
|
||||
]
|
||||
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||
|
||||
assert len(configs) == 2
|
||||
topics = [t for t, _ in configs]
|
||||
assert f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config" in topics
|
||||
assert f"homeassistant/sensor/meshcore_{nid}/lpp_humidity_ch2/config" in topics
|
||||
|
||||
def test_sensor_config_shape(self):
|
||||
nid = "ccdd11223344"
|
||||
device = _device_payload(nid, "Rep1", "Repeater")
|
||||
sensors = [{"channel": 1, "type_name": "temperature", "value": 23.5}]
|
||||
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||
|
||||
_, cfg = configs[0]
|
||||
assert cfg["name"] == "Temperature (Ch 1)"
|
||||
assert cfg["unique_id"] == f"meshcore_{nid}_lpp_temperature_ch1"
|
||||
assert cfg["device_class"] == "temperature"
|
||||
assert cfg["unit_of_measurement"] == "°C"
|
||||
assert cfg["state_class"] == "measurement"
|
||||
assert cfg["expire_after"] == 36000
|
||||
assert cfg["suggested_display_precision"] == 1
|
||||
assert "lpp_temperature_ch1" in cfg["value_template"]
|
||||
|
||||
def test_unknown_sensor_type_no_device_class(self):
|
||||
nid = "ccdd11223344"
|
||||
device = _device_payload(nid, "Rep1", "Repeater")
|
||||
sensors = [{"channel": 0, "type_name": "exotic_sensor", "value": 1.0}]
|
||||
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||
|
||||
_, cfg = configs[0]
|
||||
assert "device_class" not in cfg
|
||||
assert "unit_of_measurement" not in cfg
|
||||
|
||||
|
||||
class TestMqttHaTelemetryWithLpp:
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_flattens_lpp_sensors(self):
|
||||
key = "ccdd11223344"
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
# Pretend discovery already covers these sensors
|
||||
nid = _node_id(key)
|
||||
mod._discovery_topics = [
|
||||
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config",
|
||||
f"homeassistant/sensor/meshcore_{nid}/lpp_humidity_ch2/config",
|
||||
]
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"lpp_sensors": [
|
||||
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||
{"channel": 2, "type_name": "humidity", "value": 45.0},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
mod._publisher.publish.assert_called_once()
|
||||
payload = mod._publisher.publish.call_args[0][1]
|
||||
assert payload["battery_volts"] == 4.1
|
||||
assert payload["lpp_temperature_ch1"] == 23.5
|
||||
assert payload["lpp_humidity_ch2"] == 45.0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_triggers_rediscovery_for_new_lpp_sensor(self):
|
||||
key = "ccdd11223344"
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
mod._discovery_topics = [] # No sensors discovered yet
|
||||
mod._publish_discovery = AsyncMock()
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"lpp_sensors": [
|
||||
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
mod._publish_discovery.assert_awaited_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_discovery_published_before_state(self):
|
||||
"""Discovery configs must arrive before the state payload so HA knows the entity."""
|
||||
key = "ccdd11223344"
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
mod._discovery_topics = [] # New sensor triggers rediscovery
|
||||
|
||||
call_order: list[str] = []
|
||||
|
||||
async def fake_discovery():
|
||||
call_order.append("discovery")
|
||||
|
||||
mod._publish_discovery = AsyncMock(side_effect=fake_discovery)
|
||||
|
||||
original_publish = mod._publisher.publish
|
||||
|
||||
async def tracking_publish(topic, payload, **kw):
|
||||
if "/telemetry" in topic:
|
||||
call_order.append("state")
|
||||
return await original_publish(topic, payload, **kw)
|
||||
|
||||
mod._publisher.publish = AsyncMock(side_effect=tracking_publish)
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"lpp_sensors": [
|
||||
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
assert call_order == ["discovery", "state"]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_no_rediscovery_when_already_known(self):
|
||||
key = "ccdd11223344"
|
||||
nid = _node_id(key)
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
mod._discovery_topics = [
|
||||
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config",
|
||||
]
|
||||
mod._publish_discovery = AsyncMock()
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"lpp_sensors": [
|
||||
{"channel": 1, "type_name": "temperature", "value": 23.5},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
mod._publish_discovery.assert_not_awaited()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_without_lpp_sensors(self):
|
||||
"""Existing behavior: no lpp_sensors key means no LPP fields in payload."""
|
||||
key = "ccdd11223344"
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"noise_floor_dbm": -112,
|
||||
}
|
||||
)
|
||||
|
||||
payload = mod._publisher.publish.call_args[0][1]
|
||||
assert payload["battery_volts"] == 4.1
|
||||
# No lpp keys
|
||||
assert not any(k.startswith("lpp_") for k in payload)
|
||||
|
||||
@@ -322,7 +322,7 @@ class TestUndecryptedTextPacketStreaming:
|
||||
[],
|
||||
]
|
||||
|
||||
async def fake_execute(*_args, **_kwargs):
|
||||
def fake_execute(*_args, **_kwargs):
|
||||
batch = batches.pop(0)
|
||||
|
||||
class FakeCursor:
|
||||
@@ -332,6 +332,16 @@ class TestUndecryptedTextPacketStreaming:
|
||||
async def close(self):
|
||||
pass
|
||||
|
||||
async def __aenter__(self):
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc, tb):
|
||||
return None
|
||||
|
||||
# aiosqlite's execute() returns a `contextmanager`-decorated
|
||||
# coroutine that is both awaitable and usable as an async-with.
|
||||
# Our repo code now uses `async with conn.execute(...) as cursor:`,
|
||||
# so the mock just needs to return something with __aenter__/__aexit__.
|
||||
return FakeCursor()
|
||||
|
||||
with patch.object(test_db.conn, "execute", side_effect=fake_execute):
|
||||
|
||||
@@ -0,0 +1,74 @@
"""Tests for Web Push delivery transport behavior."""

from types import SimpleNamespace
from unittest.mock import patch

import pytest
import requests

from app.push.send import (
    DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS,
    DEFAULT_PUSH_READ_TIMEOUT_SECONDS,
    IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS,
    IPv4HTTPAdapter,
    send_push,
)

@pytest.mark.asyncio
|
||||
async def test_send_push_prefers_default_dual_stack_session_before_any_ipv4_fallback():
|
||||
"""Successful sends should use the normal requests transport without forcing IPv4."""
|
||||
captured_kwargs: dict = {}
|
||||
|
||||
def fake_webpush(**kwargs):
|
||||
captured_kwargs.update(kwargs)
|
||||
return SimpleNamespace(status_code=201)
|
||||
|
||||
with patch("app.push.send.webpush", side_effect=fake_webpush):
|
||||
status = await send_push(
|
||||
subscription_info={"endpoint": "https://push.example.test", "keys": {}},
|
||||
payload='{"message":"hello"}',
|
||||
vapid_private_key="private-key",
|
||||
vapid_claims={"sub": "mailto:test@example.com"},
|
||||
)
|
||||
|
||||
assert status == 201
|
||||
session = captured_kwargs["requests_session"]
|
||||
assert not isinstance(session.adapters["https://"], IPv4HTTPAdapter)
|
||||
assert captured_kwargs["timeout"] == (
|
||||
DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS,
|
||||
DEFAULT_PUSH_READ_TIMEOUT_SECONDS,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_push_retries_with_ipv4_session_after_connect_timeout():
|
||||
"""Connect failures should retry through the isolated IPv4-only transport."""
|
||||
calls: list[dict] = []
|
||||
|
||||
def fake_webpush(**kwargs):
|
||||
calls.append(kwargs)
|
||||
if len(calls) == 1:
|
||||
raise requests.exceptions.ConnectTimeout("ipv6 connect timed out")
|
||||
return SimpleNamespace(status_code=201)
|
||||
|
||||
with patch("app.push.send.webpush", side_effect=fake_webpush):
|
||||
status = await send_push(
|
||||
subscription_info={"endpoint": "https://push.example.test", "keys": {}},
|
||||
payload='{"message":"hello"}',
|
||||
vapid_private_key="private-key",
|
||||
vapid_claims={"sub": "mailto:test@example.com"},
|
||||
)
|
||||
|
||||
assert status == 201
|
||||
assert len(calls) == 2
|
||||
assert not isinstance(calls[0]["requests_session"].adapters["https://"], IPv4HTTPAdapter)
|
||||
assert isinstance(calls[1]["requests_session"].adapters["https://"], IPv4HTTPAdapter)
|
||||
assert calls[0]["timeout"] == (
|
||||
DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS,
|
||||
DEFAULT_PUSH_READ_TIMEOUT_SECONDS,
|
||||
)
|
||||
assert calls[1]["timeout"] == (
|
||||
IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS,
|
||||
DEFAULT_PUSH_READ_TIMEOUT_SECONDS,
|
||||
)
|
||||
+588
-7
@@ -377,14 +377,22 @@ class TestSyncRecentContactsToRadio:
|
||||
assert result["loaded"] == 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_fills_remaining_slots_with_recently_contacted_then_advertised(self, test_db):
|
||||
"""Fill order is favorites, then recent contacts, then recent adverts."""
|
||||
await _insert_contact(KEY_A, "Alice", last_contacted=100)
|
||||
await _insert_contact(KEY_B, "Bob", last_contacted=2000)
|
||||
await _insert_contact("cc" * 32, "Carol", last_contacted=1000)
|
||||
async def test_fills_remaining_slots_with_dm_active_then_advertised(self, test_db):
|
||||
"""Fill order is favorites, then DM-active contacts, then recent adverts."""
|
||||
await _insert_contact(KEY_A, "Alice")
|
||||
await _insert_contact(KEY_B, "Bob")
|
||||
await _insert_contact("cc" * 32, "Carol")
|
||||
await _insert_contact("dd" * 32, "Dave", last_advert=3000)
|
||||
await _insert_contact("ee" * 32, "Eve", last_advert=2500)
|
||||
|
||||
# Create DM activity for Alice (oldest), Bob (most recent), Carol (middle)
|
||||
for key, ts in [(KEY_A, 100), (KEY_B, 2000), ("cc" * 32, 1000)]:
|
||||
await test_db.conn.execute(
|
||||
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hi', ?)",
|
||||
(key, ts),
|
||||
)
|
||||
await test_db.conn.commit()
|
||||
|
||||
await AppSettingsRepository.update(max_radio_contacts=5)
|
||||
await ContactRepository.set_favorite(KEY_A, True)
|
||||
|
||||
@@ -401,6 +409,7 @@ class TestSyncRecentContactsToRadio:
|
||||
loaded_keys = [
|
||||
call.args[0]["public_key"] for call in mock_mc.commands.add_contact.call_args_list
|
||||
]
|
||||
# Alice (favorite), then Bob & Carol (DM-active, most recent first), then Dave (advert)
|
||||
assert loaded_keys == [KEY_A, KEY_B, "cc" * 32, "dd" * 32]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -509,8 +518,15 @@ class TestSyncAndOffloadAll:
|
||||
@pytest.mark.asyncio
|
||||
async def test_duplicate_favorite_not_loaded_twice(self, test_db):
|
||||
"""Duplicate favorite entries still load the contact only once."""
|
||||
await _insert_contact(KEY_A, "Alice", last_contacted=2000)
|
||||
await _insert_contact(KEY_B, "Bob", last_contacted=1000)
|
||||
await _insert_contact(KEY_A, "Alice")
|
||||
await _insert_contact(KEY_B, "Bob")
|
||||
|
||||
# Bob has DM activity so he appears in tier 2
|
||||
await test_db.conn.execute(
|
||||
"INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hi', 1000)",
|
||||
(KEY_B,),
|
||||
)
|
||||
await test_db.conn.commit()
|
||||
|
||||
await AppSettingsRepository.update(max_radio_contacts=2)
|
||||
await ContactRepository.set_favorite(KEY_A, True)
|
||||
@@ -1695,3 +1711,568 @@ class TestPeriodicSyncLoopRaces:
|
||||
mock_cleanup.assert_called_once()
|
||||
mock_sync.assert_not_called()
|
||||
mock_time.assert_called_once_with(mock_mc)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _collect_repeater_telemetry — LPP sensor collection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestCollectRepeaterTelemetryLpp:
|
||||
"""Verify that _collect_repeater_telemetry fetches LPP sensors."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lpp_sensors_included_in_data(self):
|
||||
from app.radio_sync import _collect_repeater_telemetry
|
||||
|
||||
mc = MagicMock()
|
||||
mc.commands.add_contact = AsyncMock()
|
||||
mc.commands.req_status_sync = AsyncMock(
|
||||
return_value={"bat": 4100, "noise_floor": -110, "nb_recv": 10, "nb_sent": 5}
|
||||
)
|
||||
mc.commands.req_telemetry_sync = AsyncMock(
|
||||
return_value=[
|
||||
{"channel": 1, "type": "temperature", "value": 23.5},
|
||||
{"channel": 2, "type": "humidity", "value": 45.0},
|
||||
]
|
||||
)
|
||||
|
||||
contact = MagicMock()
|
||||
contact.public_key = "aabbccddeeff11223344"
|
||||
contact.name = "TestRepeater"
|
||||
contact.to_radio_dict.return_value = {}
|
||||
|
||||
recorded_data = {}
|
||||
|
||||
async def mock_record(public_key, timestamp, data):
|
||||
recorded_data.update(data)
|
||||
|
||||
mock_fanout = MagicMock()
|
||||
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=mock_record,
|
||||
),
|
||||
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||
):
|
||||
result = await _collect_repeater_telemetry(mc, contact)
|
||||
|
||||
assert result is True
|
||||
assert "lpp_sensors" in recorded_data
|
||||
assert len(recorded_data["lpp_sensors"]) == 2
|
||||
assert recorded_data["lpp_sensors"][0]["type_name"] == "temperature"
|
||||
assert recorded_data["lpp_sensors"][0]["value"] == 23.5
|
||||
assert recorded_data["lpp_sensors"][1]["type_name"] == "humidity"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lpp_failure_does_not_fail_collection(self):
|
||||
from app.radio_sync import _collect_repeater_telemetry
|
||||
|
||||
mc = MagicMock()
|
||||
mc.commands.add_contact = AsyncMock()
|
||||
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4100, "noise_floor": -110})
|
||||
mc.commands.req_telemetry_sync = AsyncMock(side_effect=Exception("no sensors"))
|
||||
|
||||
contact = MagicMock()
|
||||
contact.public_key = "aabbccddeeff11223344"
|
||||
contact.name = "TestRepeater"
|
||||
contact.to_radio_dict.return_value = {}
|
||||
|
||||
recorded_data = {}
|
||||
|
||||
async def mock_record(public_key, timestamp, data):
|
||||
recorded_data.update(data)
|
||||
|
||||
mock_fanout = MagicMock()
|
||||
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=mock_record,
|
||||
),
|
||||
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||
):
|
||||
result = await _collect_repeater_telemetry(mc, contact)
|
||||
|
||||
assert result is True
|
||||
assert "lpp_sensors" not in recorded_data
|
||||
# Status data still present
|
||||
assert recorded_data["battery_volts"] == 4.1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lpp_multivalue_sensors_skipped(self):
|
||||
from app.radio_sync import _collect_repeater_telemetry
|
||||
|
||||
mc = MagicMock()
|
||||
mc.commands.add_contact = AsyncMock()
|
||||
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4000})
|
||||
mc.commands.req_telemetry_sync = AsyncMock(
|
||||
return_value=[
|
||||
{"channel": 1, "type": "temperature", "value": 23.5},
|
||||
{"channel": 3, "type": "gps", "value": {"lat": 1.0, "lon": 2.0, "alt": 3.0}},
|
||||
]
|
||||
)
|
||||
|
||||
contact = MagicMock()
|
||||
contact.public_key = "aabbccddeeff11223344"
|
||||
contact.name = "TestRepeater"
|
||||
contact.to_radio_dict.return_value = {}
|
||||
|
||||
recorded_data = {}
|
||||
|
||||
async def mock_record(public_key, timestamp, data):
|
||||
recorded_data.update(data)
|
||||
|
||||
mock_fanout = MagicMock()
|
||||
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=mock_record,
|
||||
),
|
||||
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||
):
|
||||
result = await _collect_repeater_telemetry(mc, contact)
|
||||
|
||||
assert result is True
|
||||
assert len(recorded_data["lpp_sensors"]) == 1
|
||||
assert recorded_data["lpp_sensors"][0]["type_name"] == "temperature"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lpp_none_response_no_sensors_key(self):
|
||||
from app.radio_sync import _collect_repeater_telemetry
|
||||
|
||||
mc = MagicMock()
|
||||
mc.commands.add_contact = AsyncMock()
|
||||
mc.commands.req_status_sync = AsyncMock(return_value={"bat": 4000})
|
||||
mc.commands.req_telemetry_sync = AsyncMock(return_value=None)
|
||||
|
||||
contact = MagicMock()
|
||||
contact.public_key = "aabbccddeeff11223344"
|
||||
contact.name = "TestRepeater"
|
||||
contact.to_radio_dict.return_value = {}
|
||||
|
||||
recorded_data = {}
|
||||
|
||||
async def mock_record(public_key, timestamp, data):
|
||||
recorded_data.update(data)
|
||||
|
||||
mock_fanout = MagicMock()
|
||||
mock_fanout.broadcast_telemetry = AsyncMock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.RepeaterTelemetryRepository.record",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=mock_record,
|
||||
),
|
||||
patch("app.fanout.manager.fanout_manager", mock_fanout),
|
||||
):
|
||||
await _collect_repeater_telemetry(mc, contact)
|
||||
|
||||
assert "lpp_sensors" not in recorded_data
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _telemetry_collect_loop — UTC modulo scheduler
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestTelemetryCollectSchedulerDecision:
|
||||
"""Verify the scheduler's run/skip decision at an hourly wake.
|
||||
|
||||
We test the decision logic by stubbing the sleep + datetime functions
|
||||
and asserting ``_run_telemetry_cycle`` is called exactly on matching
|
||||
hours. Full end-to-end of the loop is covered implicitly by the
|
||||
existing telemetry-collect tests; what we're pinning here is the
|
||||
hour-modulo gate the new scheduler depends on.
|
||||
"""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_skips_when_hour_modulo_mismatch(self):
|
||||
"""At 09:00 UTC with interval 8h, the loop must NOT run a cycle."""
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=["aa" * 32],
|
||||
telemetry_interval_hours=8,
|
||||
)
|
||||
ran = False
|
||||
|
||||
async def fake_cycle():
|
||||
nonlocal ran
|
||||
ran = True
|
||||
|
||||
def make_fake_datetime(hour: int):
|
||||
class FakeDatetime:
|
||||
@classmethod
|
||||
def now(cls, tz=None):
|
||||
import datetime as real_datetime
|
||||
|
||||
return real_datetime.datetime(2026, 4, 16, hour, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
return FakeDatetime
|
||||
|
||||
sleep_count = 0
|
||||
|
||||
async def fake_sleep(_duration):
|
||||
# The loop does: (1) initial-delay sleep, (2) sleep-to-top-of-hour,
|
||||
# then evaluates the run/skip decision. Allow both sleeps to
|
||||
# pass, then cancel on the 3rd (next iteration's top-of-hour sleep).
|
||||
nonlocal sleep_count
|
||||
sleep_count += 1
|
||||
if sleep_count >= 3:
|
||||
raise asyncio.CancelledError()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
patch("app.radio_sync.asyncio.sleep", new=fake_sleep),
|
||||
patch("app.radio_sync.datetime", new=make_fake_datetime(9)),
|
||||
):
|
||||
try:
|
||||
await radio_sync._telemetry_collect_loop()
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
assert ran is False, "09:00 UTC is not a multiple of 8h; cycle must not run"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_runs_when_hour_modulo_matches(self):
|
||||
"""At 16:00 UTC with interval 8h, the loop must run a cycle."""
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=["aa" * 32],
|
||||
telemetry_interval_hours=8,
|
||||
)
|
||||
ran = False
|
||||
|
||||
async def fake_cycle():
|
||||
nonlocal ran
|
||||
ran = True
|
||||
|
||||
class FakeDatetime:
|
||||
@classmethod
|
||||
def now(cls, tz=None):
|
||||
import datetime as real_datetime
|
||||
|
||||
return real_datetime.datetime(2026, 4, 16, 16, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
sleep_count = 0
|
||||
|
||||
async def fake_sleep(_duration):
|
||||
# Let the loop's initial-delay + top-of-hour sleeps pass; cancel
|
||||
# on the third sleep (next iteration's top-of-hour wake).
|
||||
nonlocal sleep_count
|
||||
sleep_count += 1
|
||||
if sleep_count >= 3:
|
||||
raise asyncio.CancelledError()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
patch("app.radio_sync.asyncio.sleep", new=fake_sleep),
|
||||
patch("app.radio_sync.datetime", new=FakeDatetime),
|
||||
):
|
||||
try:
|
||||
await radio_sync._telemetry_collect_loop()
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
assert ran is True, "16:00 UTC is a multiple of 8h; cycle must run"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_skips_when_no_repeaters_tracked(self):
|
||||
"""Empty tracked list short-circuits regardless of modulo match."""
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(tracked_telemetry_repeaters=[], telemetry_interval_hours=8)
|
||||
ran = False
|
||||
|
||||
async def fake_cycle():
|
||||
nonlocal ran
|
||||
ran = True
|
||||
|
||||
class FakeDatetime:
|
||||
@classmethod
|
||||
def now(cls, tz=None):
|
||||
import datetime as real_datetime
|
||||
|
||||
return real_datetime.datetime(2026, 4, 16, 16, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
sleep_count = 0
|
||||
|
||||
async def fake_sleep(_duration):
|
||||
# Let the loop's initial-delay + top-of-hour sleeps pass; cancel
|
||||
# on the third sleep (next iteration's top-of-hour wake).
|
||||
nonlocal sleep_count
|
||||
sleep_count += 1
|
||||
if sleep_count >= 3:
|
||||
raise asyncio.CancelledError()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
patch("app.radio_sync.asyncio.sleep", new=fake_sleep),
|
||||
patch("app.radio_sync.datetime", new=FakeDatetime),
|
||||
):
|
||||
try:
|
||||
await radio_sync._telemetry_collect_loop()
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
assert ran is False, "No tracked repeaters: no cycle regardless of hour"
|
||||
|
||||
    @pytest.mark.asyncio
    async def test_runs_on_boundary_immediately_after_initial_delay(self):
        """Regression test: if the post-boot initial delay finishes inside a
        matching hour, the cycle must run even if the first
        sleep-to-next-top-of-hour would otherwise carry us past the boundary.

        Scenario: server starts at 23:59:30 UTC with a 24-hour interval. The
        60-second boot guard pushes the first check into 00:00:30 — a matching
        hour that we must NOT skip. Before the fix, the loop went straight to
        sleeping until 01:00 and then failing the modulo, missing the entire
        day's only scheduled collection.
        """
        from unittest.mock import AsyncMock, patch

        from app import radio_sync
        from app.models import AppSettings

        settings = AppSettings(
            tracked_telemetry_repeaters=["aa" * 32],
            telemetry_interval_hours=24,  # daily cadence; only matching hour is 00
        )
        ran = False

        async def fake_cycle():
            nonlocal ran
            ran = True

        class FakeDatetime:
            @classmethod
            def now(cls, tz=None):
                import datetime as real_datetime

                # Simulates "initial delay just ended at 00:00:30 UTC on a
                # restart that began at 23:59:30." Without the post-boot
                # boundary check, the loop would have skipped this.
                return real_datetime.datetime(2026, 4, 16, 0, 0, 30, tzinfo=real_datetime.UTC)

        sleep_count = 0

        async def fake_sleep(_duration):
            # Let the initial delay pass, then cancel before the first
            # top-of-hour sleep so we isolate the post-boot check as the
            # only opportunity to run.
            nonlocal sleep_count
            sleep_count += 1
            if sleep_count >= 2:
                raise asyncio.CancelledError()

        with (
            patch(
                "app.radio_sync.AppSettingsRepository.get",
                new_callable=AsyncMock,
                return_value=settings,
            ),
            patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
            patch("app.radio_sync.asyncio.sleep", new=fake_sleep),
            patch("app.radio_sync.datetime", new=FakeDatetime),
        ):
            try:
                await radio_sync._telemetry_collect_loop()
            except asyncio.CancelledError:
                pass

        assert ran is True, (
            "Post-boot check must fire the due 00:00 cycle; otherwise a "
            "restart near midnight suppresses the whole day's collection."
        )
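The two tests above pin down the scheduler's shape: a short post-boot delay, an immediate "is the current hour due?" check, then top-of-hour wake-ups gated by an hour-modulo-interval test. A minimal sketch of such a loop is below; it is not the repo's actual `_telemetry_collect_loop`, and the helper names, the 60-second boot guard, and the omission of interval clamping are assumptions for illustration only.

```python
import asyncio
from datetime import UTC, datetime, timedelta


async def telemetry_loop_sketch(get_settings, run_cycle, boot_delay_s=60):
    """Hour-aligned collection loop (illustrative sketch only)."""
    await asyncio.sleep(boot_delay_s)  # let the radio link settle after boot

    while True:
        settings = await get_settings()
        if settings.tracked_telemetry_repeaters:
            interval = max(1, settings.telemetry_interval_hours)
            # Per-wake check: run whenever the current hour is a multiple of
            # the cadence. Doing this check *before* the first top-of-hour
            # sleep is exactly what the regression test above demands.
            if datetime.now(UTC).hour % interval == 0:
                await run_cycle()

        # Sleep to the next top of the hour, then re-evaluate.
        now = datetime.now(UTC)
        next_hour = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
        await asyncio.sleep((next_hour - now).total_seconds())
```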
    @pytest.mark.asyncio
    async def test_clamps_up_when_preferred_illegal_for_current_count(self):
        """5 tracked repeaters with saved pref 1h: scheduler should use 6h.

        At 02:00 UTC: 2 % 6 == 2 (not a run), so cycle must not fire.
        If clamping were skipped, 2 % 1 == 0 and cycle would incorrectly run.
        """
        from unittest.mock import AsyncMock, patch

        from app import radio_sync
        from app.models import AppSettings

        settings = AppSettings(
            tracked_telemetry_repeaters=["aa" * 32] * 5,
            telemetry_interval_hours=1,  # illegal at N=5; shortest legal is 6h
        )
        ran = False

        async def fake_cycle():
            nonlocal ran
            ran = True

        class FakeDatetime:
            @classmethod
            def now(cls, tz=None):
                import datetime as real_datetime

                return real_datetime.datetime(2026, 4, 16, 2, 0, 0, tzinfo=real_datetime.UTC)

        sleep_count = 0

        async def fake_sleep(_duration):
            # Let the loop's initial-delay + top-of-hour sleeps pass; cancel
            # on the third sleep (next iteration's top-of-hour wake).
            nonlocal sleep_count
            sleep_count += 1
            if sleep_count >= 3:
                raise asyncio.CancelledError()

        with (
            patch(
                "app.radio_sync.AppSettingsRepository.get",
                new_callable=AsyncMock,
                return_value=settings,
            ),
            patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
            patch("app.radio_sync.asyncio.sleep", new=fake_sleep),
            patch("app.radio_sync.datetime", new=FakeDatetime),
        ):
            try:
                await radio_sync._telemetry_collect_loop()
            except asyncio.CancelledError:
                pass

        assert ran is False, (
            "Clamping to 6h must prevent the 02:00 run that 1h cadence would've triggered"
        )

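The clamping behaviour this test (and the interval-validation tests further down) rely on can be sketched as follows. The menu of allowed intervals and the "interval must be at least the number of tracked repeaters" rule are assumptions inferred from the fixtures (N=5 clamps to 6h, N=1 allows 1h, N=0 displays the 8h default); the repo's actual `app.telemetry_interval` helpers may differ.

```python
# Illustrative sketch only; the menu and the legality rule are assumptions, and
# the real DEFAULT_TELEMETRY_INTERVAL_HOURS lives in app.telemetry_interval.
INTERVAL_MENU_HOURS = [1, 2, 4, 6, 8, 12, 24]  # assumed option menu
DEFAULT_TELEMETRY_INTERVAL_HOURS = 8


def shortest_legal_interval(tracked_count: int) -> int:
    """Smallest menu option that can cover the tracked repeaters."""
    if tracked_count <= 0:
        return DEFAULT_TELEMETRY_INTERVAL_HOURS
    return next(h for h in INTERVAL_MENU_HOURS if h >= tracked_count)


def effective_interval(preferred_hours: int, tracked_count: int) -> int:
    """Preference is stored verbatim; only the effective cadence clamps up."""
    if preferred_hours not in INTERVAL_MENU_HOURS:
        preferred_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
    return max(preferred_hours, shortest_legal_interval(tracked_count))


assert effective_interval(1, 5) == 6    # the case exercised above
assert effective_interval(1, 1) == 1
assert effective_interval(99, 0) == 8   # non-menu preference falls back to default
```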
# ---------------------------------------------------------------------------
# get_contacts_selected_for_radio_sync — DM-active prioritization
# ---------------------------------------------------------------------------


class TestContactSelectionDmActive:
    """Verify that tier 2 prioritizes contacts with recent DM activity."""

    @pytest.mark.asyncio
    async def test_incoming_dm_contact_selected_over_advert_only(self, test_db):
        """A contact who sent us a DM should be prioritized over one who only advertised."""
        from app.radio_sync import get_contacts_selected_for_radio_sync

        # Create two non-repeater contacts
        dm_sender_key = "aa" * 32
        advert_only_key = "bb" * 32

        await test_db.conn.execute(
            "INSERT INTO contacts (public_key, name, type, last_seen, last_advert) VALUES (?, ?, 1, 100, 100)",
            (dm_sender_key, "DM Sender"),
        )
        await test_db.conn.execute(
            "INSERT INTO contacts (public_key, name, type, last_seen, last_advert) VALUES (?, ?, 1, 200, 200)",
            (advert_only_key, "Advert Only"),
        )

        # DM Sender sent us a message (incoming DM)
        await test_db.conn.execute(
            "INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'hello', 300)",
            (dm_sender_key,),
        )
        await test_db.conn.commit()

        with patch(
            "app.radio_sync.AppSettingsRepository.get",
            new_callable=AsyncMock,
            return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
        ):
            selected = await get_contacts_selected_for_radio_sync()

        keys = [c.public_key for c in selected]
        assert dm_sender_key in keys
        assert advert_only_key in keys
        # DM Sender should come before Advert Only (tier 2 before tier 3)
        assert keys.index(dm_sender_key) < keys.index(advert_only_key)

    @pytest.mark.asyncio
    async def test_outgoing_dm_contact_also_selected(self, test_db):
        """A contact we sent a DM to should also appear via DM-active tier."""
        from app.radio_sync import get_contacts_selected_for_radio_sync

        contact_key = "cc" * 32
        await test_db.conn.execute(
            "INSERT INTO contacts (public_key, name, type) VALUES (?, ?, 1)",
            (contact_key, "Outgoing Target"),
        )
        await test_db.conn.execute(
            "INSERT INTO messages (type, conversation_key, text, received_at, outgoing) VALUES ('PRIV', ?, 'hey', 300, 1)",
            (contact_key,),
        )
        await test_db.conn.commit()

        with patch(
            "app.radio_sync.AppSettingsRepository.get",
            new_callable=AsyncMock,
            return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
        ):
            selected = await get_contacts_selected_for_radio_sync()

        keys = [c.public_key for c in selected]
        assert contact_key in keys

    @pytest.mark.asyncio
    async def test_repeaters_excluded_from_dm_active_tier(self, test_db):
        """Repeater contacts should not appear in tier 2 even with DM activity."""
        from app.radio_sync import get_contacts_selected_for_radio_sync

        repeater_key = "dd" * 32
        await test_db.conn.execute(
            "INSERT INTO contacts (public_key, name, type) VALUES (?, ?, 2)",
            (repeater_key, "Repeater"),
        )
        await test_db.conn.execute(
            "INSERT INTO messages (type, conversation_key, text, received_at) VALUES ('PRIV', ?, 'cmd', 300)",
            (repeater_key,),
        )
        await test_db.conn.commit()

        with patch(
            "app.radio_sync.AppSettingsRepository.get",
            new_callable=AsyncMock,
            return_value=MagicMock(max_radio_contacts=200, tracked_telemetry_repeaters=[]),
        ):
            selected = await get_contacts_selected_for_radio_sync()

        keys = [c.public_key for c in selected]
        assert repeater_key not in keys

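Taken together, these three tests imply an ordering roughly like the sketch below: tracked or explicitly selected contacts first, then DM-active non-repeaters, then everyone else by advert recency, truncated to max_radio_contacts. The tier numbering follows the docstrings; the first tier's exact contents and the real query inside app.radio_sync are assumptions here.

```python
# Illustrative sketch of the selection order the tests above pin down; not the
# actual query used by get_contacts_selected_for_radio_sync.
from dataclasses import dataclass


@dataclass
class ContactRow:
    public_key: str
    is_repeater: bool
    dm_active: bool    # any PRIV message to or from this contact
    last_advert: int   # epoch seconds, 0 if never heard


def select_for_radio(contacts: list[ContactRow], tracked: set[str], max_slots: int) -> list[ContactRow]:
    tier1 = [c for c in contacts if c.public_key in tracked]            # assumed tier 1
    rest = [c for c in contacts if c.public_key not in tracked and not c.is_repeater]
    tier2 = [c for c in rest if c.dm_active]                            # DM-active first
    tier3 = sorted((c for c in rest if not c.dm_active),
                   key=lambda c: c.last_advert, reverse=True)           # then advert recency
    return (tier1 + tier2 + tier3)[:max_slots]
```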
+214 -24
@@ -1,11 +1,12 @@
"""Tests for repository layer."""

from unittest.mock import AsyncMock, MagicMock, patch
from unittest.mock import patch

import pytest

from app.models import Contact, ContactUpsert
from app.repository import (
    AppSettingsRepository,
    ContactAdvertPathRepository,
    ContactNameHistoryRepository,
    ContactRepository,
@@ -613,37 +614,103 @@ class TestAppSettingsRepository:
    """Test AppSettingsRepository parsing and migration edge cases."""

    @pytest.mark.asyncio
    async def test_get_handles_corrupted_json_and_invalid_sort_order(self):
        """Corrupted JSON fields are recovered with safe defaults."""
        mock_conn = AsyncMock()
        mock_cursor = AsyncMock()
        mock_cursor.fetchone = AsyncMock(
            return_value={
                "max_radio_contacts": 250,
                "auto_decrypt_dm_on_advert": 1,
                "last_message_times": "{also-not-json",
                "advert_interval": None,
                "last_advert_time": None,
                "flood_scope": "",
                "blocked_keys": "[]",
                "blocked_names": "[]",
                "discovery_blocked_types": "[]",
            }
    async def test_get_handles_corrupted_json_and_invalid_sort_order(self, test_db):
        """Corrupted JSON fields are recovered with safe defaults.

        Uses the real DB so it exercises the lock-aware path. We stuff
        malformed JSON directly into the row, then verify ``get()`` recovers
        with defaults rather than propagating a parse error.
        """
        await test_db.conn.execute(
            """
            UPDATE app_settings
            SET max_radio_contacts = 250,
                auto_decrypt_dm_on_advert = 1,
                last_message_times = '{also-not-json',
                advert_interval = NULL,
                last_advert_time = NULL,
                flood_scope = '',
                blocked_keys = '[]',
                blocked_names = '[]',
                discovery_blocked_types = '[]'
            WHERE id = 1
            """
        )
        mock_conn.execute = AsyncMock(return_value=mock_cursor)
        mock_db = MagicMock()
        mock_db.conn = mock_conn
        await test_db.conn.commit()

        with patch("app.repository.settings.db", mock_db):
            from app.repository import AppSettingsRepository

            settings = await AppSettingsRepository.get()
        settings = await AppSettingsRepository.get()

        assert settings.max_radio_contacts == 250
        assert settings.last_message_times == {}
        assert settings.advert_interval == 0
        assert settings.last_advert_time == 0

    @pytest.mark.asyncio
    async def test_get_in_conn_tolerates_missing_columns(self):
        """Defend against partial migrations where columns added by later
        migrations are absent from the row.

        Real DBs can't produce this state (schema init + migrations always
        run to the latest version on startup), but hand-rolled snapshots,
        external DB tools, or interrupted migrations might. The
        ``KeyError``-catching branches in ``_get_in_conn`` exist specifically
        to guarantee graceful degradation.

        We test these directly by mocking the connection boundary with a
        dict-backed row that mimics a pre-migration snapshot missing:
        - ``tracked_telemetry_repeaters`` (migration 53)
        - ``auto_resend_channel`` (migration 54)
        - ``telemetry_interval_hours`` (migration 57)
        """
        from unittest.mock import MagicMock

        from app.telemetry_interval import DEFAULT_TELEMETRY_INTERVAL_HOURS

        # sqlite3.Row raises KeyError for missing columns when accessed by
        # name, which is what we want to simulate. We mimic that here with a
        # dict-backed object whose __getitem__ raises KeyError for absent
        # keys (dict.__getitem__ already does this).
        class PartialRow(dict):
            def keys(self):  # pragma: no cover - aiosqlite.Row compat
                return super().keys()

        partial_row = PartialRow(
            {
                "max_radio_contacts": 123,
                "auto_decrypt_dm_on_advert": 1,
                "last_message_times": "{}",
                "advert_interval": 0,
                "last_advert_time": 0,
                "flood_scope": "",
                "blocked_keys": "[]",
                "blocked_names": "[]",
                "discovery_blocked_types": "[]",
                # intentionally missing: tracked_telemetry_repeaters,
                # auto_resend_channel, telemetry_interval_hours
            }
        )

        class FakeCursor:
            async def fetchone(self):
                return partial_row

            async def __aenter__(self):
                return self

            async def __aexit__(self, exc_type, exc, tb):
                return None

        mock_conn = MagicMock()
        mock_conn.execute = MagicMock(return_value=FakeCursor())

        settings = await AppSettingsRepository._get_in_conn(mock_conn)

        assert settings.max_radio_contacts == 123
        # Missing-column defaults kick in:
        assert settings.tracked_telemetry_repeaters == []
        assert settings.auto_resend_channel is False
        assert settings.telemetry_interval_hours == DEFAULT_TELEMETRY_INTERVAL_HOURS

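The missing-column branches the test above exercises amount to "read each late-migration column defensively and fall back to its default." A minimal sketch of that pattern follows; the helper names and most defaults are assumptions for illustration, and the real logic lives in AppSettingsRepository._get_in_conn.

```python
# Illustrative sketch; helper names and default values are assumptions, except
# that the telemetry default is taken from app.telemetry_interval in the test.
def _column(row, name, default):
    try:
        value = row[name]
    except KeyError:  # column absent in a pre-migration snapshot
        return default
    return default if value is None else value


def parse_settings_row(row) -> dict:
    return {
        "max_radio_contacts": _column(row, "max_radio_contacts", 200),
        # Columns added by later migrations degrade gracefully:
        "tracked_telemetry_repeaters": _column(row, "tracked_telemetry_repeaters", []),  # migration 53
        "auto_resend_channel": bool(_column(row, "auto_resend_channel", False)),         # migration 54
        "telemetry_interval_hours": _column(row, "telemetry_interval_hours", 8),         # migration 57
    }
```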
class TestMessageRepositoryGetById:
    """Test MessageRepository.get_by_id method."""
@@ -697,3 +764,126 @@ class TestContactRepositoryUpsertContracts:
        assert contact.name == "Bob"
        assert contact.type == 2
        assert contact.on_radio is True


class TestContactRepositoryLastSeenSemantics:
    """Guard the 'last_seen = last RF reception' contract.

    Radio-driven contact-DB syncs must not clobber an earlier real RF timestamp,
    and callers that don't supply last_seen must leave the existing value alone.
    """

    @pytest.mark.asyncio
    async def test_upsert_without_last_seen_preserves_existing(self, test_db):
        real_rf_observation = 1_700_000_000
        await ContactRepository.upsert(
            ContactUpsert(
                public_key="aa" * 32,
                name="Alice",
                type=1,
                last_seen=real_rf_observation,
                on_radio=False,
            )
        )

        # A subsequent radio-sync style upsert (no last_seen supplied) must not
        # overwrite the real RF timestamp with now().
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, name="Alice", type=1, on_radio=False)
        )

        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == real_rf_observation

    @pytest.mark.asyncio
    async def test_upsert_monotonically_bumps_last_seen(self, test_db):
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_700_000_000, on_radio=False)
        )

        # Newer RF observation advances last_seen.
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_700_000_500, on_radio=False)
        )
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500

        # An older timestamp (out-of-order arrival) must not move it backwards.
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_699_999_000, on_radio=False)
        )
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500

    @pytest.mark.asyncio
    async def test_upsert_inserts_null_last_seen_when_not_supplied(self, test_db):
        # A radio-sync-only contact (never heard on RF) should have last_seen=NULL.
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, name="Alice", type=1, on_radio=False)
        )

        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen is None

    @pytest.mark.asyncio
    async def test_touch_last_seen_bumps_monotonically(self, test_db):
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_700_000_000, on_radio=False)
        )

        await ContactRepository.touch_last_seen("aa" * 32, 1_700_000_500)
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500

        # Older timestamps never move last_seen backwards.
        await ContactRepository.touch_last_seen("aa" * 32, 1_699_999_000)
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500

    @pytest.mark.asyncio
    async def test_update_last_contacted_does_not_touch_last_seen(self, test_db):
        # last_contacted = we sent TO them. It must not forge RF reception.
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_700_000_000, on_radio=False)
        )

        await ContactRepository.update_last_contacted("aa" * 32, 1_700_500_000)

        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_contacted == 1_700_500_000
        assert contact.last_seen == 1_700_000_000

    @pytest.mark.asyncio
    async def test_update_direct_path_bumps_last_seen_monotonically(self, test_db):
        # update_direct_path is driven by RF PATH reception on both callers
        # (packet processor + firmware PATH_UPDATE, which only fires from
        # onContactPathRecv during RF reception). It should advance last_seen
        # forward-only.
        await ContactRepository.upsert(
            ContactUpsert(public_key="aa" * 32, last_seen=1_700_000_000, on_radio=False)
        )

        await ContactRepository.update_direct_path(
            "aa" * 32, path="ab", path_len=1, path_hash_mode=0, updated_at=1_700_000_500
        )
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500
        assert contact.direct_path == "ab"

        # Out-of-order PATH arrival with an older timestamp must not rewind.
        await ContactRepository.update_direct_path(
            "aa" * 32, path="cd", path_len=1, path_hash_mode=0, updated_at=1_699_999_000
        )
        contact = await ContactRepository.get_by_key("aa" * 32)
        assert contact is not None
        assert contact.last_seen == 1_700_000_500
        # The path itself still updates — only last_seen is monotonic-guarded.
        assert contact.direct_path == "cd"

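One way to satisfy the forward-only contract these tests enforce is to push the monotonic guard into the SQL itself. The statements below are a sketch of that approach, assuming SQLite and the contacts columns the tests reference; they are not the repo's actual ContactRepository queries.

```python
# Illustrative SQL sketch (SQLite); column names follow the tests above.
UPSERT_CONTACT_SQL = """
INSERT INTO contacts (public_key, name, type, last_seen)
VALUES (:public_key, :name, :type, :last_seen)
ON CONFLICT(public_key) DO UPDATE SET
    name = excluded.name,
    type = excluded.type,
    -- No last_seen supplied: keep whatever is stored (possibly NULL).
    -- Supplied: only move forward, never backwards.
    last_seen = CASE
        WHEN excluded.last_seen IS NULL THEN contacts.last_seen
        ELSE MAX(excluded.last_seen, COALESCE(contacts.last_seen, 0))
    END
"""

TOUCH_LAST_SEEN_SQL = """
UPDATE contacts
SET last_seen = MAX(COALESCE(last_seen, 0), :ts)
WHERE public_key = :public_key
"""

# update_last_contacted would write only last_contacted and leave last_seen
# alone, which is what the "does_not_touch_last_seen" test checks.
```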
@@ -11,6 +11,7 @@ from app.routers.settings import (
    AppSettingsUpdate,
    FavoriteRequest,
    TrackedTelemetryRequest,
    get_telemetry_schedule,
    toggle_favorite,
    toggle_tracked_telemetry,
    update_settings,
@@ -244,3 +245,88 @@ class TestToggleTrackedTelemetry:
        result = await toggle_tracked_telemetry(TrackedTelemetryRequest(public_key=keys[0]))
        assert keys[0] not in result.tracked_telemetry_repeaters
        assert len(result.tracked_telemetry_repeaters) == 7

    @pytest.mark.asyncio
    async def test_toggle_response_includes_schedule(self, test_db):
        """After toggle, response must carry the schedule derivation so the UI
        can update the interval dropdown without a follow-up fetch."""
        key = "aa" * 32
        await self._create_repeater(key)

        result = await toggle_tracked_telemetry(TrackedTelemetryRequest(public_key=key))

        assert result.schedule.tracked_count == 1
        # N=1 unlocks the full menu including 1h
        assert 1 in result.schedule.options
        assert result.schedule.max_tracked == 8


class TestTelemetryIntervalValidation:
    """PATCH /settings validation for telemetry_interval_hours."""

    @pytest.mark.asyncio
    async def test_accepts_valid_interval(self, test_db):
        result = await update_settings(AppSettingsUpdate(telemetry_interval_hours=4))
        assert result.telemetry_interval_hours == 4

    @pytest.mark.asyncio
    async def test_invalid_interval_falls_back_to_default(self, test_db):
        """Non-menu values are defaulted rather than 400-ing to keep stale
        clients from getting stuck on a save error."""
        result = await update_settings(AppSettingsUpdate(telemetry_interval_hours=99))
        assert result.telemetry_interval_hours == 8  # DEFAULT_TELEMETRY_INTERVAL_HOURS

    @pytest.mark.asyncio
    async def test_preference_is_preserved_even_when_illegal_for_count(self, test_db):
        """User picks 1h at N=5 tracked: stored pref must stay 1h. Scheduler
        handles the clamping at run time; storage is verbatim."""
        # Seed 5 tracked repeaters
        keys = [f"{i:02x}" * 32 for i in range(5)]
        for k in keys:
            await ContactRepository.upsert(
                ContactUpsert(public_key=k, name=f"R{k[:4]}", type=CONTACT_TYPE_REPEATER)
            )
        await AppSettingsRepository.update(tracked_telemetry_repeaters=keys)

        result = await update_settings(AppSettingsUpdate(telemetry_interval_hours=1))
        assert result.telemetry_interval_hours == 1

        # But the GET schedule endpoint should report the clamped effective value.
        schedule = await get_telemetry_schedule()
        assert schedule.preferred_hours == 1
        assert schedule.effective_hours == 6  # N=5 -> shortest legal = 6h


class TestTelemetryScheduleEndpoint:
    """GET /settings/tracked-telemetry/schedule."""

    @pytest.mark.asyncio
    async def test_schedule_with_no_tracked_repeaters(self, test_db):
        """No tracked repeaters means nothing to schedule; next_run_at is None.

        At N=0 the clamp helper returns the default 8h, which is a fine
        display value for an empty state. Options start at 8h for the same
        reason — any lower shortest-legal only makes sense once the user
        has at least one repeater tracked.
        """
        schedule = await get_telemetry_schedule()

        assert schedule.tracked_count == 0
        assert schedule.next_run_at is None
        # At N=0 shortest-legal defaults to 8h.
        assert schedule.options == [8, 12, 24]

    @pytest.mark.asyncio
    async def test_schedule_filters_options_by_tracked_count(self, test_db):
        keys = [f"{i:02x}" * 32 for i in range(5)]
        for k in keys:
            await ContactRepository.upsert(
                ContactUpsert(public_key=k, name=f"R{k[:4]}", type=CONTACT_TYPE_REPEATER)
            )
        await AppSettingsRepository.update(tracked_telemetry_repeaters=keys)

        schedule = await get_telemetry_schedule()

        assert schedule.tracked_count == 5
        assert schedule.options == [6, 8, 12, 24]
        assert schedule.next_run_at is not None

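The schedule payload these endpoint tests assert on can be derived with the same assumed rules as the clamp sketch earlier. Field names come straight from the assertions, but the menu, the legality rule, and the next-run computation below are assumptions for illustration, not the repo's actual get_telemetry_schedule.

```python
# Illustrative sketch of the schedule derivation, not the endpoint's real code.
from datetime import UTC, datetime, timedelta

MENU_HOURS = [1, 2, 4, 6, 8, 12, 24]  # assumed option menu
DEFAULT_HOURS = 8
MAX_TRACKED = 8


def derive_schedule(tracked_count: int, preferred_hours: int) -> dict:
    shortest = DEFAULT_HOURS if tracked_count == 0 else next(h for h in MENU_HOURS if h >= tracked_count)
    options = [h for h in MENU_HOURS if h >= shortest]
    effective = max(preferred_hours if preferred_hours in MENU_HOURS else DEFAULT_HOURS, shortest)

    next_run_at = None
    if tracked_count > 0:
        now = datetime.now(UTC)
        # Next top-of-hour whose hour number is a multiple of the cadence.
        hours_ahead = next(d for d in range(1, effective + 1) if (now.hour + d) % effective == 0)
        next_run_at = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=hours_ahead)

    return {
        "tracked_count": tracked_count,
        "preferred_hours": preferred_hours,
        "effective_hours": effective,
        "options": options,
        "max_tracked": MAX_TRACKED,
        "next_run_at": next_run_at,
    }


assert derive_schedule(0, 8)["options"] == [8, 12, 24]
assert derive_schedule(5, 1)["effective_hours"] == 6
```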
Some files were not shown because too many files have changed in this diff.