mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-11 12:00:28 +02:00
Compare commits
57 Commits
channel-mute
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 70cb133b24 | |||
| f95745cb05 | |||
| 39ba88bc4b | |||
| e814653300 | |||
| e76d922752 | |||
| d0e02a42f8 | |||
| dbf14259dc | |||
| a9ac87e668 | |||
| f710a1f2d9 | |||
| 9f6c0f12c5 | |||
| 466f693c21 | |||
| 16f87e640f | |||
| 761fd82da6 | |||
| 2c1279eb9e | |||
| 047d713003 | |||
| 25041e1367 | |||
| b3fe717416 | |||
| 9a4e78c504 | |||
| d436de67a2 | |||
| 89cee49725 | |||
| b37ce89c96 | |||
| f0b7842c60 | |||
| 4eb29f376e | |||
| 82a6553539 | |||
| a69eb9c534 | |||
| 70aabb78aa | |||
| cafd9678ee | |||
| a8e346d0c5 | |||
| 55f05bf03b | |||
| 091ba06ccf | |||
| c5c828a4ed | |||
| 7eac3a9754 | |||
| 329df1a0d2 | |||
| ecb4c99a43 | |||
| 2f412e1a93 | |||
| 0353a98e87 | |||
| 3e2258c34b | |||
| e695d629b9 | |||
| 300677aca3 | |||
| b89f7ce76b | |||
| 82bd25a09f | |||
| 7528e4121f | |||
| b8f0228f68 | |||
| 25089930f1 | |||
| 291bd85c78 | |||
| 4bc87b4a0f | |||
| 6d0434d59e | |||
| f22184c166 | |||
| d10de8abf7 | |||
| 5f78294cd1 | |||
| 6b81dd3082 | |||
| cc2b16e53f | |||
| 330007e120 | |||
| f5a2a21f11 | |||
| a3e62885d4 | |||
| dbdd722c48 | |||
| b8683e57d8 |
@@ -0,0 +1,10 @@
|
||||
name: "RemoteTerm CodeQL config"
|
||||
|
||||
# Exclude rules that flag intentional design decisions:
|
||||
# - AES-ECB is required by the MeshCore radio protocol wire format
|
||||
# - Repeater/room passwords are not meaningfully sensitive secrets
|
||||
query-filters:
|
||||
- exclude:
|
||||
id: py/weak-cryptographic-algorithm
|
||||
- exclude:
|
||||
id: js/clear-text-storage-of-sensitive-data
|
||||
@@ -4,6 +4,9 @@ on:
|
||||
push:
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
backend-checks:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
name: CodeQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
schedule:
|
||||
- cron: "0 6 * * 1"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [javascript-typescript, python]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: .github/codeql/codeql-config.yml
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
@@ -25,6 +25,9 @@ concurrency:
|
||||
group: publish-aur
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
publish-aur:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@@ -25,6 +25,7 @@ references/
|
||||
|
||||
# ancillary LLM files
|
||||
.claude/
|
||||
.codex
|
||||
|
||||
# local Docker compose files
|
||||
docker-compose.yml
|
||||
|
||||
@@ -321,6 +321,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
||||
| GET | `/api/debug` | Support snapshot: recent logs, live radio probe, contact/channel drift audit, and running version/git info |
|
||||
| GET | `/api/radio/config` | Radio configuration, including `path_hash_mode`, `path_hash_mode_supported`, advert-location on/off, and `multi_acks_enabled` |
|
||||
| PATCH | `/api/radio/config` | Update name, location, advert-location on/off, `multi_acks_enabled`, radio params, and `path_hash_mode` when supported |
|
||||
| GET | `/api/radio/private-key` | Export in-memory private key as hex (requires `MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=true`) |
|
||||
| PUT | `/api/radio/private-key` | Import private key to radio |
|
||||
| POST | `/api/radio/advertise` | Send advertisement (`mode`: `flood` or `zero_hop`, default `flood`) |
|
||||
| POST | `/api/radio/discover` | Run a short mesh discovery sweep for nearby repeaters/sensors |
|
||||
@@ -379,6 +380,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
||||
| POST | `/api/settings/blocked-names/toggle` | Toggle blocked name |
|
||||
| POST | `/api/settings/tracked-telemetry/toggle` | Toggle tracked telemetry repeater |
|
||||
| GET | `/api/settings/tracked-telemetry/schedule` | Current telemetry scheduling derivation and next-run-at timestamp |
|
||||
| POST | `/api/settings/muted-channels/toggle` | Toggle muted status for a channel |
|
||||
| GET | `/api/fanout` | List all fanout configs |
|
||||
| POST | `/api/fanout` | Create new fanout config |
|
||||
| PATCH | `/api/fanout/{id}` | Update fanout config (triggers module reload) |
|
||||
@@ -504,6 +506,7 @@ mc.subscribe(EventType.ACK, handler)
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
|
||||
| `MESHCORE_LOAD_WITH_AUTOEVICT` | `false` | Enable autoevict contact loading: sets `AUTO_ADD_OVERWRITE_OLDEST` on the radio so adds never fail with TABLE_FULL, skips the removal phase during reconcile, and allows blind loading when `get_contacts` fails. Loaded contacts are not radio-favorited and may be evicted by new adverts when the table is full. |
|
||||
| `MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT` | `false` | Enable `GET /api/radio/private-key` to return the in-memory private key as hex. Disabled by default; only enable on a trusted network where you need to retrieve the key (e.g. for backup or migration). |
|
||||
|
||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `advert_interval`, `last_advert_time`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, `discovery_blocked_types`, `tracked_telemetry_repeaters`, `auto_resend_channel`, and `telemetry_interval_hours`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||
|
||||
|
||||
@@ -1,3 +1,43 @@
|
||||
## [3.13.0] - 2026-04-30
|
||||
|
||||
* Feature: Error counts included in repeater telemetry
|
||||
* Feature: RX error rate + percentage surfaced and tracked for repeaters
|
||||
* Feature: Dynamic as-you-type text replacement for Cyrillic byte optimization
|
||||
* Feature: Permit hourly checks for direct/routed repeaters
|
||||
* Feature: Allow newlines in input
|
||||
* Feature: Packet-send radio time added to packet analyzer
|
||||
* Feature: Enable forced plaintext for Apprise
|
||||
* Bugfix: Less annoying MQTT failure notifications with backoff
|
||||
* Bugfix: Don't obscure input; use dvh everywhere
|
||||
* Bugfix: Clearer save button for advert interval
|
||||
* Misc: Library updates
|
||||
* Misc: Rewrite 5xx to 4xx to avoid issues with proxies that don't react well to 503/504
|
||||
|
||||
## [3.12.3] - 2026-04-24
|
||||
|
||||
* Feature: Customizable Apprise strings
|
||||
* Feature: Choose contact addition type
|
||||
* Feature: Make bulk-delete sortable by last-heard
|
||||
* Misc: Bypass error on fail-to-unload-contact when it's not there
|
||||
* Misc: Docs & test updates
|
||||
|
||||
## [3.12.2] - 2026-04-21
|
||||
|
||||
* Feature: Auto-disambiguate colliding LPP sensor names
|
||||
* Feature: Radio config import/export
|
||||
* Bugfix: Don't push stale firmware version/model on community MQTT
|
||||
* Misc: Expose env vars in debug blob
|
||||
* Misc: Longer linger for web push error
|
||||
* Misc: Docs, test, & CI/CD improvements
|
||||
|
||||
## [3.12.1] - 2026-04-19
|
||||
|
||||
* Feature: Auto-evict/circular-buffer contact load mode (solves potential T-Beam issues)
|
||||
* Feature: Channel mute
|
||||
* Misc: HA Documentation improvements
|
||||
* Misc: Bump deps & update tests
|
||||
* Misc: Improve warnings around web push in untrusted contexts
|
||||
|
||||
## [3.12.0] - 2026-04-17
|
||||
|
||||
* Feature: Web Push -- get your mesh notifications on a locked phone or when your browser is closed!
|
||||
|
||||
+1
-1
@@ -330,7 +330,7 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
|
||||
</details>
|
||||
|
||||
### meshcore (2.3.2) — MIT
|
||||
### meshcore (2.3.7) — MIT
|
||||
|
||||
<details>
|
||||
<summary>Full license text</summary>
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
# RemoteTerm for MeshCore
|
||||
|
||||
Backend server + browser interface for MeshCore mesh radio networks. Connect your radio over Serial, TCP, or BLE, and then you can:
|
||||
Backend server + browser interface for MeshCore mesh radio networks, providing a rich, web-based power-user management and messaging system through a companion radio.
|
||||
|
||||
Connect your radio over Serial, TCP, or BLE, and then you can:
|
||||
|
||||
* Send and receive DMs and channel messages
|
||||
* Cache all received packets, decrypting as you gain keys
|
||||
@@ -8,8 +10,8 @@ Backend server + browser interface for MeshCore mesh radio networks. Connect you
|
||||
* Monitor unlimited contacts and channels (radio limits don't apply -- packets are decrypted server-side)
|
||||
* Access your radio remotely over your network or VPN
|
||||
* Search for hashtag channel names for channels you don't have keys for yet
|
||||
* Forward packets to MQTT, LetsMesh, MeshRank, SQS, Apprise, etc.
|
||||
* Use the more recent 1.14 firmwares which support multibyte pathing
|
||||
* Forward packets, messages, and automatic repeater telemetry to MQTT, Home Assistant, LetsMesh, MeshRank, SQS, Apprise, etc.
|
||||
* Use the more recent 1.14+ firmwares which support multibyte pathing
|
||||
* Visualize the mesh as a map or node set, view repeater stats, and more!
|
||||
|
||||
For advanced setup and troubleshooting see [README_ADVANCED.md](README_ADVANCED.md). If you plan to contribute, read [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
|
||||
+26
-7
@@ -1,25 +1,44 @@
|
||||
# Advanced Setup And Troubleshooting
|
||||
|
||||
## Remediation Environment Variables
|
||||
## Remediation & Advanced Environment Variables
|
||||
|
||||
These are intended for diagnosing or working around radios that behave oddly.
|
||||
These are intended for diagnosing or working around radios that behave oddly, or enabling advanced functionality.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | false | Run aggressive 10-second `get_msg()` fallback polling to check for messages |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | false | Disable channel-slot reuse and force `set_channel(...)` before every channel send |
|
||||
| `MESHCORE_LOAD_WITH_AUTOEVICT` | false | Enable autoevict mode for contact loading (see [Contact Loading Issues](#contact-loading-issues) below) |
|
||||
| `__CLOWNTOWN_DO_CLOCK_WRAPAROUND` | false | Highly experimental: if the radio clock is ahead of system time, try forcing the clock to `0xFFFFFFFF`, wait for uint32 wraparound, and then retry normal time sync before falling back to reboot |
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | false | Run aggressive 10-second `get_msg()` fallback polling to check for messages ([docs](#message-poll-fallback)) |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | false | Disable channel-slot reuse and force `set_channel(...)` before every channel send ([docs](#force-channel-slot-reconfigure)) |
|
||||
| `MESHCORE_LOAD_WITH_AUTOEVICT` | false | Enable autoevict mode for contact loading ([docs](#autoevict-mode)) |
|
||||
| `__CLOWNTOWN_DO_CLOCK_WRAPAROUND` | false | Highly experimental: if the radio clock is ahead of system time, try forcing the clock to `0xFFFFFFFF`, wait for uint32 wraparound, and then retry normal time sync before falling back to reboot ([docs](#clock-wraparound)) |
|
||||
| `MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT` | false | Enable `GET /api/radio/private-key` to return the in-memory private key as hex for backup or migration. Only enable on a trusted network. Import via `PUT /api/radio/private-key` is always available. ([docs](#private-key-export)) |
|
||||
|
||||
By default the app relies on radio events plus MeshCore auto-fetch for incoming messages, and also runs a low-frequency hourly audit poll. That audit checks both:
|
||||
|
||||
- whether messages were left on the radio without reaching the app through event subscription
|
||||
- whether the app's channel-slot expectations still match the radio's actual channel listing
|
||||
|
||||
If the audit finds a mismatch, you'll see an error in the application UI and your logs. If you see that warning, or if messages on the radio never show up in the app, try `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK=true` to switch that task into a more aggressive 10-second safety net. If room sends appear to be using the wrong channel slot or another client is changing slots underneath this app, try `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true` to force the radio to validate the channel slot is valid before sending (will delay sending by ~500ms).
|
||||
If the audit finds a mismatch, you'll see an error in the application UI and your logs.
|
||||
|
||||
### Message Poll Fallback
|
||||
|
||||
If you see that warning, or if messages on the radio never show up in the app, try `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK=true` to switch that task into a more aggressive 10-second safety net.
|
||||
|
||||
### Force Channel Slot Reconfigure
|
||||
|
||||
If room sends appear to be using the wrong channel slot or another client is changing slots underneath this app, try `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true` to force the radio to validate the channel slot is valid before sending (will delay sending by ~500ms).
|
||||
|
||||
### Clock Wraparound
|
||||
|
||||
`__CLOWNTOWN_DO_CLOCK_WRAPAROUND=true` is a last-resort clock remediation for nodes whose RTC is stuck in the future and where rescue-mode time setting or GPS-based time is not available. It intentionally relies on the clock rolling past the 32-bit epoch boundary, which is board-specific behavior and may not be safe or effective on all MeshCore targets. Treat it as highly experimental.
|
||||
|
||||
### Private Key Export
|
||||
|
||||
`MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=true` enables `GET /api/radio/private-key`, which returns the in-memory private key as hex for backup or migration. The key is held in memory only (exported from the radio on connect) and is never persisted to disk. Only enable this on a trusted network when you need to retrieve the key.
|
||||
|
||||
Import via `PUT /api/radio/private-key` is always available regardless of this setting — it is write-only and does not expose key material.
|
||||
|
||||
The Radio Settings config export/import feature uses these endpoints. When export is disabled, config exports will omit the private key and show a notice.
|
||||
|
||||
## Contact Loading Issues
|
||||
|
||||
RemoteTerm loads favorite and recently active contacts onto the radio so that the radio can automatically acknowledge incoming DMs on your behalf. To do this, it first enumerates the radio's existing contact table, then reconciles it with the desired working set.
|
||||
|
||||
+9
-1
@@ -196,6 +196,7 @@ Web Push is a standalone subsystem in `app/push/`, separate from the fanout modu
|
||||
### Radio
|
||||
- `GET /radio/config` — includes `path_hash_mode`, `path_hash_mode_supported`, advert-location on/off, and `multi_acks_enabled`
|
||||
- `PATCH /radio/config` — may update `path_hash_mode` (`0..2`) when firmware supports it, and `multi_acks_enabled`
|
||||
- `GET /radio/private-key` — export in-memory private key as hex (requires `MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=true`)
|
||||
- `PUT /radio/private-key`
|
||||
- `POST /radio/advertise` — manual advert send; request body may set `mode` to `flood` or `zero_hop` (defaults to `flood`)
|
||||
- `POST /radio/discover` — short mesh discovery sweep for nearby repeaters/sensors
|
||||
@@ -266,6 +267,7 @@ Web Push is a standalone subsystem in `app/push/`, separate from the fanout modu
|
||||
- `POST /settings/blocked-names/toggle`
|
||||
- `POST /settings/tracked-telemetry/toggle`
|
||||
- `GET /settings/tracked-telemetry/schedule` — current telemetry scheduling derivation, interval options, and next-run-at timestamp
|
||||
- `POST /settings/muted-channels/toggle`
|
||||
|
||||
### Fanout
|
||||
- `GET /fanout` — list all fanout configs
|
||||
@@ -396,7 +398,7 @@ tests/
|
||||
├── test_message_prefix_claim.py # Message prefix claim logic
|
||||
├── test_mqtt.py # MQTT publisher topic routing and lifecycle
|
||||
├── test_messages_search.py # Message search, around, forward pagination
|
||||
├── test_migrations.py # Schema migration system
|
||||
├── test_mqtt_ha.py # MQTT HA (high-availability) behavior
|
||||
├── test_packet_pipeline.py # End-to-end packet processing
|
||||
├── test_packets_router.py # Packets router endpoints (decrypt, maintenance)
|
||||
├── test_path_utils.py # Path hex rendering helpers
|
||||
@@ -415,7 +417,13 @@ tests/
|
||||
├── test_security.py # Optional Basic Auth middleware / config behavior
|
||||
├── test_send_messages.py # Outgoing messages, bot triggers, concurrent sends
|
||||
├── test_settings_router.py # Settings endpoints, advert validation
|
||||
├── test_push_send.py # Web Push send/dispatch
|
||||
├── test_radio_stats.py # Radio stats sampling and noise-floor history
|
||||
├── test_repeater_telemetry.py # Repeater telemetry history recording
|
||||
├── test_service_installer.py # Service installer script behavior
|
||||
├── test_sqs_fanout.py # SQS fanout module
|
||||
├── test_statistics.py # Statistics aggregation
|
||||
├── test_telemetry_interval.py # Telemetry interval scheduling math
|
||||
├── test_version_info.py # Version/build metadata resolution
|
||||
├── test_websocket.py # WS manager broadcast/cleanup
|
||||
└── test_websocket_route.py # WS endpoint lifecycle
|
||||
|
||||
@@ -26,6 +26,7 @@ class Settings(BaseSettings):
|
||||
default=False,
|
||||
validation_alias="__CLOWNTOWN_DO_CLOCK_WRAPAROUND",
|
||||
)
|
||||
enable_local_private_key_export: bool = False
|
||||
load_with_autoevict: bool = False
|
||||
skip_post_connect_sync: bool = False
|
||||
basic_auth_username: str = ""
|
||||
|
||||
+18
-2
@@ -42,7 +42,8 @@ CREATE TABLE IF NOT EXISTS channels (
|
||||
flood_scope_override TEXT,
|
||||
path_hash_mode_override INTEGER,
|
||||
last_read_at INTEGER,
|
||||
favorite INTEGER DEFAULT 0
|
||||
favorite INTEGER DEFAULT 0,
|
||||
muted INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS messages (
|
||||
@@ -112,7 +113,10 @@ CREATE TABLE IF NOT EXISTS app_settings (
|
||||
discovery_blocked_types TEXT DEFAULT '[]',
|
||||
tracked_telemetry_repeaters TEXT DEFAULT '[]',
|
||||
auto_resend_channel INTEGER DEFAULT 0,
|
||||
telemetry_interval_hours INTEGER DEFAULT 8
|
||||
telemetry_interval_hours INTEGER DEFAULT 8,
|
||||
vapid_private_key TEXT DEFAULT '',
|
||||
vapid_public_key TEXT DEFAULT '',
|
||||
push_conversations TEXT DEFAULT '[]'
|
||||
);
|
||||
INSERT OR IGNORE INTO app_settings (id) VALUES (1);
|
||||
|
||||
@@ -134,6 +138,18 @@ CREATE TABLE IF NOT EXISTS repeater_telemetry_history (
|
||||
data TEXT NOT NULL,
|
||||
FOREIGN KEY (public_key) REFERENCES contacts(public_key) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS push_subscriptions (
|
||||
id TEXT PRIMARY KEY,
|
||||
endpoint TEXT NOT NULL,
|
||||
p256dh TEXT NOT NULL,
|
||||
auth TEXT NOT NULL,
|
||||
label TEXT NOT NULL DEFAULT '',
|
||||
created_at INTEGER NOT NULL,
|
||||
last_success_at INTEGER,
|
||||
failure_count INTEGER DEFAULT 0,
|
||||
UNIQUE(endpoint)
|
||||
);
|
||||
"""
|
||||
|
||||
# Indexes are created after migrations so that legacy databases have all
|
||||
|
||||
+207
-47
@@ -11,6 +11,37 @@ from app.path_utils import split_path_hex
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_MAX_SEND_ATTEMPTS = 3
|
||||
_RETRY_DELAY_S = 2
|
||||
|
||||
DEFAULT_BODY_FORMAT_DM = "**DM:** {sender_name}: {text} **via:** [{hops_backticked}]"
|
||||
DEFAULT_BODY_FORMAT_CHANNEL = (
|
||||
"**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]"
|
||||
)
|
||||
_DEFAULT_BODY_FORMAT_DM_NO_PATH = "**DM:** {sender_name}: {text}"
|
||||
_DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH = "**{channel_name}:** {sender_name}: {text}"
|
||||
|
||||
# Plain-text variants (no markdown formatting)
|
||||
DEFAULT_BODY_FORMAT_DM_PLAIN = "DM: {sender_name}: {text} via: [{hops}]"
|
||||
DEFAULT_BODY_FORMAT_CHANNEL_PLAIN = "{channel_name}: {sender_name}: {text} via: [{hops}]"
|
||||
_DEFAULT_BODY_FORMAT_DM_NO_PATH_PLAIN = "DM: {sender_name}: {text}"
|
||||
_DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH_PLAIN = "{channel_name}: {sender_name}: {text}"
|
||||
|
||||
# Variables available for user format strings
|
||||
FORMAT_VARIABLES = (
|
||||
"type",
|
||||
"text",
|
||||
"sender_name",
|
||||
"sender_key",
|
||||
"channel_name",
|
||||
"conversation_key",
|
||||
"hops",
|
||||
"hops_backticked",
|
||||
"hop_count",
|
||||
"rssi",
|
||||
"snr",
|
||||
)
|
||||
|
||||
|
||||
def _parse_urls(raw: str) -> list[str]:
|
||||
"""Split multi-line URL string into individual URLs."""
|
||||
@@ -36,46 +67,111 @@ def _normalize_discord_url(url: str) -> str:
|
||||
return urlunsplit((parts.scheme, parts.netloc, parts.path, urlencode(query), parts.fragment))
|
||||
|
||||
|
||||
def _format_body(data: dict, *, include_path: bool) -> str:
|
||||
"""Build a human-readable notification body from message data."""
|
||||
def _compute_hops(data: dict) -> tuple[str, str, int]:
|
||||
"""Extract hop info from message data. Returns (hops, hops_backticked, hop_count)."""
|
||||
paths = data.get("paths")
|
||||
if paths and isinstance(paths, list) and len(paths) > 0:
|
||||
first_path = paths[0] if isinstance(paths[0], dict) else {}
|
||||
path_str = first_path.get("path", "")
|
||||
path_len = first_path.get("path_len")
|
||||
else:
|
||||
path_str = None
|
||||
path_len = None
|
||||
|
||||
if path_str is None or path_str.strip() == "":
|
||||
return ("direct", "`direct`", 0)
|
||||
|
||||
path_str = path_str.strip().lower()
|
||||
hop_count = path_len if isinstance(path_len, int) else len(path_str) // 2
|
||||
hops = split_path_hex(path_str, hop_count)
|
||||
if not hops:
|
||||
return ("direct", "`direct`", 0)
|
||||
|
||||
return (
|
||||
", ".join(hops),
|
||||
", ".join(f"`{h}`" for h in hops),
|
||||
len(hops),
|
||||
)
|
||||
|
||||
|
||||
def _build_template_vars(data: dict) -> dict[str, str]:
|
||||
"""Build the variable dict for format string substitution."""
|
||||
hops_raw, hops_bt, hop_count = _compute_hops(data)
|
||||
|
||||
paths = data.get("paths")
|
||||
rssi = ""
|
||||
snr = ""
|
||||
if paths and isinstance(paths, list) and len(paths) > 0:
|
||||
first_path = paths[0] if isinstance(paths[0], dict) else {}
|
||||
rssi_val = first_path.get("rssi")
|
||||
snr_val = first_path.get("snr")
|
||||
if rssi_val is not None:
|
||||
rssi = str(rssi_val)
|
||||
if snr_val is not None:
|
||||
snr = str(snr_val)
|
||||
|
||||
return {
|
||||
"type": data.get("type", ""),
|
||||
"text": get_fanout_message_text(data),
|
||||
"sender_name": data.get("sender_name") or "Unknown",
|
||||
"sender_key": data.get("sender_key") or "",
|
||||
"channel_name": data.get("channel_name") or data.get("conversation_key", "channel"),
|
||||
"conversation_key": data.get("conversation_key", ""),
|
||||
"hops": hops_raw,
|
||||
"hops_backticked": hops_bt,
|
||||
"hop_count": str(hop_count),
|
||||
"rssi": rssi,
|
||||
"snr": snr,
|
||||
}
|
||||
|
||||
|
||||
def _apply_format(fmt: str, variables: dict[str, str]) -> str:
|
||||
"""Apply template variables in a single pass to avoid re-expanding substituted values."""
|
||||
import re
|
||||
|
||||
def _replacer(m: re.Match[str]) -> str:
|
||||
key = m.group(1)
|
||||
return variables.get(key, m.group(0))
|
||||
|
||||
return re.sub(r"\{(\w+)\}", _replacer, fmt)
|
||||
|
||||
|
||||
def _format_body(
|
||||
data: dict,
|
||||
*,
|
||||
body_format_dm: str | None = None,
|
||||
body_format_channel: str | None = None,
|
||||
markdown: bool = True,
|
||||
) -> str:
|
||||
"""Build a notification body from message data using format strings."""
|
||||
if body_format_dm is None:
|
||||
body_format_dm = DEFAULT_BODY_FORMAT_DM if markdown else DEFAULT_BODY_FORMAT_DM_PLAIN
|
||||
if body_format_channel is None:
|
||||
body_format_channel = (
|
||||
DEFAULT_BODY_FORMAT_CHANNEL if markdown else DEFAULT_BODY_FORMAT_CHANNEL_PLAIN
|
||||
)
|
||||
variables = _build_template_vars(data)
|
||||
msg_type = data.get("type", "")
|
||||
text = get_fanout_message_text(data)
|
||||
sender_name = data.get("sender_name") or "Unknown"
|
||||
|
||||
via = ""
|
||||
if include_path:
|
||||
paths = data.get("paths")
|
||||
if paths and isinstance(paths, list) and len(paths) > 0:
|
||||
first_path = paths[0] if isinstance(paths[0], dict) else {}
|
||||
path_str = first_path.get("path", "")
|
||||
path_len = first_path.get("path_len")
|
||||
fmt = body_format_dm if msg_type == "PRIV" else body_format_channel
|
||||
try:
|
||||
return _apply_format(fmt, variables)
|
||||
except Exception:
|
||||
logger.warning("Apprise format string error, falling back to default")
|
||||
if markdown:
|
||||
default = DEFAULT_BODY_FORMAT_DM if msg_type == "PRIV" else DEFAULT_BODY_FORMAT_CHANNEL
|
||||
else:
|
||||
path_str = None
|
||||
path_len = None
|
||||
|
||||
if msg_type == "PRIV" and path_str is None:
|
||||
via = " **via:** [`direct`]"
|
||||
elif path_str is not None:
|
||||
path_str = path_str.strip().lower()
|
||||
if path_str == "":
|
||||
via = " **via:** [`direct`]"
|
||||
else:
|
||||
hop_count = path_len if isinstance(path_len, int) else len(path_str) // 2
|
||||
hops = split_path_hex(path_str, hop_count)
|
||||
if hops:
|
||||
hop_list = ", ".join(f"`{h}`" for h in hops)
|
||||
via = f" **via:** [{hop_list}]"
|
||||
|
||||
if msg_type == "PRIV":
|
||||
return f"**DM:** {sender_name}: {text}{via}"
|
||||
|
||||
channel_name = data.get("channel_name") or data.get("conversation_key", "channel")
|
||||
return f"**{channel_name}:** {sender_name}: {text}{via}"
|
||||
default = (
|
||||
DEFAULT_BODY_FORMAT_DM_PLAIN
|
||||
if msg_type == "PRIV"
|
||||
else DEFAULT_BODY_FORMAT_CHANNEL_PLAIN
|
||||
)
|
||||
return _apply_format(default, variables)
|
||||
|
||||
|
||||
def _send_sync(urls_raw: str, body: str, *, preserve_identity: bool) -> bool:
|
||||
def _send_sync(urls_raw: str, body: str, *, preserve_identity: bool, markdown: bool = True) -> bool:
|
||||
"""Send notification synchronously via Apprise. Returns True on success."""
|
||||
import apprise as apprise_lib
|
||||
from apprise import NotifyFormat
|
||||
|
||||
urls = _parse_urls(urls_raw)
|
||||
if not urls:
|
||||
@@ -87,7 +183,8 @@ def _send_sync(urls_raw: str, body: str, *, preserve_identity: bool) -> bool:
|
||||
url = _normalize_discord_url(url)
|
||||
notifier.add(url)
|
||||
|
||||
return bool(notifier.notify(title="", body=body))
|
||||
body_fmt = NotifyFormat.MARKDOWN if markdown else NotifyFormat.TEXT
|
||||
return bool(notifier.notify(title="", body=body, body_format=body_fmt))
|
||||
|
||||
|
||||
class AppriseModule(FanoutModule):
|
||||
@@ -106,19 +203,82 @@ class AppriseModule(FanoutModule):
|
||||
return
|
||||
|
||||
preserve_identity = self.config.get("preserve_identity", True)
|
||||
include_path = self.config.get("include_path", True)
|
||||
body = _format_body(data, include_path=include_path)
|
||||
markdown = self.config.get("markdown_format", True)
|
||||
|
||||
try:
|
||||
success = await asyncio.to_thread(
|
||||
_send_sync, urls, body, preserve_identity=preserve_identity
|
||||
)
|
||||
self._set_last_error(None if success else "Apprise notify returned failure")
|
||||
if not success:
|
||||
logger.warning("Apprise notification failed for module %s", self.config_id)
|
||||
except Exception as exc:
|
||||
self._set_last_error(str(exc))
|
||||
logger.exception("Apprise send error for module %s", self.config_id)
|
||||
# Read format strings; treat empty/whitespace as unset (use default).
|
||||
# Fall back to legacy include_path for pre-migration configs.
|
||||
body_format_dm = (self.config.get("body_format_dm") or "").strip() or None
|
||||
body_format_channel = (self.config.get("body_format_channel") or "").strip() or None
|
||||
if body_format_dm is None or body_format_channel is None:
|
||||
include_path = self.config.get("include_path", True)
|
||||
if body_format_dm is None:
|
||||
if markdown:
|
||||
body_format_dm = (
|
||||
DEFAULT_BODY_FORMAT_DM if include_path else _DEFAULT_BODY_FORMAT_DM_NO_PATH
|
||||
)
|
||||
else:
|
||||
body_format_dm = (
|
||||
DEFAULT_BODY_FORMAT_DM_PLAIN
|
||||
if include_path
|
||||
else _DEFAULT_BODY_FORMAT_DM_NO_PATH_PLAIN
|
||||
)
|
||||
if body_format_channel is None:
|
||||
if markdown:
|
||||
body_format_channel = (
|
||||
DEFAULT_BODY_FORMAT_CHANNEL
|
||||
if include_path
|
||||
else _DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH
|
||||
)
|
||||
else:
|
||||
body_format_channel = (
|
||||
DEFAULT_BODY_FORMAT_CHANNEL_PLAIN
|
||||
if include_path
|
||||
else _DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH_PLAIN
|
||||
)
|
||||
|
||||
body = _format_body(
|
||||
data,
|
||||
body_format_dm=body_format_dm,
|
||||
body_format_channel=body_format_channel,
|
||||
markdown=markdown,
|
||||
)
|
||||
|
||||
last_exc: Exception | None = None
|
||||
for attempt in range(_MAX_SEND_ATTEMPTS):
|
||||
try:
|
||||
success = await asyncio.to_thread(
|
||||
_send_sync,
|
||||
urls,
|
||||
body,
|
||||
preserve_identity=preserve_identity,
|
||||
markdown=markdown,
|
||||
)
|
||||
if success:
|
||||
self._set_last_error(None)
|
||||
return
|
||||
logger.warning(
|
||||
"Apprise notification failed for module %s (attempt %d/%d)",
|
||||
self.config_id,
|
||||
attempt + 1,
|
||||
_MAX_SEND_ATTEMPTS,
|
||||
)
|
||||
except Exception as exc:
|
||||
last_exc = exc
|
||||
logger.warning(
|
||||
"Apprise send error for module %s (attempt %d/%d): %s",
|
||||
self.config_id,
|
||||
attempt + 1,
|
||||
_MAX_SEND_ATTEMPTS,
|
||||
exc,
|
||||
)
|
||||
if attempt < _MAX_SEND_ATTEMPTS - 1:
|
||||
await asyncio.sleep(_RETRY_DELAY_S)
|
||||
|
||||
# All attempts exhausted
|
||||
if last_exc is not None:
|
||||
self._set_last_error(str(last_exc))
|
||||
else:
|
||||
self._set_last_error("Apprise notify returned failure")
|
||||
|
||||
@property
|
||||
def status(self) -> str:
|
||||
|
||||
@@ -245,7 +245,7 @@ def _get_client_version() -> str:
|
||||
class CommunityMqttPublisher(BaseMqttPublisher):
|
||||
"""Manages the community MQTT connection and publishes raw packets."""
|
||||
|
||||
_backoff_max = 60
|
||||
_backoff_max = 3600
|
||||
_log_prefix = "Community MQTT"
|
||||
_not_configured_timeout: float | None = 30
|
||||
|
||||
@@ -477,7 +477,21 @@ class CommunityMqttPublisher(BaseMqttPublisher):
|
||||
if radio_manager.meshcore and radio_manager.meshcore.self_info:
|
||||
device_name = radio_manager.meshcore.self_info.get("name", "")
|
||||
|
||||
device_info = await self._fetch_device_info()
|
||||
# Prefer the always-fresh radio_manager fields (populated on every reconnect by
|
||||
# radio_lifecycle) over the per-module _cached_device_info, which was only
|
||||
# cleared on module restart and therefore served stale firmware versions after
|
||||
# a radio firmware update. Fall back to _fetch_device_info() for older firmware
|
||||
# where device_info_loaded is False.
|
||||
if radio_manager.device_info_loaded:
|
||||
raw_ver = radio_manager.firmware_version or "unknown"
|
||||
fw_build = radio_manager.firmware_build or ""
|
||||
fw_str = f"{raw_ver} (Build: {fw_build})" if fw_build else f"{raw_ver}"
|
||||
device_info = {
|
||||
"model": radio_manager.device_model or "unknown",
|
||||
"firmware_version": fw_str,
|
||||
}
|
||||
else:
|
||||
device_info = await self._fetch_device_info()
|
||||
stats = await self._fetch_stats() if refresh_stats else self._cached_stats
|
||||
|
||||
status_topic = _build_status_topic(settings, pubkey_hex)
|
||||
|
||||
+1
-1
@@ -27,7 +27,7 @@ class PrivateMqttSettings(Protocol):
|
||||
class MqttPublisher(BaseMqttPublisher):
|
||||
"""Manages an MQTT connection and publishes mesh network events."""
|
||||
|
||||
_backoff_max = 30
|
||||
_backoff_max = 3600
|
||||
_log_prefix = "MQTT"
|
||||
|
||||
def _is_configured(self) -> bool:
|
||||
|
||||
@@ -65,6 +65,7 @@ class BaseMqttPublisher(ABC):
|
||||
self.connected: bool = False
|
||||
self.integration_name: str = ""
|
||||
self._last_error: str | None = None
|
||||
self._error_notified: bool = False
|
||||
|
||||
def set_integration_name(self, name: str) -> None:
|
||||
"""Attach the configured fanout-module name for operator-facing logs."""
|
||||
@@ -104,6 +105,7 @@ class BaseMqttPublisher(ABC):
|
||||
self._client = None
|
||||
self.connected = False
|
||||
self._last_error = None
|
||||
self._error_notified = False
|
||||
|
||||
async def restart(self, settings: object) -> None:
|
||||
"""Called when settings change — stop + start."""
|
||||
@@ -217,6 +219,7 @@ class BaseMqttPublisher(ABC):
|
||||
self._client = client
|
||||
self.connected = True
|
||||
self._last_error = None
|
||||
self._error_notified = False
|
||||
backoff = _BACKOFF_MIN
|
||||
|
||||
title, detail = self._on_connected(settings)
|
||||
@@ -281,9 +284,11 @@ class BaseMqttPublisher(ABC):
|
||||
)
|
||||
return
|
||||
|
||||
title, detail = self._on_error()
|
||||
broadcast_error(title, detail)
|
||||
_broadcast_health()
|
||||
if not self._error_notified:
|
||||
title, detail = self._on_error()
|
||||
broadcast_error(title, detail)
|
||||
_broadcast_health()
|
||||
self._error_notified = True
|
||||
logger.warning(
|
||||
"%s connection error. This is usually transient network noise; "
|
||||
"if it self-resolves, it is generally not a concern: %s "
|
||||
|
||||
+34
-9
@@ -81,6 +81,15 @@ _REPEATER_SENSORS: list[dict[str, Any]] = [
|
||||
"unit": None,
|
||||
"precision": 0,
|
||||
},
|
||||
{
|
||||
"field": "recv_errors",
|
||||
"name": "RX Errors",
|
||||
"object_id": "recv_errors",
|
||||
"device_class": None,
|
||||
"state_class": "total_increasing",
|
||||
"unit": None,
|
||||
"precision": 0,
|
||||
},
|
||||
{
|
||||
"field": "uptime_seconds",
|
||||
"name": "Uptime",
|
||||
@@ -115,6 +124,22 @@ def _lpp_sensor_key(type_name: str, channel: int) -> str:
|
||||
return f"lpp_{type_name}_ch{channel}"
|
||||
|
||||
|
||||
def _assign_lpp_keys(lpp_sensors: list[dict]) -> list[tuple[dict, str, int]]:
|
||||
"""Pair each LPP sensor dict with a disambiguated flat key and occurrence.
|
||||
|
||||
First occurrence keeps the base key (``lpp_temperature_ch1``), occurrence=1;
|
||||
subsequent duplicates of the same (type_name, channel) get ``_2``, ``_3``, etc.
|
||||
"""
|
||||
counts: dict[str, int] = {}
|
||||
result: list[tuple[dict, str, int]] = []
|
||||
for sensor in lpp_sensors:
|
||||
base = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
|
||||
n = counts.get(base, 0) + 1
|
||||
counts[base] = n
|
||||
result.append((sensor, base if n == 1 else f"{base}_{n}", n))
|
||||
return result
|
||||
|
||||
|
||||
def _repeater_telemetry_payload(data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Build the flat HA state payload for a repeater telemetry snapshot."""
|
||||
payload: dict[str, Any] = {}
|
||||
@@ -123,8 +148,7 @@ def _repeater_telemetry_payload(data: dict[str, Any]) -> dict[str, Any]:
|
||||
if field is not None:
|
||||
payload[field] = data.get(field)
|
||||
|
||||
for sensor in data.get("lpp_sensors", []) or []:
|
||||
key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
|
||||
for sensor, key, _ in _assign_lpp_keys(data.get("lpp_sensors", []) or []):
|
||||
payload[key] = sensor.get("value")
|
||||
|
||||
return payload
|
||||
@@ -139,16 +163,19 @@ def _lpp_discovery_configs(
|
||||
) -> list[tuple[str, dict]]:
|
||||
"""Build HA discovery configs for a repeater's LPP sensors."""
|
||||
configs: list[tuple[str, dict]] = []
|
||||
for sensor in lpp_sensors:
|
||||
for sensor, field, occurrence in _assign_lpp_keys(lpp_sensors):
|
||||
type_name = sensor.get("type_name", "unknown")
|
||||
channel = sensor.get("channel", 0)
|
||||
field = _lpp_sensor_key(type_name, channel)
|
||||
meta = _LPP_HA_META.get(type_name, {})
|
||||
|
||||
nid = _node_id(pub_key)
|
||||
object_id = field
|
||||
display = type_name.replace("_", " ").title()
|
||||
name = f"{display} (Ch {channel})"
|
||||
name = (
|
||||
f"{display} (Ch {channel})"
|
||||
if occurrence == 1
|
||||
else f"{display} (Ch {channel}) #{occurrence}"
|
||||
)
|
||||
|
||||
cfg: dict[str, Any] = {
|
||||
"name": name,
|
||||
@@ -289,7 +316,7 @@ def _device_payload(
|
||||
class _HaMqttPublisher(BaseMqttPublisher):
|
||||
"""Thin MQTT lifecycle wrapper for the HA discovery module."""
|
||||
|
||||
_backoff_max = 30
|
||||
_backoff_max = 3600
|
||||
_log_prefix = "HA-MQTT"
|
||||
|
||||
def __init__(self) -> None:
|
||||
@@ -731,9 +758,7 @@ class MqttHaModule(FanoutModule):
|
||||
payload = _repeater_telemetry_payload(data)
|
||||
lpp_sensors: list[dict] = data.get("lpp_sensors", [])
|
||||
rediscover = False
|
||||
for sensor in lpp_sensors:
|
||||
# Check if discovery for this sensor has been published yet
|
||||
key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
|
||||
for _, key, _ in _assign_lpp_keys(lpp_sensors):
|
||||
expected_topic = f"homeassistant/sensor/meshcore_{nid}/{key}/config"
|
||||
if expected_topic not in self._discovery_topics:
|
||||
rediscover = True
|
||||
|
||||
+21
-2
@@ -176,8 +176,27 @@ app.add_middleware(
|
||||
|
||||
@app.exception_handler(RadioDisconnectedError)
|
||||
async def radio_disconnected_handler(request: Request, exc: RadioDisconnectedError):
|
||||
"""Return 503 when a radio disconnect race occurs during an operation."""
|
||||
return JSONResponse(status_code=503, content={"detail": "Radio not connected"})
|
||||
"""Return 423 when a radio disconnect race occurs during an operation."""
|
||||
return JSONResponse(status_code=423, content={"detail": "Radio not connected"})
|
||||
|
||||
|
||||
@app.middleware("http")
|
||||
async def log_server_errors(request: Request, call_next):
|
||||
"""Capture 5xx errors and unhandled exceptions into the log ring buffer.
|
||||
|
||||
Starlette writes unhandled-exception tracebacks to stderr, bypassing
|
||||
Python logging, so they never reach the debug dump. This middleware
|
||||
catches them and logs via ``logger.exception()`` so the full traceback
|
||||
is preserved in the ring buffer for the ``GET /api/debug`` snapshot.
|
||||
"""
|
||||
try:
|
||||
response = await call_next(request)
|
||||
except Exception:
|
||||
logger.exception("Unhandled exception on %s %s", request.method, request.url.path)
|
||||
raise
|
||||
if response.status_code >= 500:
|
||||
logger.error("HTTP %d on %s %s", response.status_code, request.method, request.url.path)
|
||||
return response
|
||||
|
||||
|
||||
# API routes - all prefixed with /api for production compatibility
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
import aiosqlite
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_BODY_FORMAT_DM = "**DM:** {sender_name}: {text} **via:** [{hops_backticked}]"
|
||||
DEFAULT_BODY_FORMAT_CHANNEL = (
|
||||
"**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]"
|
||||
)
|
||||
_DEFAULT_BODY_FORMAT_DM_NO_PATH = "**DM:** {sender_name}: {text}"
|
||||
_DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH = "**{channel_name}:** {sender_name}: {text}"
|
||||
|
||||
|
||||
async def migrate(conn: aiosqlite.Connection) -> None:
|
||||
"""Migrate apprise fanout configs from include_path boolean to format strings."""
|
||||
table_check = await conn.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='fanout_configs'"
|
||||
)
|
||||
if not await table_check.fetchone():
|
||||
await conn.commit()
|
||||
return
|
||||
|
||||
cursor = await conn.execute("SELECT id, config FROM fanout_configs WHERE type = 'apprise'")
|
||||
rows = await cursor.fetchall()
|
||||
|
||||
for row in rows:
|
||||
config_id = row["id"] if isinstance(row, dict) else row[0]
|
||||
config_raw = row["config"] if isinstance(row, dict) else row[1]
|
||||
try:
|
||||
config = json.loads(config_raw)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
continue
|
||||
|
||||
# Skip if already migrated
|
||||
if "body_format_dm" in config:
|
||||
continue
|
||||
|
||||
include_path = config.get("include_path", True)
|
||||
config["body_format_dm"] = (
|
||||
DEFAULT_BODY_FORMAT_DM if include_path else _DEFAULT_BODY_FORMAT_DM_NO_PATH
|
||||
)
|
||||
config["body_format_channel"] = (
|
||||
DEFAULT_BODY_FORMAT_CHANNEL if include_path else _DEFAULT_BODY_FORMAT_CHANNEL_NO_PATH
|
||||
)
|
||||
config.pop("include_path", None)
|
||||
|
||||
await conn.execute(
|
||||
"UPDATE fanout_configs SET config = ? WHERE id = ?",
|
||||
(json.dumps(config), config_id),
|
||||
)
|
||||
logger.info(
|
||||
"Migrated apprise config %s: include_path=%s -> format strings", config_id, include_path
|
||||
)
|
||||
|
||||
await conn.commit()
|
||||
@@ -0,0 +1,20 @@
|
||||
import logging
|
||||
|
||||
import aiosqlite
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def migrate(conn: aiosqlite.Connection) -> None:
|
||||
"""Add telemetry_routed_hourly boolean column to app_settings."""
|
||||
tables_cursor = await conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
if "app_settings" not in {row[0] for row in await tables_cursor.fetchall()}:
|
||||
await conn.commit()
|
||||
return
|
||||
col_cursor = await conn.execute("PRAGMA table_info(app_settings)")
|
||||
columns = {row[1] for row in await col_cursor.fetchall()}
|
||||
if "telemetry_routed_hourly" not in columns:
|
||||
await conn.execute(
|
||||
"ALTER TABLE app_settings ADD COLUMN telemetry_routed_hourly INTEGER DEFAULT 0"
|
||||
)
|
||||
await conn.commit()
|
||||
@@ -221,6 +221,9 @@ class CreateContactRequest(BaseModel):
|
||||
|
||||
public_key: str = Field(min_length=64, max_length=64, description="Public key (64-char hex)")
|
||||
name: str | None = Field(default=None, description="Display name for the contact")
|
||||
type: int = Field(
|
||||
default=0, ge=0, le=3, description="Contact type (0=unknown, 1=client, 2=repeater, 3=room)"
|
||||
)
|
||||
try_historical: bool = Field(
|
||||
default=False,
|
||||
description="Attempt to decrypt historical DM packets for this contact",
|
||||
@@ -445,6 +448,8 @@ class RawPacketDecryptedInfo(BaseModel):
|
||||
sender: str | None = None
|
||||
channel_key: str | None = None
|
||||
contact_key: str | None = None
|
||||
sender_timestamp: int | None = None
|
||||
message: str | None = None
|
||||
|
||||
|
||||
class RawPacketBroadcast(BaseModel):
|
||||
@@ -537,6 +542,7 @@ class RepeaterStatusResponse(BaseModel):
|
||||
flood_dups: int = Field(description="Duplicate flood packets")
|
||||
direct_dups: int = Field(description="Duplicate direct packets")
|
||||
full_events: int = Field(description="Full event queue count")
|
||||
recv_errors: int | None = Field(default=None, description="Radio-level RX packet errors")
|
||||
telemetry_history: list["TelemetryHistoryEntry"] = Field(
|
||||
default_factory=list, description="Recent telemetry history snapshots"
|
||||
)
|
||||
@@ -851,6 +857,13 @@ class AppSettings(BaseModel):
|
||||
"tracked repeaters so daily checks stay under a 24/day ceiling."
|
||||
),
|
||||
)
|
||||
telemetry_routed_hourly: bool = Field(
|
||||
default=False,
|
||||
description=(
|
||||
"When enabled, tracked repeaters with a direct or routed (non-flood) "
|
||||
"path are polled every hour instead of on the normal scheduled interval."
|
||||
),
|
||||
)
|
||||
auto_resend_channel: bool = Field(
|
||||
default=False,
|
||||
description=(
|
||||
|
||||
@@ -366,6 +366,8 @@ async def process_raw_packet(
|
||||
sender=result["sender"],
|
||||
channel_key=result.get("channel_key"),
|
||||
contact_key=result.get("contact_key"),
|
||||
sender_timestamp=result.get("sender_timestamp"),
|
||||
message=result.get("message"),
|
||||
)
|
||||
if result["decrypted"]
|
||||
else None,
|
||||
@@ -428,6 +430,8 @@ async def _process_group_text(
|
||||
"sender": decrypted.sender,
|
||||
"message_id": msg_id, # None if duplicate, msg_id if new
|
||||
"channel_key": channel.key,
|
||||
"sender_timestamp": decrypted.timestamp,
|
||||
"message": decrypted.message,
|
||||
}
|
||||
|
||||
# Couldn't decrypt with any known key
|
||||
@@ -694,6 +698,8 @@ async def _process_direct_message(
|
||||
"sender": contact.name or contact.public_key[:12],
|
||||
"message_id": msg_id,
|
||||
"contact_key": contact.public_key,
|
||||
"sender_timestamp": result.timestamp,
|
||||
"message": result.message,
|
||||
}
|
||||
|
||||
# Couldn't decrypt with any known contact
|
||||
|
||||
+45
-12
@@ -1273,7 +1273,12 @@ async def _reconcile_radio_contacts_in_background(
|
||||
continue
|
||||
|
||||
budget -= 1
|
||||
if remove_result.type == EventType.OK:
|
||||
not_found = (
|
||||
remove_result.type != EventType.OK
|
||||
and isinstance(remove_result.payload, dict)
|
||||
and remove_result.payload.get("error_code") == 2
|
||||
)
|
||||
if remove_result.type == EventType.OK or not_found:
|
||||
radio_contacts.pop(public_key, None)
|
||||
_evict_removed_contact_from_library_cache(mc, public_key)
|
||||
removed += 1
|
||||
@@ -1816,6 +1821,7 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
||||
"flood_dups": status.get("flood_dups", 0),
|
||||
"direct_dups": status.get("direct_dups", 0),
|
||||
"full_events": status.get("full_evts", 0),
|
||||
"recv_errors": status.get("recv_errors"),
|
||||
}
|
||||
|
||||
# Best-effort LPP sensor fetch — failure here does not fail the overall
|
||||
@@ -1884,8 +1890,13 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
async def _run_telemetry_cycle() -> None:
|
||||
"""Collect one telemetry sample from every tracked repeater."""
|
||||
async def _run_telemetry_cycle(*, routed_only: bool = False) -> None:
|
||||
"""Collect one telemetry sample from tracked repeaters.
|
||||
|
||||
When *routed_only* is True, only repeaters whose effective route is
|
||||
``"direct"`` or ``"override"`` (i.e. not ``"flood"``) are collected.
|
||||
This is used by the hourly routed-path fast-poll feature.
|
||||
"""
|
||||
if not radio_manager.is_connected:
|
||||
logger.debug("Telemetry collect: radio not connected, skipping cycle")
|
||||
return
|
||||
@@ -1895,9 +1906,7 @@ async def _run_telemetry_cycle() -> None:
|
||||
if not tracked:
|
||||
return
|
||||
|
||||
logger.info("Telemetry collect: starting cycle for %d repeater(s)", len(tracked))
|
||||
collected = 0
|
||||
|
||||
candidates: list[tuple[str, Contact]] = []
|
||||
for pub_key in tracked:
|
||||
contact = await ContactRepository.get_by_key(pub_key)
|
||||
if not contact or contact.type != 2:
|
||||
@@ -1906,7 +1915,24 @@ async def _run_telemetry_cycle() -> None:
|
||||
pub_key[:12],
|
||||
)
|
||||
continue
|
||||
if routed_only and (not contact.effective_route or contact.effective_route.path_len < 0):
|
||||
continue
|
||||
candidates.append((pub_key, contact))
|
||||
|
||||
if not candidates:
|
||||
if routed_only:
|
||||
logger.debug("Telemetry collect: no routed repeaters to poll this hour")
|
||||
return
|
||||
|
||||
label = "routed" if routed_only else "full"
|
||||
logger.info(
|
||||
"Telemetry collect: starting %s cycle for %d repeater(s)",
|
||||
label,
|
||||
len(candidates),
|
||||
)
|
||||
collected = 0
|
||||
|
||||
for _pub_key, contact in candidates:
|
||||
try:
|
||||
async with radio_manager.radio_operation(
|
||||
"telemetry_collect",
|
||||
@@ -1918,13 +1944,14 @@ async def _run_telemetry_cycle() -> None:
|
||||
except RadioOperationBusyError:
|
||||
logger.debug(
|
||||
"Telemetry collect: radio busy, skipping %s",
|
||||
pub_key[:12],
|
||||
contact.public_key[:12],
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Telemetry collect: cycle complete, %d/%d successful",
|
||||
"Telemetry collect: %s cycle complete, %d/%d successful",
|
||||
label,
|
||||
collected,
|
||||
len(tracked),
|
||||
len(candidates),
|
||||
)
|
||||
|
||||
|
||||
@@ -1954,9 +1981,15 @@ async def _maybe_run_scheduled_cycle(now: datetime) -> None:
|
||||
effective_hours = clamp_telemetry_interval(app_settings.telemetry_interval_hours, tracked_count)
|
||||
if effective_hours <= 0:
|
||||
return
|
||||
if now.hour % effective_hours != 0:
|
||||
return
|
||||
await _run_telemetry_cycle()
|
||||
|
||||
is_normal_cycle = now.hour % effective_hours == 0
|
||||
|
||||
if is_normal_cycle:
|
||||
# Normal scheduled boundary: collect ALL tracked repeaters.
|
||||
await _run_telemetry_cycle()
|
||||
elif app_settings.telemetry_routed_hourly:
|
||||
# Hourly routed-path fast-poll: only repeaters with a non-flood route.
|
||||
await _run_telemetry_cycle(routed_only=True)
|
||||
|
||||
|
||||
async def _telemetry_collect_loop() -> None:
|
||||
|
||||
@@ -42,7 +42,7 @@ class AppSettingsRepository:
|
||||
advert_interval, last_advert_time, flood_scope,
|
||||
blocked_keys, blocked_names, discovery_blocked_types,
|
||||
tracked_telemetry_repeaters, auto_resend_channel,
|
||||
telemetry_interval_hours
|
||||
telemetry_interval_hours, telemetry_routed_hourly
|
||||
FROM app_settings WHERE id = 1
|
||||
"""
|
||||
) as cursor:
|
||||
@@ -113,6 +113,12 @@ class AppSettingsRepository:
|
||||
except (KeyError, TypeError, ValueError):
|
||||
telemetry_interval_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
|
||||
# Parse telemetry_routed_hourly boolean
|
||||
try:
|
||||
telemetry_routed_hourly = bool(row["telemetry_routed_hourly"])
|
||||
except (KeyError, TypeError):
|
||||
telemetry_routed_hourly = False
|
||||
|
||||
return AppSettings(
|
||||
max_radio_contacts=row["max_radio_contacts"],
|
||||
auto_decrypt_dm_on_advert=bool(row["auto_decrypt_dm_on_advert"]),
|
||||
@@ -126,6 +132,7 @@ class AppSettingsRepository:
|
||||
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||
auto_resend_channel=auto_resend_channel,
|
||||
telemetry_interval_hours=telemetry_interval_hours,
|
||||
telemetry_routed_hourly=telemetry_routed_hourly,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@@ -144,6 +151,7 @@ class AppSettingsRepository:
|
||||
tracked_telemetry_repeaters: list[str] | None = None,
|
||||
auto_resend_channel: bool | None = None,
|
||||
telemetry_interval_hours: int | None = None,
|
||||
telemetry_routed_hourly: bool | None = None,
|
||||
) -> None:
|
||||
"""Apply field updates using an already-acquired connection.
|
||||
|
||||
@@ -201,6 +209,10 @@ class AppSettingsRepository:
|
||||
updates.append("telemetry_interval_hours = ?")
|
||||
params.append(telemetry_interval_hours)
|
||||
|
||||
if telemetry_routed_hourly is not None:
|
||||
updates.append("telemetry_routed_hourly = ?")
|
||||
params.append(1 if telemetry_routed_hourly else 0)
|
||||
|
||||
if updates:
|
||||
query = f"UPDATE app_settings SET {', '.join(updates)} WHERE id = 1"
|
||||
async with conn.execute(query, params):
|
||||
@@ -229,6 +241,7 @@ class AppSettingsRepository:
|
||||
tracked_telemetry_repeaters: list[str] | None = None,
|
||||
auto_resend_channel: bool | None = None,
|
||||
telemetry_interval_hours: int | None = None,
|
||||
telemetry_routed_hourly: bool | None = None,
|
||||
) -> AppSettings:
|
||||
"""Update app settings. Only provided fields are updated."""
|
||||
async with db.tx() as conn:
|
||||
@@ -246,6 +259,7 @@ class AppSettingsRepository:
|
||||
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||
auto_resend_channel=auto_resend_channel,
|
||||
telemetry_interval_hours=telemetry_interval_hours,
|
||||
telemetry_routed_hourly=telemetry_routed_hourly,
|
||||
)
|
||||
return await AppSettingsRepository._get_in_conn(conn)
|
||||
|
||||
|
||||
@@ -66,11 +66,11 @@ async def _resolve_contact_or_404(
|
||||
|
||||
|
||||
async def _ensure_on_radio(mc, contact: Contact) -> None:
|
||||
"""Add a contact to the radio for routing, raising 500 on failure."""
|
||||
"""Add a contact to the radio for routing, raising 422 on failure."""
|
||||
add_result = await mc.commands.add_contact(contact.to_radio_dict())
|
||||
if add_result is not None and add_result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to add contact to radio: {add_result.payload}"
|
||||
status_code=422, detail=f"Failed to add contact to radio: {add_result.payload}"
|
||||
)
|
||||
|
||||
|
||||
@@ -315,6 +315,7 @@ async def create_contact(
|
||||
contact_upsert = ContactUpsert(
|
||||
public_key=lower_key,
|
||||
name=request.name,
|
||||
type=request.type,
|
||||
on_radio=False,
|
||||
)
|
||||
await ContactRepository.upsert(contact_upsert)
|
||||
@@ -451,7 +452,7 @@ async def request_trace(public_key: str) -> TraceResponse:
|
||||
)
|
||||
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to send trace: {result.payload}")
|
||||
raise HTTPException(status_code=422, detail=f"Failed to send trace: {result.payload}")
|
||||
|
||||
# Wait for the matching TRACE_DATA event
|
||||
event = await mc.wait_for_event(
|
||||
@@ -461,7 +462,7 @@ async def request_trace(public_key: str) -> TraceResponse:
|
||||
)
|
||||
|
||||
if event is None:
|
||||
raise HTTPException(status_code=504, detail="No trace response heard")
|
||||
raise HTTPException(status_code=408, detail="No trace response heard")
|
||||
|
||||
trace = event.payload
|
||||
path = trace.get("path", [])
|
||||
@@ -505,7 +506,7 @@ async def request_path_discovery(public_key: str) -> PathDiscoveryResponse:
|
||||
result = await mc.commands.send_path_discovery(contact.public_key)
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail=f"Failed to send path discovery: {result.payload}",
|
||||
)
|
||||
|
||||
@@ -517,7 +518,7 @@ async def request_path_discovery(public_key: str) -> PathDiscoveryResponse:
|
||||
await response_task
|
||||
|
||||
if event is None:
|
||||
raise HTTPException(status_code=504, detail="No path discovery response heard")
|
||||
raise HTTPException(status_code=408, detail="No path discovery response heard")
|
||||
|
||||
payload = event.payload
|
||||
forward_path = str(payload.get("out_path") or "")
|
||||
|
||||
+34
-5
@@ -64,7 +64,6 @@ class DebugRuntimeInfo(BaseModel):
|
||||
path_hash_mode_supported: bool
|
||||
channel_slot_reuse_enabled: bool
|
||||
channel_send_cache_capacity: int
|
||||
remediation_flags: dict[str, bool]
|
||||
|
||||
|
||||
class DebugContactAudit(BaseModel):
|
||||
@@ -110,6 +109,21 @@ class DebugHealthSummary(BaseModel):
|
||||
basic_auth_enabled: bool = False
|
||||
|
||||
|
||||
class DebugEnvironment(BaseModel):
|
||||
connection_type: str
|
||||
serial_port: str
|
||||
serial_baudrate: int
|
||||
tcp_host: str
|
||||
tcp_port: int
|
||||
ble_address: str
|
||||
log_level: str
|
||||
database_path: str
|
||||
disable_bots: bool
|
||||
enable_message_poll_fallback: bool
|
||||
force_channel_slot_reconfigure: bool
|
||||
load_with_autoevict: bool
|
||||
|
||||
|
||||
class DebugAppSettings(BaseModel):
|
||||
max_radio_contacts: int
|
||||
auto_decrypt_dm_on_advert: bool
|
||||
@@ -123,6 +137,7 @@ class DebugSnapshotResponse(BaseModel):
|
||||
captured_at: str
|
||||
system: DebugSystemInfo
|
||||
application: DebugApplicationInfo
|
||||
environment: DebugEnvironment
|
||||
health: DebugHealthSummary
|
||||
settings: DebugAppSettings
|
||||
runtime: DebugRuntimeInfo
|
||||
@@ -203,6 +218,23 @@ def _coerce_live_max_channels(device_info: dict[str, Any] | None) -> int | None:
|
||||
return None
|
||||
|
||||
|
||||
def _build_environment() -> DebugEnvironment:
|
||||
return DebugEnvironment(
|
||||
connection_type=settings.connection_type,
|
||||
serial_port=settings.serial_port,
|
||||
serial_baudrate=settings.serial_baudrate,
|
||||
tcp_host=settings.tcp_host,
|
||||
tcp_port=settings.tcp_port,
|
||||
ble_address=settings.ble_address,
|
||||
log_level=settings.log_level,
|
||||
database_path=settings.database_path,
|
||||
disable_bots=settings.disable_bots,
|
||||
enable_message_poll_fallback=settings.enable_message_poll_fallback,
|
||||
force_channel_slot_reconfigure=settings.force_channel_slot_reconfigure,
|
||||
load_with_autoevict=settings.load_with_autoevict,
|
||||
)
|
||||
|
||||
|
||||
def _build_debug_app_settings(app_settings: AppSettings) -> DebugAppSettings:
|
||||
return DebugAppSettings(
|
||||
max_radio_contacts=app_settings.max_radio_contacts,
|
||||
@@ -393,6 +425,7 @@ async def debug_support_snapshot() -> DebugSnapshotResponse:
|
||||
captured_at=datetime.now(UTC).isoformat(),
|
||||
system=_build_system_info(),
|
||||
application=_build_application_info(),
|
||||
environment=_build_environment(),
|
||||
health=_build_debug_health_summary(health_data, radio_state=radio_state),
|
||||
settings=_build_debug_app_settings(app_settings),
|
||||
runtime=DebugRuntimeInfo(
|
||||
@@ -404,10 +437,6 @@ async def debug_support_snapshot() -> DebugSnapshotResponse:
|
||||
path_hash_mode_supported=radio_runtime.path_hash_mode_supported,
|
||||
channel_slot_reuse_enabled=radio_runtime.channel_slot_reuse_enabled(),
|
||||
channel_send_cache_capacity=radio_runtime.get_channel_send_cache_capacity(),
|
||||
remediation_flags={
|
||||
"enable_message_poll_fallback": settings.enable_message_poll_fallback,
|
||||
"force_channel_slot_reconfigure": settings.force_channel_slot_reconfigure,
|
||||
},
|
||||
),
|
||||
database=DebugDatabaseInfo(
|
||||
total_dms=message_totals["total_dms"],
|
||||
|
||||
@@ -259,6 +259,25 @@ def _validate_apprise_config(config: dict) -> None:
|
||||
if not urls or not urls.strip():
|
||||
raise HTTPException(status_code=400, detail="At least one Apprise URL is required")
|
||||
|
||||
from app.fanout.apprise_mod import FORMAT_VARIABLES, _apply_format
|
||||
|
||||
dummy_vars: dict[str, str] = dict.fromkeys(FORMAT_VARIABLES, "test")
|
||||
for field in ("body_format_dm", "body_format_channel"):
|
||||
value = config.get(field)
|
||||
if value is not None and not isinstance(value, str):
|
||||
raise HTTPException(status_code=400, detail=f"{field} must be a string")
|
||||
if isinstance(value, str) and value.strip():
|
||||
try:
|
||||
_apply_format(value, dummy_vars)
|
||||
except Exception:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Invalid format string in {field}"
|
||||
) from None
|
||||
|
||||
markdown_format = config.get("markdown_format")
|
||||
if markdown_format is not None:
|
||||
config["markdown_format"] = bool(markdown_format)
|
||||
|
||||
|
||||
def _validate_webhook_config(config: dict) -> None:
|
||||
"""Validate webhook config blob."""
|
||||
|
||||
@@ -40,6 +40,8 @@ class RadioStatsSnapshot(BaseModel):
|
||||
# Core stats
|
||||
battery_mv: int | None = None
|
||||
uptime_secs: int | None = None
|
||||
queue_len: int | None = None
|
||||
errors: int | None = None
|
||||
# Radio stats
|
||||
noise_floor: int | None = None
|
||||
last_rssi: int | None = None
|
||||
@@ -155,6 +157,8 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
|
||||
"timestamp": raw_stats.get("timestamp"),
|
||||
"battery_mv": raw_stats.get("battery_mv"),
|
||||
"uptime_secs": raw_stats.get("uptime_secs"),
|
||||
"queue_len": raw_stats.get("queue_len"),
|
||||
"errors": raw_stats.get("errors"),
|
||||
"noise_floor": raw_stats.get("noise_floor"),
|
||||
"last_rssi": raw_stats.get("last_rssi"),
|
||||
"last_snr": raw_stats.get("last_snr"),
|
||||
|
||||
@@ -128,11 +128,15 @@ async def get_raw_packet(packet_id: int) -> RawPacketDetail:
|
||||
sender=message.sender_name,
|
||||
channel_key=message.conversation_key,
|
||||
contact_key=message.sender_key,
|
||||
sender_timestamp=message.sender_timestamp,
|
||||
message=message.text,
|
||||
)
|
||||
else:
|
||||
decrypted_info = RawPacketDecryptedInfo(
|
||||
sender=message.sender_name,
|
||||
contact_key=message.conversation_key,
|
||||
sender_timestamp=message.sender_timestamp,
|
||||
message=message.text,
|
||||
)
|
||||
|
||||
return RawPacketDetail(
|
||||
|
||||
+5
-5
@@ -48,7 +48,7 @@ async def vapid_public_key() -> VapidPublicKeyResponse:
|
||||
"""Return the VAPID public key for browser PushManager.subscribe()."""
|
||||
key = get_vapid_public_key()
|
||||
if not key:
|
||||
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||
raise HTTPException(status_code=423, detail="VAPID keys not initialized")
|
||||
return VapidPublicKeyResponse(public_key=key)
|
||||
|
||||
|
||||
@@ -103,7 +103,7 @@ async def test_push(subscription_id: str) -> dict:
|
||||
|
||||
vapid_key = get_vapid_private_key()
|
||||
if not vapid_key:
|
||||
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||
raise HTTPException(status_code=423, detail="VAPID keys not initialized")
|
||||
|
||||
payload = json.dumps(
|
||||
{
|
||||
@@ -127,7 +127,7 @@ async def test_push(subscription_id: str) -> dict:
|
||||
)
|
||||
return {"status": "sent"}
|
||||
except TimeoutError:
|
||||
raise HTTPException(status_code=504, detail="Push delivery timed out") from None
|
||||
raise HTTPException(status_code=408, detail="Push delivery timed out") from None
|
||||
except WebPushException as e:
|
||||
status_code = getattr(getattr(e, "response", None), "status_code", 0)
|
||||
if status_code in (403, 404, 410):
|
||||
@@ -143,10 +143,10 @@ async def test_push(subscription_id: str) -> dict:
|
||||
"Re-enable push from a conversation header.",
|
||||
) from None
|
||||
logger.warning("Test push failed: %s", e)
|
||||
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||
raise HTTPException(status_code=422, detail=f"Push delivery failed: {e}") from None
|
||||
except Exception as e:
|
||||
logger.warning("Test push failed: %s", e)
|
||||
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||
raise HTTPException(status_code=422, detail=f"Push delivery failed: {e}") from None
|
||||
|
||||
|
||||
# ── Global push conversation management ──────────────────────────────────
|
||||
|
||||
+39
-15
@@ -338,7 +338,7 @@ async def get_radio_config() -> RadioConfigResponse:
|
||||
|
||||
info = mc.self_info
|
||||
if not info:
|
||||
raise HTTPException(status_code=503, detail="Radio info not available")
|
||||
raise HTTPException(status_code=423, detail="Radio info not available")
|
||||
|
||||
adv_loc_policy = info.get("adv_loc_policy", 1)
|
||||
advert_location_source: AdvertLocationSource = "off" if adv_loc_policy == 0 else "current"
|
||||
@@ -380,11 +380,35 @@ async def update_radio_config(update: RadioConfigUpdate) -> RadioConfigResponse:
|
||||
except PathHashModeUnsupportedError as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
except RadioCommandRejectedError as exc:
|
||||
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
raise HTTPException(status_code=422, detail=str(exc)) from exc
|
||||
|
||||
return await get_radio_config()
|
||||
|
||||
|
||||
@router.get("/private-key")
|
||||
async def get_private_key() -> dict:
|
||||
"""Return the in-memory private key (exported from radio on startup).
|
||||
|
||||
Gated behind MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=true.
|
||||
"""
|
||||
from app.config import settings
|
||||
from app.keystore import get_private_key as ks_get
|
||||
|
||||
if not settings.enable_local_private_key_export:
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Private key export is disabled (set MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=true)",
|
||||
)
|
||||
|
||||
key = ks_get()
|
||||
if key is None:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="Private key not available (not exported from radio)",
|
||||
)
|
||||
return {"private_key": key.hex()}
|
||||
|
||||
|
||||
@router.put("/private-key")
|
||||
async def set_private_key(update: PrivateKeyUpdate) -> dict:
|
||||
"""Set the radio's private key. This is write-only."""
|
||||
@@ -406,7 +430,7 @@ async def set_private_key(update: PrivateKeyUpdate) -> dict:
|
||||
export_and_store_private_key_fn=export_and_store_private_key,
|
||||
)
|
||||
except (RadioCommandRejectedError, KeystoreRefreshError) as exc:
|
||||
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
raise HTTPException(status_code=422, detail=str(exc)) from exc
|
||||
|
||||
return {"status": "ok"}
|
||||
|
||||
@@ -430,7 +454,7 @@ async def send_advertisement(request: RadioAdvertiseRequest | None = None) -> di
|
||||
success = await do_send_advertisement(mc, force=True, mode=mode)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to send {mode} advertisement")
|
||||
raise HTTPException(status_code=422, detail=f"Failed to send {mode} advertisement")
|
||||
|
||||
return {"status": "ok"}
|
||||
|
||||
@@ -462,7 +486,7 @@ async def discover_mesh(request: RadioDiscoveryRequest) -> RadioDiscoveryRespons
|
||||
tag=tag,
|
||||
)
|
||||
if send_result is None or send_result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=500, detail="Failed to start mesh discovery")
|
||||
raise HTTPException(status_code=422, detail="Failed to start mesh discovery")
|
||||
|
||||
deadline = _monotonic() + DISCOVERY_WINDOW_SECONDS
|
||||
results_by_key: dict[str, RadioDiscoveryResult] = {}
|
||||
@@ -514,7 +538,7 @@ async def trace_path(request: RadioTraceRequest) -> RadioTraceResponse:
|
||||
async with radio_manager.radio_operation("radio_trace", pause_polling=True) as mc:
|
||||
local_public_key = str((mc.self_info or {}).get("public_key") or "").lower()
|
||||
if len(local_public_key) != 64:
|
||||
raise HTTPException(status_code=503, detail="Local radio public key is unavailable")
|
||||
raise HTTPException(status_code=423, detail="Local radio public key is unavailable")
|
||||
local_name = (mc.self_info or {}).get("name")
|
||||
|
||||
response_task = asyncio.create_task(
|
||||
@@ -531,13 +555,13 @@ async def trace_path(request: RadioTraceRequest) -> RadioTraceResponse:
|
||||
flags=trace_flags,
|
||||
)
|
||||
if send_result is None or send_result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=500, detail="Failed to send trace")
|
||||
raise HTTPException(status_code=422, detail="Failed to send trace")
|
||||
|
||||
timeout_seconds = _trace_timeout_seconds(send_result)
|
||||
try:
|
||||
event = await asyncio.wait_for(response_task, timeout=timeout_seconds)
|
||||
except TimeoutError as exc:
|
||||
raise HTTPException(status_code=504, detail="No trace response heard") from exc
|
||||
raise HTTPException(status_code=408, detail="No trace response heard") from exc
|
||||
finally:
|
||||
if not response_task.done():
|
||||
response_task.cancel()
|
||||
@@ -545,12 +569,12 @@ async def trace_path(request: RadioTraceRequest) -> RadioTraceResponse:
|
||||
await response_task
|
||||
|
||||
if event is None:
|
||||
raise HTTPException(status_code=504, detail="No trace response heard")
|
||||
raise HTTPException(status_code=408, detail="No trace response heard")
|
||||
|
||||
payload = event.payload if isinstance(event.payload, dict) else {}
|
||||
path_len = payload.get("path_len")
|
||||
if not isinstance(path_len, int):
|
||||
raise HTTPException(status_code=500, detail="Trace response was malformed")
|
||||
raise HTTPException(status_code=422, detail="Trace response was malformed")
|
||||
|
||||
raw_path = payload.get("path")
|
||||
path_nodes = raw_path if isinstance(raw_path, list) else []
|
||||
@@ -564,7 +588,7 @@ async def trace_path(request: RadioTraceRequest) -> RadioTraceResponse:
|
||||
hashed_nodes = path_nodes[:-1] if final_local_node is not None else path_nodes
|
||||
|
||||
if len(hashed_nodes) < len(trace_nodes):
|
||||
raise HTTPException(status_code=500, detail="Trace response was incomplete")
|
||||
raise HTTPException(status_code=422, detail="Trace response was incomplete")
|
||||
|
||||
nodes: list[RadioTraceNode] = []
|
||||
for index, trace_node in enumerate(trace_nodes):
|
||||
@@ -617,13 +641,13 @@ async def _attempt_reconnect() -> dict:
|
||||
except Exception as e:
|
||||
logger.exception("Post-connect setup failed after reconnect")
|
||||
raise HTTPException(
|
||||
status_code=503,
|
||||
status_code=423,
|
||||
detail=f"Radio connected but setup failed: {e}",
|
||||
) from e
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=503, detail="Failed to reconnect. Check radio connection and power."
|
||||
status_code=423, detail="Failed to reconnect. Check radio connection and power."
|
||||
)
|
||||
|
||||
return {"status": "ok", "message": "Reconnected successfully", "connected": True}
|
||||
@@ -678,14 +702,14 @@ async def reconnect_radio() -> dict:
|
||||
logger.info("Radio connected but setup incomplete, retrying setup")
|
||||
try:
|
||||
if not await _prepare_connected(broadcast_on_success=True):
|
||||
raise HTTPException(status_code=503, detail="Radio connection is paused")
|
||||
raise HTTPException(status_code=423, detail="Radio connection is paused")
|
||||
return {"status": "ok", "message": "Setup completed", "connected": True}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception("Post-connect setup failed")
|
||||
raise HTTPException(
|
||||
status_code=503,
|
||||
status_code=423,
|
||||
detail=f"Radio connected but setup failed: {e}",
|
||||
) from e
|
||||
|
||||
|
||||
@@ -113,7 +113,7 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
||||
logger.debug("LPP sensor fetch failed for %s (non-fatal): %s", public_key[:12], e)
|
||||
|
||||
if status is None:
|
||||
raise HTTPException(status_code=504, detail="No status response from repeater")
|
||||
raise HTTPException(status_code=408, detail="No status response from repeater")
|
||||
|
||||
response = RepeaterStatusResponse(
|
||||
battery_volts=status.get("bat", 0) / 1000.0,
|
||||
@@ -133,6 +133,7 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
||||
flood_dups=status.get("flood_dups", 0),
|
||||
direct_dups=status.get("direct_dups", 0),
|
||||
full_events=status.get("full_evts", 0),
|
||||
recv_errors=status.get("recv_errors"),
|
||||
)
|
||||
|
||||
# Record to telemetry history as a JSON blob (best-effort)
|
||||
@@ -221,7 +222,7 @@ async def repeater_lpp_telemetry(public_key: str) -> RepeaterLppTelemetryRespons
|
||||
)
|
||||
|
||||
if telemetry is None:
|
||||
raise HTTPException(status_code=504, detail="No telemetry response from repeater")
|
||||
raise HTTPException(status_code=408, detail="No telemetry response from repeater")
|
||||
|
||||
sensors: list[LppSensor] = []
|
||||
for entry in telemetry:
|
||||
|
||||
@@ -58,7 +58,7 @@ async def room_status(public_key: str) -> RepeaterStatusResponse:
|
||||
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
||||
|
||||
if status is None:
|
||||
raise HTTPException(status_code=504, detail="No status response from room server")
|
||||
raise HTTPException(status_code=408, detail="No status response from room server")
|
||||
|
||||
return RepeaterStatusResponse(
|
||||
battery_volts=status.get("bat", 0) / 1000.0,
|
||||
@@ -78,6 +78,7 @@ async def room_status(public_key: str) -> RepeaterStatusResponse:
|
||||
flood_dups=status.get("flood_dups", 0),
|
||||
direct_dups=status.get("direct_dups", 0),
|
||||
full_events=status.get("full_evts", 0),
|
||||
recv_errors=status.get("recv_errors"),
|
||||
)
|
||||
|
||||
|
||||
@@ -97,7 +98,7 @@ async def room_lpp_telemetry(public_key: str) -> RepeaterLppTelemetryResponse:
|
||||
)
|
||||
|
||||
if telemetry is None:
|
||||
raise HTTPException(status_code=504, detail="No telemetry response from room server")
|
||||
raise HTTPException(status_code=408, detail="No telemetry response from room server")
|
||||
|
||||
sensors = [
|
||||
LppSensor(
|
||||
|
||||
@@ -291,7 +291,7 @@ async def send_contact_cli_command(
|
||||
|
||||
if send_result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to send command: {send_result.payload}"
|
||||
status_code=422, detail=f"Failed to send command: {send_result.payload}"
|
||||
)
|
||||
|
||||
response_event = await fetch_contact_cli_response(mc, contact.public_key[:12])
|
||||
|
||||
+44
-5
@@ -73,6 +73,13 @@ class AppSettingsUpdate(BaseModel):
|
||||
"based on the current tracked-repeater count."
|
||||
),
|
||||
)
|
||||
telemetry_routed_hourly: bool | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"When enabled, tracked repeaters with a direct or routed (non-flood) "
|
||||
"path are polled every hour instead of on the normal scheduled interval."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class BlockKeyRequest(BaseModel):
|
||||
@@ -126,7 +133,18 @@ class TelemetrySchedule(BaseModel):
|
||||
max_tracked: int = Field(description="Maximum number of repeaters that can be tracked")
|
||||
next_run_at: int | None = Field(
|
||||
default=None,
|
||||
description="Unix timestamp (UTC seconds) of the next scheduled cycle",
|
||||
description="Unix timestamp (UTC seconds) of the next scheduled flood cycle",
|
||||
)
|
||||
routed_hourly: bool = Field(
|
||||
default=False,
|
||||
description="Whether hourly routed/direct-path telemetry is enabled",
|
||||
)
|
||||
next_routed_run_at: int | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Unix timestamp (UTC seconds) of the next hourly routed/direct check, "
|
||||
"or None when routed_hourly is off or no repeaters are tracked"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -140,20 +158,27 @@ class TrackedTelemetryResponse(BaseModel):
|
||||
schedule: TelemetrySchedule = Field(description="Current scheduling state")
|
||||
|
||||
|
||||
def _build_schedule(tracked_count: int, preferred_hours: int | None) -> TelemetrySchedule:
|
||||
def _build_schedule(
|
||||
tracked_count: int,
|
||||
preferred_hours: int | None,
|
||||
routed_hourly: bool = False,
|
||||
) -> TelemetrySchedule:
|
||||
pref = (
|
||||
preferred_hours
|
||||
if preferred_hours in TELEMETRY_INTERVAL_OPTIONS_HOURS
|
||||
else DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||
)
|
||||
effective = clamp_telemetry_interval(pref, tracked_count)
|
||||
has_tracked = tracked_count > 0
|
||||
return TelemetrySchedule(
|
||||
preferred_hours=pref,
|
||||
effective_hours=effective,
|
||||
options=legal_interval_options(tracked_count),
|
||||
tracked_count=tracked_count,
|
||||
max_tracked=MAX_TRACKED_TELEMETRY_REPEATERS,
|
||||
next_run_at=next_run_timestamp_utc(effective) if tracked_count > 0 else None,
|
||||
next_run_at=next_run_timestamp_utc(effective) if has_tracked else None,
|
||||
routed_hourly=routed_hourly,
|
||||
next_routed_run_at=(next_run_timestamp_utc(1) if has_tracked and routed_hourly else None),
|
||||
)
|
||||
|
||||
|
||||
@@ -216,6 +241,11 @@ async def update_settings(update: AppSettingsUpdate) -> AppSettings:
|
||||
logger.info("Updating telemetry_interval_hours to %d", raw_interval)
|
||||
kwargs["telemetry_interval_hours"] = raw_interval
|
||||
|
||||
# Telemetry routed hourly
|
||||
if update.telemetry_routed_hourly is not None:
|
||||
logger.info("Updating telemetry_routed_hourly to %s", update.telemetry_routed_hourly)
|
||||
kwargs["telemetry_routed_hourly"] = update.telemetry_routed_hourly
|
||||
|
||||
# Flood scope
|
||||
flood_scope_changed = False
|
||||
if update.flood_scope is not None:
|
||||
@@ -328,7 +358,11 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
||||
return TrackedTelemetryResponse(
|
||||
tracked_telemetry_repeaters=new_list,
|
||||
names=await _resolve_names(new_list),
|
||||
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||
schedule=_build_schedule(
|
||||
len(new_list),
|
||||
settings.telemetry_interval_hours,
|
||||
settings.telemetry_routed_hourly,
|
||||
),
|
||||
)
|
||||
|
||||
# Validate it's a repeater
|
||||
@@ -355,7 +389,11 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
||||
return TrackedTelemetryResponse(
|
||||
tracked_telemetry_repeaters=new_list,
|
||||
names=await _resolve_names(new_list),
|
||||
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||
schedule=_build_schedule(
|
||||
len(new_list),
|
||||
settings.telemetry_interval_hours,
|
||||
settings.telemetry_routed_hourly,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -371,4 +409,5 @@ async def get_telemetry_schedule() -> TelemetrySchedule:
|
||||
return _build_schedule(
|
||||
len(app_settings.tracked_telemetry_repeaters),
|
||||
app_settings.telemetry_interval_hours,
|
||||
app_settings.telemetry_routed_hourly,
|
||||
)
|
||||
|
||||
@@ -159,7 +159,7 @@ async def send_channel_message_with_effective_scope(
|
||||
override_result.payload,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail=(
|
||||
f"Failed to apply regional override {override_scope!r} before {action_label}: "
|
||||
f"{override_result.payload}"
|
||||
@@ -189,7 +189,7 @@ async def send_channel_message_with_effective_scope(
|
||||
phm_result.payload,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail=(
|
||||
f"Failed to apply path hash mode override before {action_label}: "
|
||||
f"{phm_result.payload}"
|
||||
@@ -233,7 +233,7 @@ async def send_channel_message_with_effective_scope(
|
||||
set_result.payload,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail=f"Failed to configure channel on radio before {action_label}",
|
||||
)
|
||||
radio_manager.note_channel_slot_loaded(channel_key, channel_slot)
|
||||
@@ -256,8 +256,14 @@ async def send_channel_message_with_effective_scope(
|
||||
action_label,
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
raise HTTPException(status_code=408, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
if send_result.type == EventType.ERROR:
|
||||
logger.error(
|
||||
"Radio returned error during %s for channel %s: %s",
|
||||
action_label,
|
||||
channel.name,
|
||||
send_result.payload,
|
||||
)
|
||||
radio_manager.invalidate_cached_channel_slot(channel_key)
|
||||
else:
|
||||
radio_manager.note_channel_slot_used(channel_key)
|
||||
@@ -592,10 +598,10 @@ async def send_direct_message_to_contact(
|
||||
"No response from radio after direct send to %s; send outcome is unknown",
|
||||
contact.public_key[:12],
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
raise HTTPException(status_code=408, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to send message: {result.payload}")
|
||||
raise HTTPException(status_code=422, detail=f"Failed to send message: {result.payload}")
|
||||
|
||||
message = await create_outgoing_direct_message(
|
||||
conversation_key=contact.public_key.lower(),
|
||||
@@ -607,7 +613,7 @@ async def send_direct_message_to_contact(
|
||||
)
|
||||
if message is None:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail="Failed to store outgoing message - unexpected duplicate",
|
||||
)
|
||||
finally:
|
||||
@@ -620,7 +626,7 @@ async def send_direct_message_to_contact(
|
||||
)
|
||||
|
||||
if sent_at is None or sender_timestamp is None or message is None or result is None:
|
||||
raise HTTPException(status_code=500, detail="Failed to store outgoing message")
|
||||
raise HTTPException(status_code=422, detail="Failed to store outgoing message")
|
||||
|
||||
await contact_repository.update_last_contacted(contact.public_key.lower(), sent_at)
|
||||
|
||||
@@ -785,7 +791,7 @@ async def send_channel_message_to_channel(
|
||||
)
|
||||
if outgoing_message is None:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail="Failed to store outgoing message - unexpected duplicate",
|
||||
)
|
||||
|
||||
@@ -807,11 +813,11 @@ async def send_channel_message_to_channel(
|
||||
"No response from radio after channel send to %s; send outcome is unknown",
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
raise HTTPException(status_code=408, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to send message: {result.payload}"
|
||||
status_code=422, detail=f"Failed to send message: {result.payload}"
|
||||
)
|
||||
except Exception:
|
||||
if outgoing_message is not None:
|
||||
@@ -828,7 +834,7 @@ async def send_channel_message_to_channel(
|
||||
)
|
||||
|
||||
if sent_at is None or sender_timestamp is None or outgoing_message is None:
|
||||
raise HTTPException(status_code=500, detail="Failed to store outgoing message")
|
||||
raise HTTPException(status_code=422, detail="Failed to store outgoing message")
|
||||
|
||||
outgoing_message = await build_stored_outgoing_channel_message(
|
||||
message_id=outgoing_message.id,
|
||||
@@ -856,7 +862,7 @@ async def send_channel_message_to_channel(
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
pass # Never let watchdog setup failure break the send
|
||||
logger.error("Echo watchdog setup failed", exc_info=True)
|
||||
|
||||
return outgoing_message
|
||||
|
||||
@@ -922,7 +928,7 @@ async def resend_channel_message_record(
|
||||
)
|
||||
if new_message is None:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail="Failed to store resent message - unexpected duplicate",
|
||||
)
|
||||
|
||||
@@ -943,10 +949,10 @@ async def resend_channel_message_record(
|
||||
"No response from radio after channel resend to %s; send outcome is unknown",
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
raise HTTPException(status_code=408, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
status_code=422,
|
||||
detail=f"Failed to resend message: {result.payload}",
|
||||
)
|
||||
except Exception:
|
||||
@@ -965,7 +971,7 @@ async def resend_channel_message_record(
|
||||
|
||||
if new_timestamp:
|
||||
if sent_at is None or new_message is None:
|
||||
raise HTTPException(status_code=500, detail="Failed to assign resend timestamp")
|
||||
raise HTTPException(status_code=422, detail="Failed to assign resend timestamp")
|
||||
|
||||
new_message = await build_stored_outgoing_channel_message(
|
||||
message_id=new_message.id,
|
||||
|
||||
@@ -52,12 +52,12 @@ class RadioRuntime:
|
||||
def require_connected(self):
|
||||
"""Return MeshCore when available, mirroring existing HTTP semantics."""
|
||||
if self.is_setup_in_progress:
|
||||
raise HTTPException(status_code=503, detail="Radio is initializing")
|
||||
raise HTTPException(status_code=423, detail="Radio is initializing")
|
||||
if not self.is_connected:
|
||||
raise HTTPException(status_code=503, detail="Radio not connected")
|
||||
raise HTTPException(status_code=423, detail="Radio not connected")
|
||||
mc = self.meshcore
|
||||
if mc is None:
|
||||
raise HTTPException(status_code=503, detail="Radio not connected")
|
||||
raise HTTPException(status_code=423, detail="Radio not connected")
|
||||
return mc
|
||||
|
||||
@asynccontextmanager
|
||||
|
||||
@@ -44,6 +44,7 @@ services:
|
||||
# MESHCORE_DISABLE_BOTS: "true"
|
||||
# MESHCORE_BASIC_AUTH_USERNAME: changeme
|
||||
# MESHCORE_BASIC_AUTH_PASSWORD: changeme
|
||||
# MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT: "false"
|
||||
|
||||
# Logging
|
||||
# MESHCORE_LOG_LEVEL: INFO
|
||||
|
||||
+3
-6
@@ -75,7 +75,6 @@ frontend/src/
|
||||
├── utils/
|
||||
│ ├── urlHash.ts # Hash parsing and encoding
|
||||
│ ├── conversationState.ts # State keys, in-memory + localStorage helpers
|
||||
│ ├── favorites.ts # LocalStorage migration for favorites
|
||||
│ ├── messageParser.ts # Message text → rendered segments
|
||||
│ ├── pathUtils.ts # Distance/validation helpers for paths + map
|
||||
│ ├── pubkey.ts # getContactDisplayName (12-char prefix fallback)
|
||||
@@ -132,6 +131,9 @@ frontend/src/
|
||||
│ ├── ServerLoginStatusBanner.tsx # Shared repeater/room login state banner
|
||||
│ ├── ChannelInfoPane.tsx # Channel detail sheet (stats, top senders)
|
||||
│ ├── ChannelFloodScopeOverrideModal.tsx # Per-channel flood-scope override editor
|
||||
│ ├── ChannelPathHashModeOverrideModal.tsx # Per-channel path hash mode override editor
|
||||
│ ├── BulkAddChannelResultModal.tsx # Results dialog for bulk channel creation
|
||||
│ ├── CommandPalette.tsx # Command palette overlay
|
||||
│ ├── DirectTraceIcon.tsx # Shared direct-trace glyph used in header/dashboard
|
||||
│ ├── NeighborsMiniMap.tsx # Leaflet mini-map for repeater neighbor locations
|
||||
│ ├── settings/
|
||||
@@ -178,7 +180,6 @@ frontend/src/
|
||||
├── prefetch.test.ts
|
||||
├── rawPacketDetailModal.test.tsx
|
||||
├── rawPacketFeedView.test.tsx
|
||||
├── radioPresets.test.ts
|
||||
├── rawPacketIdentity.test.ts
|
||||
├── repeaterDashboard.test.tsx
|
||||
├── repeaterFormatters.test.ts
|
||||
@@ -350,10 +351,6 @@ It falls back to a 12-char prefix when `name` is missing.
|
||||
|
||||
Distance/validation helpers used by path + map UI.
|
||||
|
||||
### `utils/favorites.ts`
|
||||
|
||||
LocalStorage migration helpers for favorites; canonical favorites are server-side.
|
||||
|
||||
## Types and Contracts (`types.ts`)
|
||||
|
||||
`AppSettings` currently includes:
|
||||
|
||||
Generated
+6
-6
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"version": "3.12.0",
|
||||
"version": "3.12.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"version": "3.12.0",
|
||||
"version": "3.12.3",
|
||||
"dependencies": {
|
||||
"@codemirror/lang-python": "^6.2.1",
|
||||
"@codemirror/theme-one-dark": "^6.1.3",
|
||||
@@ -53,7 +53,7 @@
|
||||
"eslint": "^9.17.0",
|
||||
"eslint-plugin-react-hooks": "^5.1.0",
|
||||
"jsdom": "^25.0.0",
|
||||
"postcss": "^8.5.6",
|
||||
"postcss": "^8.5.10",
|
||||
"prettier": "^3.4.2",
|
||||
"tailwindcss": "^3.4.19",
|
||||
"typescript": "^5.6.3",
|
||||
@@ -5619,9 +5619,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.5.8",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz",
|
||||
"integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==",
|
||||
"version": "8.5.10",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.10.tgz",
|
||||
"integrity": "sha512-pMMHxBOZKFU6HgAZ4eyGnwXF/EvPGGqUr0MnZ5+99485wwW41kW91A4LOGxSHhgugZmSChL5AlElNdwlNgcnLQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"private": true,
|
||||
"version": "3.12.0",
|
||||
"version": "3.13.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -61,7 +61,7 @@
|
||||
"eslint": "^9.17.0",
|
||||
"eslint-plugin-react-hooks": "^5.1.0",
|
||||
"jsdom": "^25.0.0",
|
||||
"postcss": "^8.5.6",
|
||||
"postcss": "^8.5.10",
|
||||
"prettier": "^3.4.2",
|
||||
"tailwindcss": "^3.4.19",
|
||||
"typescript": "^5.6.3",
|
||||
|
||||
+3
-2
@@ -96,6 +96,7 @@ export const api = {
|
||||
method: 'PATCH',
|
||||
body: JSON.stringify(config),
|
||||
}),
|
||||
getPrivateKey: () => fetchJson<{ private_key: string }>('/radio/private-key'),
|
||||
setPrivateKey: (privateKey: string) =>
|
||||
fetchJson<{ status: string }>('/radio/private-key', {
|
||||
method: 'PUT',
|
||||
@@ -157,10 +158,10 @@ export const api = {
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ public_keys: publicKeys }),
|
||||
}),
|
||||
createContact: (publicKey: string, name?: string, tryHistorical?: boolean) =>
|
||||
createContact: (publicKey: string, name?: string, tryHistorical?: boolean, type?: number) =>
|
||||
fetchJson<Contact>('/contacts', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ public_key: publicKey, name, try_historical: tryHistorical }),
|
||||
body: JSON.stringify({ public_key: publicKey, name, type, try_historical: tryHistorical }),
|
||||
}),
|
||||
markContactRead: (publicKey: string) =>
|
||||
fetchJson<{ status: string; public_key: string }>(`/contacts/${publicKey}/mark-read`, {
|
||||
|
||||
@@ -1,4 +1,12 @@
|
||||
import { lazy, Suspense, useCallback, useRef, type ComponentProps } from 'react';
|
||||
import {
|
||||
lazy,
|
||||
Suspense,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useRef,
|
||||
useState,
|
||||
type ComponentProps,
|
||||
} from 'react';
|
||||
import { useSwipeable } from 'react-swipeable';
|
||||
|
||||
import { StatusBar } from './StatusBar';
|
||||
@@ -140,6 +148,26 @@ export function AppShell({
|
||||
crackerMounted.current = true;
|
||||
}
|
||||
|
||||
// Position toasts below the conversation header when in chat, otherwise below the status bar
|
||||
const TOAST_TOP_PADDING = 10;
|
||||
const [toastTopOffset, setToastTopOffset] = useState<number | undefined>(undefined);
|
||||
const hasLocalLabel = !!localLabel.text;
|
||||
const activeType = conversationPaneProps.activeConversation?.type;
|
||||
const activeId = conversationPaneProps.activeConversation?.id;
|
||||
useEffect(() => {
|
||||
const measure = () => {
|
||||
const anchor =
|
||||
document.querySelector('[data-toast-anchor="conversation"]') ??
|
||||
document.querySelector('[data-toast-anchor="statusbar"]');
|
||||
setToastTopOffset(
|
||||
anchor ? anchor.getBoundingClientRect().top + TOAST_TOP_PADDING : undefined
|
||||
);
|
||||
};
|
||||
measure();
|
||||
window.addEventListener('resize', measure);
|
||||
return () => window.removeEventListener('resize', measure);
|
||||
}, [hasLocalLabel, activeType, activeId, showSettings]);
|
||||
|
||||
const settingsSidebarContent = (
|
||||
<nav
|
||||
className="sidebar w-60 h-full min-h-0 overflow-hidden bg-card border-r border-border flex flex-col"
|
||||
@@ -220,6 +248,7 @@ export function AppShell({
|
||||
onSettingsClick={onToggleSettingsView}
|
||||
onMenuClick={showSettings ? undefined : () => onSidebarOpenChange(true)}
|
||||
/>
|
||||
<div data-toast-anchor="statusbar" aria-hidden="true" />
|
||||
|
||||
<div className="flex flex-1 overflow-hidden">
|
||||
<div className="hidden md:block min-h-0 overflow-hidden">{activeSidebarContent}</div>
|
||||
@@ -344,7 +373,11 @@ export function AppShell({
|
||||
<SecurityWarningModal health={statusProps.health} />
|
||||
<ContactInfoPane {...contactInfoPaneProps} />
|
||||
<ChannelInfoPane {...channelInfoPaneProps} />
|
||||
<Toaster position="top-right" />
|
||||
<Toaster
|
||||
position="top-right"
|
||||
offset={toastTopOffset !== undefined ? { top: toastTopOffset } : undefined}
|
||||
mobileOffset={toastTopOffset !== undefined ? { top: toastTopOffset } : undefined}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -326,6 +326,7 @@ export function ConversationPane({
|
||||
{activeContactIsRoom && activeContact && (
|
||||
<RoomServerPanel contact={activeContact} onAuthenticatedChange={setRoomAuthenticated} />
|
||||
)}
|
||||
{showRoomChat && <div data-toast-anchor="conversation" aria-hidden="true" />}
|
||||
{showRoomChat && (
|
||||
<MessageList
|
||||
key={activeConversation.id}
|
||||
|
||||
@@ -4,14 +4,20 @@ import {
|
||||
useImperativeHandle,
|
||||
forwardRef,
|
||||
useRef,
|
||||
useEffect,
|
||||
useMemo,
|
||||
type ChangeEvent,
|
||||
type FormEvent,
|
||||
type KeyboardEvent,
|
||||
} from 'react';
|
||||
import { Input } from './ui/input';
|
||||
import { Button } from './ui/button';
|
||||
import { toast } from './ui/sonner';
|
||||
import { cn } from '@/lib/utils';
|
||||
import {
|
||||
getTextReplaceEnabled,
|
||||
getTextReplaceMapJson,
|
||||
applyTextReplacements,
|
||||
} from '../utils/textReplace';
|
||||
|
||||
// MeshCore message size limits (empirically determined from LoRa packet constraints)
|
||||
// Direct delivery allows ~156 bytes; multi-hop requires buffer for path growth.
|
||||
@@ -53,19 +59,32 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
|
||||
) {
|
||||
const [text, setText] = useState('');
|
||||
const [sending, setSending] = useState(false);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null);
|
||||
|
||||
/** Resize textarea to fit content, clamped between 1 row and ~6 rows. */
|
||||
const autoResize = useCallback(() => {
|
||||
const el = textareaRef.current;
|
||||
if (!el) return;
|
||||
el.style.height = 'auto';
|
||||
// Clamp: min 40px (≈1 row), max 160px (≈6 rows)
|
||||
el.style.height = `${Math.min(el.scrollHeight, 160)}px`;
|
||||
}, []);
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
appendText: (appendedText: string) => {
|
||||
setText((prev) => prev + appendedText);
|
||||
// Focus the input after appending
|
||||
inputRef.current?.focus();
|
||||
textareaRef.current?.focus();
|
||||
},
|
||||
focus: () => {
|
||||
inputRef.current?.focus();
|
||||
textareaRef.current?.focus();
|
||||
},
|
||||
}));
|
||||
|
||||
// Re-measure height whenever text changes (covers programmatic updates like appendText)
|
||||
useEffect(() => {
|
||||
autoResize();
|
||||
}, [text, autoResize]);
|
||||
|
||||
// Calculate character limits based on conversation type
|
||||
const limits = useMemo(() => {
|
||||
if (conversationType === 'contact') {
|
||||
@@ -133,18 +152,44 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
|
||||
} finally {
|
||||
setSending(false);
|
||||
}
|
||||
// Refocus after React re-enables the input
|
||||
setTimeout(() => inputRef.current?.focus(), 0);
|
||||
// Refocus after React re-enables the textarea
|
||||
setTimeout(() => textareaRef.current?.focus(), 0);
|
||||
},
|
||||
[text, sending, disabled, onSend]
|
||||
);
|
||||
|
||||
const handleChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
|
||||
const input = e.target;
|
||||
const raw = input.value;
|
||||
// Skip replacement during IME / dead-key composition to avoid garbling interim input
|
||||
if (!e.nativeEvent || (e.nativeEvent as InputEvent).isComposing) {
|
||||
setText(raw);
|
||||
return;
|
||||
}
|
||||
if (getTextReplaceEnabled()) {
|
||||
const result = applyTextReplacements(
|
||||
raw,
|
||||
input.selectionStart ?? raw.length,
|
||||
getTextReplaceMapJson()
|
||||
);
|
||||
if (result) {
|
||||
setText(result.text);
|
||||
// Schedule cursor restore after React flushes the new value
|
||||
const pos = result.cursor;
|
||||
requestAnimationFrame(() => input.setSelectionRange(pos, pos));
|
||||
return;
|
||||
}
|
||||
}
|
||||
setText(raw);
|
||||
}, []);
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: KeyboardEvent<HTMLInputElement>) => {
|
||||
(e: KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
handleSubmit(e as unknown as FormEvent);
|
||||
}
|
||||
// Shift+Enter falls through naturally and inserts a newline
|
||||
},
|
||||
[handleSubmit]
|
||||
);
|
||||
@@ -162,22 +207,28 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
|
||||
onSubmit={handleSubmit}
|
||||
autoComplete="off"
|
||||
>
|
||||
<div className="flex gap-2">
|
||||
<Input
|
||||
ref={inputRef}
|
||||
type="text"
|
||||
<div className="flex gap-2 items-end">
|
||||
<textarea
|
||||
ref={textareaRef}
|
||||
autoComplete="off"
|
||||
name="chat-message-input"
|
||||
aria-label={placeholder || 'Type a message'}
|
||||
data-lpignore="true"
|
||||
data-1p-ignore="true"
|
||||
data-bwignore="true"
|
||||
rows={1}
|
||||
value={text}
|
||||
onChange={(e) => setText(e.target.value)}
|
||||
onChange={handleChange}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder={placeholder || 'Type a message...'}
|
||||
disabled={disabled || sending}
|
||||
className="flex-1 min-w-0"
|
||||
className={cn(
|
||||
'flex-1 min-w-0 resize-none overflow-y-auto',
|
||||
'rounded-md border border-input bg-background px-3 py-2 text-base ring-offset-background',
|
||||
'placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
|
||||
'disabled:cursor-not-allowed disabled:opacity-50 md:text-sm'
|
||||
)}
|
||||
style={{ minHeight: '40px', maxHeight: '160px' }}
|
||||
/>
|
||||
<Button
|
||||
type="submit"
|
||||
|
||||
@@ -32,7 +32,12 @@ interface NewMessageModalProps {
|
||||
nonce: number;
|
||||
} | null;
|
||||
onClose: () => void;
|
||||
onCreateContact: (name: string, publicKey: string, tryHistorical: boolean) => Promise<void>;
|
||||
onCreateContact: (
|
||||
name: string,
|
||||
publicKey: string,
|
||||
tryHistorical: boolean,
|
||||
type?: number
|
||||
) => Promise<void>;
|
||||
onCreateChannel: (name: string, key: string, tryHistorical: boolean) => Promise<void>;
|
||||
onCreateHashtagChannel: (name: string, tryHistorical: boolean) => Promise<void>;
|
||||
onBulkAddHashtagChannels: (channelNames: string[], tryHistorical: boolean) => Promise<void>;
|
||||
@@ -91,6 +96,7 @@ export function NewMessageModal({
|
||||
}: NewMessageModalProps) {
|
||||
const [tab, setTab] = useState<Tab>('new-contact');
|
||||
const [name, setName] = useState('');
|
||||
const [contactType, setContactType] = useState(1);
|
||||
const [contactKey, setContactKey] = useState('');
|
||||
const [channelKey, setChannelKey] = useState('');
|
||||
const [bulkChannelText, setBulkChannelText] = useState('');
|
||||
@@ -103,6 +109,7 @@ export function NewMessageModal({
|
||||
|
||||
const resetForm = () => {
|
||||
setName('');
|
||||
setContactType(1);
|
||||
setContactKey('');
|
||||
setChannelKey('');
|
||||
setBulkChannelText('');
|
||||
@@ -161,7 +168,7 @@ export function NewMessageModal({
|
||||
setError('Name and public key are required');
|
||||
return;
|
||||
}
|
||||
await onCreateContact(name.trim(), contactKey.trim(), tryHistorical);
|
||||
await onCreateContact(name.trim(), contactKey.trim(), tryHistorical, contactType);
|
||||
} else if (tab === 'new-channel') {
|
||||
if (!name.trim() || !channelKey.trim()) {
|
||||
setError('Channel name and key are required');
|
||||
@@ -293,6 +300,19 @@ export function NewMessageModal({
|
||||
placeholder="64-character hex public key"
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="contact-type">Type</Label>
|
||||
<select
|
||||
id="contact-type"
|
||||
value={contactType}
|
||||
onChange={(e) => setContactType(Number(e.target.value))}
|
||||
className="block h-9 w-full rounded-md border border-input bg-background px-3 text-sm shadow-sm"
|
||||
>
|
||||
<option value={1}>Client</option>
|
||||
<option value={2}>Repeater</option>
|
||||
<option value={3}>Room Server</option>
|
||||
</select>
|
||||
</div>
|
||||
</TabsContent>
|
||||
|
||||
<TabsContent value="new-channel" className="mt-4 space-y-4">
|
||||
|
||||
@@ -300,6 +300,7 @@ export function RepeaterDashboard({
|
||||
/>
|
||||
)}
|
||||
</header>
|
||||
<div data-toast-anchor="conversation" aria-hidden="true" />
|
||||
|
||||
{/* Body */}
|
||||
<div className="flex-1 overflow-y-auto p-4">
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
||||
import { useMemo } from 'react';
|
||||
import { RepeaterPane, NotFetched, LppSensorRow, formatLppLabel } from './repeaterPaneShared';
|
||||
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
||||
|
||||
@@ -14,6 +15,19 @@ export function LppTelemetryPane({
|
||||
disabled?: boolean;
|
||||
}) {
|
||||
const { distanceUnit } = useDistanceUnit();
|
||||
|
||||
// Build disambiguated labels matching the telemetry history chart names
|
||||
const labels = useMemo(() => {
|
||||
if (!data) return [];
|
||||
const counts = new Map<string, number>();
|
||||
return data.sensors.map((s) => {
|
||||
const base = `${s.type_name}_${s.channel}`;
|
||||
const n = (counts.get(base) ?? 0) + 1;
|
||||
counts.set(base, n);
|
||||
return formatLppLabel(s.type_name) + ` Ch${s.channel}` + (n > 1 ? ` (${n})` : '');
|
||||
});
|
||||
}, [data]);
|
||||
|
||||
return (
|
||||
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||
{!data ? (
|
||||
@@ -23,7 +37,7 @@ export function LppTelemetryPane({
|
||||
) : (
|
||||
<div className="space-y-0.5">
|
||||
{data.sensors.map((sensor, i) => (
|
||||
<LppSensorRow key={i} sensor={sensor} unitPref={distanceUnit} />
|
||||
<LppSensorRow key={i} sensor={sensor} unitPref={distanceUnit} label={labels[i]} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -17,7 +17,12 @@ import type { TelemetryHistoryEntry, TelemetryLppSensor, Contact } from '../../t
|
||||
|
||||
const MAX_TRACKED = 8;
|
||||
|
||||
type BuiltinMetric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
||||
type BuiltinMetric =
|
||||
| 'battery_volts'
|
||||
| 'noise_floor_dbm'
|
||||
| 'packets'
|
||||
| 'recv_errors'
|
||||
| 'uptime_seconds';
|
||||
|
||||
interface MetricConfig {
|
||||
label: string;
|
||||
@@ -29,6 +34,7 @@ const BUILTIN_METRIC_CONFIG: Record<BuiltinMetric, MetricConfig> = {
|
||||
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
||||
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
||||
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
||||
recv_errors: { label: 'RX Errors', unit: '', color: '#ef4444' },
|
||||
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
||||
};
|
||||
|
||||
@@ -37,9 +43,18 @@ const BUILTIN_METRICS: BuiltinMetric[] = Object.keys(BUILTIN_METRIC_CONFIG) as B
|
||||
// Stable color rotation for dynamic LPP sensors
|
||||
const LPP_COLORS = ['#ec4899', '#14b8a6', '#f97316', '#6366f1', '#84cc16', '#e11d48'];
|
||||
|
||||
/** Build a flat data key for an LPP sensor: lpp_{type_name}_ch{channel} */
|
||||
function lppKey(s: TelemetryLppSensor): string {
|
||||
return `lpp_${s.type_name}_ch${s.channel}`;
|
||||
/** Assign disambiguated flat keys to an array of LPP sensors.
|
||||
* First occurrence keeps the base key; duplicates of the same (type, channel) get _2, _3, etc. */
|
||||
function assignLppKeys(
|
||||
sensors: TelemetryLppSensor[]
|
||||
): { sensor: TelemetryLppSensor; key: string; occurrence: number }[] {
|
||||
const counts = new Map<string, number>();
|
||||
return sensors.map((s) => {
|
||||
const base = `lpp_${s.type_name}_ch${s.channel}`;
|
||||
const n = (counts.get(base) ?? 0) + 1;
|
||||
counts.set(base, n);
|
||||
return { sensor: s, key: n === 1 ? base : `${base}_${n}`, occurrence: n };
|
||||
});
|
||||
}
|
||||
|
||||
const TOOLTIP_STYLE = {
|
||||
@@ -93,11 +108,10 @@ export function TelemetryHistoryPane({
|
||||
|
||||
// Discover unique LPP sensors across all history entries
|
||||
const lppMetrics = useMemo(() => {
|
||||
const seen = new Map<string, { type_name: string; channel: number }>();
|
||||
const seen = new Map<string, { type_name: string; channel: number; occurrence: number }>();
|
||||
for (const e of entries) {
|
||||
for (const s of e.data.lpp_sensors ?? []) {
|
||||
const k = lppKey(s);
|
||||
if (!seen.has(k)) seen.set(k, { type_name: s.type_name, channel: s.channel });
|
||||
for (const { sensor: s, key: k, occurrence } of assignLppKeys(e.data.lpp_sensors ?? [])) {
|
||||
if (!seen.has(k)) seen.set(k, { type_name: s.type_name, channel: s.channel, occurrence });
|
||||
}
|
||||
}
|
||||
const result: { key: string; config: MetricConfig; type_name: string; channel: number }[] = [];
|
||||
@@ -106,7 +120,8 @@ export function TelemetryHistoryPane({
|
||||
const label =
|
||||
info.type_name.charAt(0).toUpperCase() +
|
||||
info.type_name.slice(1).replace(/_/g, ' ') +
|
||||
` Ch${info.channel}`;
|
||||
` Ch${info.channel}` +
|
||||
(info.occurrence > 1 ? ` (${info.occurrence})` : '');
|
||||
const { unit } = lppDisplayUnit(info.type_name, 0, distanceUnit);
|
||||
result.push({
|
||||
key: k,
|
||||
@@ -139,18 +154,25 @@ export function TelemetryHistoryPane({
|
||||
const chartData = useMemo(() => {
|
||||
return entries.map((e) => {
|
||||
const d = e.data;
|
||||
const recvErrors = d.recv_errors ?? undefined;
|
||||
const packetsReceived = d.packets_received;
|
||||
const point: Record<string, number | undefined> = {
|
||||
timestamp: e.timestamp,
|
||||
battery_volts: d.battery_volts,
|
||||
noise_floor_dbm: d.noise_floor_dbm,
|
||||
packets_received: d.packets_received,
|
||||
packets_received: packetsReceived,
|
||||
packets_sent: d.packets_sent,
|
||||
recv_errors: recvErrors,
|
||||
recv_error_pct:
|
||||
recvErrors != null && packetsReceived != null && packetsReceived + recvErrors > 0
|
||||
? +((recvErrors / (packetsReceived + recvErrors)) * 100).toFixed(2)
|
||||
: undefined,
|
||||
uptime_seconds: d.uptime_seconds,
|
||||
};
|
||||
// Flatten LPP sensors into the point, converting units as needed
|
||||
for (const s of d.lpp_sensors ?? []) {
|
||||
for (const { sensor: s, key } of assignLppKeys(d.lpp_sensors ?? [])) {
|
||||
if (typeof s.value === 'number') {
|
||||
point[lppKey(s)] = lppDisplayUnit(s.type_name, s.value, distanceUnit).value;
|
||||
point[key] = lppDisplayUnit(s.type_name, s.value, distanceUnit).value;
|
||||
}
|
||||
}
|
||||
return point;
|
||||
@@ -158,7 +180,11 @@ export function TelemetryHistoryPane({
|
||||
}, [entries, distanceUnit]);
|
||||
|
||||
const dataKeys =
|
||||
activeMetric === 'packets' ? ['packets_received', 'packets_sent'] : [activeMetric];
|
||||
activeMetric === 'packets'
|
||||
? ['packets_received', 'packets_sent']
|
||||
: activeMetric === 'recv_errors'
|
||||
? ['recv_errors', 'recv_error_pct']
|
||||
: [activeMetric];
|
||||
|
||||
const yDomain = useMemo<[number, number] | undefined>(() => {
|
||||
if (activeMetric !== 'battery_volts' || chartData.length === 0) return undefined;
|
||||
@@ -169,6 +195,20 @@ export function TelemetryHistoryPane({
|
||||
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
||||
}, [activeMetric, chartData]);
|
||||
|
||||
const yDomainPct = useMemo<[number, number]>(() => {
|
||||
const MIN_SPAN = 5;
|
||||
const values = chartData.map((d) => d.recv_error_pct).filter((v) => v != null) as number[];
|
||||
if (values.length === 0) return [0, MIN_SPAN];
|
||||
const lo = Math.min(...values);
|
||||
const hi = Math.max(...values);
|
||||
const span = hi - lo;
|
||||
if (span >= MIN_SPAN)
|
||||
return [Math.max(0, Math.floor(lo - span * 0.1)), Math.ceil(hi + span * 0.1)];
|
||||
const pad = (MIN_SPAN - span) / 2;
|
||||
const bottom = Math.max(0, Math.floor(lo - pad));
|
||||
return [bottom, Math.ceil(bottom + MIN_SPAN)];
|
||||
}, [chartData]);
|
||||
|
||||
const handleToggle = async () => {
|
||||
setToggling(true);
|
||||
try {
|
||||
@@ -205,16 +245,16 @@ export function TelemetryHistoryPane({
|
||||
via the repeater pane, API calls to the endpoint (
|
||||
<code className="text-[0.6875rem]">POST /api/contacts/<key>/repeater/status</code>
|
||||
), or when the repeater is opted into interval telemetry polling, in which case the
|
||||
repeater will be polled for metrics every 8 hours. You can see which repeaters are opted
|
||||
into this flow in the{' '}
|
||||
repeater will be polled for metrics automatically. Fetch frequency can be configured in{' '}
|
||||
<a
|
||||
href="#settings/database"
|
||||
className="underline text-primary hover:text-primary/80 transition-colors"
|
||||
>
|
||||
Database & Messaging
|
||||
</a>{' '}
|
||||
settings pane. A maximum of {MAX_TRACKED} repeaters may be opted into this for the sake
|
||||
of keeping mesh congestion reasonable.
|
||||
Settings → Database & Messaging
|
||||
</a>
|
||||
, where you can also see which repeaters are currently opted in. A maximum of{' '}
|
||||
{MAX_TRACKED} repeaters may be opted into this for the sake of keeping mesh congestion
|
||||
reasonable.
|
||||
</p>
|
||||
|
||||
{isTracked ? (
|
||||
@@ -243,7 +283,7 @@ export function TelemetryHistoryPane({
|
||||
disabled={toggling}
|
||||
className="border-green-600/50 text-green-600 hover:bg-green-600/10"
|
||||
>
|
||||
{toggling ? 'Updating...' : 'Opt Repeater into 8hr Interval Metrics Tracking'}
|
||||
{toggling ? 'Updating...' : 'Opt Repeater into Interval Metrics Tracking'}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
@@ -290,7 +330,15 @@ export function TelemetryHistoryPane({
|
||||
</p>
|
||||
) : (
|
||||
<ResponsiveContainer width="100%" height={180}>
|
||||
<AreaChart data={chartData} margin={{ top: 4, right: 4, bottom: 0, left: -8 }}>
|
||||
<AreaChart
|
||||
data={chartData}
|
||||
margin={{
|
||||
top: 4,
|
||||
right: activeMetric === 'recv_errors' ? 8 : 4,
|
||||
bottom: 0,
|
||||
left: -8,
|
||||
}}
|
||||
>
|
||||
<CartesianGrid strokeDasharray="3 3" stroke="hsl(var(--border))" vertical={false} />
|
||||
<XAxis
|
||||
dataKey="timestamp"
|
||||
@@ -302,6 +350,7 @@ export function TelemetryHistoryPane({
|
||||
tickFormatter={formatTime}
|
||||
/>
|
||||
<YAxis
|
||||
yAxisId="left"
|
||||
domain={yDomain}
|
||||
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
||||
tickLine={false}
|
||||
@@ -310,6 +359,17 @@ export function TelemetryHistoryPane({
|
||||
activeMetric === 'uptime_seconds' ? formatUptime(v) : `${v}`
|
||||
}
|
||||
/>
|
||||
{activeMetric === 'recv_errors' && (
|
||||
<YAxis
|
||||
yAxisId="right"
|
||||
orientation="right"
|
||||
domain={yDomainPct}
|
||||
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
||||
tickLine={false}
|
||||
axisLine={false}
|
||||
tickFormatter={(v) => `${v}%`}
|
||||
/>
|
||||
)}
|
||||
<RechartsTooltip
|
||||
{...TOOLTIP_STYLE}
|
||||
cursor={{
|
||||
@@ -321,6 +381,10 @@ export function TelemetryHistoryPane({
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
formatter={(value: any, name: any) => {
|
||||
const numVal = typeof value === 'number' ? value : Number(value);
|
||||
if (activeMetric === 'recv_errors') {
|
||||
if (name === 'recv_error_pct') return [`${numVal}%`, 'Error Rate'];
|
||||
return [`${value}`, 'RX Errors'];
|
||||
}
|
||||
const display =
|
||||
activeMetric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
||||
const suffix =
|
||||
@@ -338,51 +402,44 @@ export function TelemetryHistoryPane({
|
||||
return [`${display}${suffix}`, label];
|
||||
}}
|
||||
/>
|
||||
{dataKeys.map((key, i) => (
|
||||
<Area
|
||||
key={key}
|
||||
type="linear"
|
||||
dataKey={key}
|
||||
stroke={
|
||||
activeMetric === 'packets'
|
||||
{dataKeys.map((key, i) => {
|
||||
const color =
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeMetric === 'recv_errors'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color
|
||||
}
|
||||
fill={
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color
|
||||
}
|
||||
fillOpacity={0.15}
|
||||
strokeWidth={1.5}
|
||||
dot={{
|
||||
r: 4,
|
||||
fill:
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color,
|
||||
strokeWidth: 1.5,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
||||
activeDot={{
|
||||
r: 6,
|
||||
fill:
|
||||
activeMetric === 'packets'
|
||||
? i === 0
|
||||
? '#0ea5e9'
|
||||
: '#f43f5e'
|
||||
: activeConfig.color,
|
||||
strokeWidth: 2,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
? '#ef4444'
|
||||
: '#f59e0b'
|
||||
: activeConfig.color;
|
||||
return (
|
||||
<Area
|
||||
key={key}
|
||||
type="linear"
|
||||
dataKey={key}
|
||||
yAxisId={
|
||||
activeMetric === 'recv_errors' && key === 'recv_error_pct' ? 'right' : 'left'
|
||||
}
|
||||
stroke={color}
|
||||
fill={color}
|
||||
fillOpacity={0.15}
|
||||
strokeWidth={1.5}
|
||||
dot={{
|
||||
r: 4,
|
||||
fill: color,
|
||||
strokeWidth: 1.5,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
||||
activeDot={{
|
||||
r: 6,
|
||||
fill: color,
|
||||
strokeWidth: 2,
|
||||
stroke: 'hsl(var(--popover))',
|
||||
}}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</AreaChart>
|
||||
</ResponsiveContainer>
|
||||
)}
|
||||
|
||||
@@ -91,6 +91,26 @@ export function TelemetryPane({
|
||||
label="Duplicates"
|
||||
value={`${data.flood_dups.toLocaleString()} flood / ${data.direct_dups.toLocaleString()} direct`}
|
||||
/>
|
||||
{data.recv_errors != null && (
|
||||
<KvRow
|
||||
label="RX Errors"
|
||||
value={
|
||||
<>
|
||||
{data.recv_errors.toLocaleString()}
|
||||
{data.packets_received > 0 && (
|
||||
<Secondary>
|
||||
(
|
||||
{(
|
||||
(data.recv_errors / (data.packets_received + data.recv_errors)) *
|
||||
100
|
||||
).toFixed(2)}
|
||||
%)
|
||||
</Secondary>
|
||||
)}
|
||||
</>
|
||||
}
|
||||
/>
|
||||
)}
|
||||
<Separator className="my-1" />
|
||||
<KvRow label="TX Queue" value={data.tx_queue_len} />
|
||||
<KvRow label="Debug Flags" value={data.full_events} />
|
||||
|
||||
@@ -242,8 +242,16 @@ export function formatLppLabel(typeName: string): string {
|
||||
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
||||
}
|
||||
|
||||
export function LppSensorRow({ sensor, unitPref }: { sensor: LppSensor; unitPref?: string }) {
|
||||
const label = formatLppLabel(sensor.type_name);
|
||||
export function LppSensorRow({
|
||||
sensor,
|
||||
unitPref,
|
||||
label: labelOverride,
|
||||
}: {
|
||||
sensor: LppSensor;
|
||||
unitPref?: string;
|
||||
label?: string;
|
||||
}) {
|
||||
const label = labelOverride ?? formatLppLabel(sensor.type_name);
|
||||
|
||||
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
||||
// Multi-value sensor (GPS, accelerometer, etc.)
|
||||
|
||||
@@ -15,6 +15,9 @@ const CONTACT_TYPE_LABELS: Record<number, string> = {
|
||||
4: 'Sensor',
|
||||
};
|
||||
|
||||
type SortField = 'name' | 'type' | 'key' | 'first_seen' | 'last_seen';
|
||||
type SortDir = 'asc' | 'desc';
|
||||
|
||||
function formatDate(ts: number): string {
|
||||
return new Date(ts * 1000).toLocaleDateString([], {
|
||||
year: 'numeric',
|
||||
@@ -32,6 +35,32 @@ function datetimeToUnix(datetimeStr: string): number {
|
||||
return Math.floor(d.getTime() / 1000);
|
||||
}
|
||||
|
||||
function SortableHeader({
|
||||
label,
|
||||
field,
|
||||
sortField,
|
||||
sortDir,
|
||||
onSort,
|
||||
className,
|
||||
}: {
|
||||
label: string;
|
||||
field: SortField;
|
||||
sortField: SortField;
|
||||
sortDir: SortDir;
|
||||
onSort: (field: SortField) => void;
|
||||
className?: string;
|
||||
}) {
|
||||
const active = sortField === field;
|
||||
return (
|
||||
<th
|
||||
className={`px-3 py-1.5 cursor-pointer select-none hover:text-foreground transition-colors ${className ?? ''}`}
|
||||
onClick={() => onSort(field)}
|
||||
>
|
||||
{label} {active ? (sortDir === 'asc' ? '▲' : '▼') : ''}
|
||||
</th>
|
||||
);
|
||||
}
|
||||
|
||||
interface BulkDeleteContactsModalProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
@@ -49,22 +78,42 @@ export function BulkDeleteContactsModal({
|
||||
const [selectedKeys, setSelectedKeys] = useState<Set<string>>(new Set());
|
||||
const [startDate, setStartDate] = useState('');
|
||||
const [endDate, setEndDate] = useState('');
|
||||
const [lastHeardAfter, setLastHeardAfter] = useState('');
|
||||
const [lastHeardBefore, setLastHeardBefore] = useState('');
|
||||
const [typeFilter, setTypeFilter] = useState<number | 'all'>('all');
|
||||
const [sortField, setSortField] = useState<SortField>('first_seen');
|
||||
const [sortDir, setSortDir] = useState<SortDir>('desc');
|
||||
const [deleting, setDeleting] = useState(false);
|
||||
const lastClickedKeyRef = useRef<string | null>(null);
|
||||
|
||||
const handleSort = useCallback(
|
||||
(field: SortField) => {
|
||||
if (sortField === field) {
|
||||
setSortDir((d) => (d === 'asc' ? 'desc' : 'asc'));
|
||||
} else {
|
||||
setSortField(field);
|
||||
setSortDir(field === 'name' || field === 'key' ? 'asc' : 'desc');
|
||||
}
|
||||
},
|
||||
[sortField]
|
||||
);
|
||||
|
||||
const resetAndClose = useCallback(() => {
|
||||
setStep('select');
|
||||
setSelectedKeys(new Set());
|
||||
setStartDate('');
|
||||
setEndDate('');
|
||||
setLastHeardAfter('');
|
||||
setLastHeardBefore('');
|
||||
setTypeFilter('all');
|
||||
setSortField('first_seen');
|
||||
setSortDir('desc');
|
||||
lastClickedKeyRef.current = null;
|
||||
onClose();
|
||||
}, [onClose]);
|
||||
|
||||
const filteredContacts = useMemo(() => {
|
||||
let list = [...contacts].sort((a, b) => (b.first_seen ?? 0) - (a.first_seen ?? 0));
|
||||
let list = [...contacts];
|
||||
if (typeFilter !== 'all') {
|
||||
list = list.filter((c) => c.type === typeFilter);
|
||||
}
|
||||
@@ -76,8 +125,44 @@ export function BulkDeleteContactsModal({
|
||||
const end = datetimeToUnix(endDate);
|
||||
list = list.filter((c) => (c.first_seen ?? 0) <= end);
|
||||
}
|
||||
if (lastHeardAfter) {
|
||||
const after = datetimeToUnix(lastHeardAfter);
|
||||
list = list.filter((c) => (c.last_seen ?? 0) >= after);
|
||||
}
|
||||
if (lastHeardBefore) {
|
||||
const before = datetimeToUnix(lastHeardBefore);
|
||||
list = list.filter((c) => (c.last_seen ?? 0) <= before);
|
||||
}
|
||||
|
||||
const dir = sortDir === 'asc' ? 1 : -1;
|
||||
list.sort((a, b) => {
|
||||
switch (sortField) {
|
||||
case 'name': {
|
||||
const an = getContactDisplayName(a.name, a.public_key, a.last_advert).toLowerCase();
|
||||
const bn = getContactDisplayName(b.name, b.public_key, b.last_advert).toLowerCase();
|
||||
return an < bn ? -dir : an > bn ? dir : 0;
|
||||
}
|
||||
case 'type':
|
||||
return (a.type - b.type) * dir;
|
||||
case 'key':
|
||||
return a.public_key < b.public_key ? -dir : a.public_key > b.public_key ? dir : 0;
|
||||
case 'first_seen':
|
||||
return ((a.first_seen ?? 0) - (b.first_seen ?? 0)) * dir;
|
||||
case 'last_seen':
|
||||
return ((a.last_seen ?? 0) - (b.last_seen ?? 0)) * dir;
|
||||
}
|
||||
});
|
||||
return list;
|
||||
}, [contacts, typeFilter, startDate, endDate]);
|
||||
}, [
|
||||
contacts,
|
||||
typeFilter,
|
||||
startDate,
|
||||
endDate,
|
||||
lastHeardAfter,
|
||||
lastHeardBefore,
|
||||
sortField,
|
||||
sortDir,
|
||||
]);
|
||||
|
||||
const handleToggle = (key: string, shiftKey: boolean) => {
|
||||
if (shiftKey && lastClickedKeyRef.current && lastClickedKeyRef.current !== key) {
|
||||
@@ -148,6 +233,8 @@ export function BulkDeleteContactsModal({
|
||||
}
|
||||
};
|
||||
|
||||
const hasFilters = startDate || endDate || lastHeardAfter || lastHeardBefore;
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={(isOpen) => !isOpen && resetAndClose()}>
|
||||
<DialogContent className="sm:max-w-2xl max-h-[85dvh] flex flex-col">
|
||||
@@ -164,40 +251,64 @@ export function BulkDeleteContactsModal({
|
||||
|
||||
{step === 'select' && (
|
||||
<>
|
||||
<div className="flex flex-wrap items-end gap-3">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Show</label>
|
||||
<select
|
||||
value={typeFilter === 'all' ? 'all' : String(typeFilter)}
|
||||
onChange={(e) =>
|
||||
setTypeFilter(e.target.value === 'all' ? 'all' : Number(e.target.value))
|
||||
}
|
||||
className="block h-8 rounded-md border border-input bg-background px-2 text-sm"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="1">Clients</option>
|
||||
<option value="2">Repeaters</option>
|
||||
<option value="3">Room Servers</option>
|
||||
<option value="4">Sensors</option>
|
||||
</select>
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="flex flex-wrap items-end gap-3">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Show</label>
|
||||
<select
|
||||
value={typeFilter === 'all' ? 'all' : String(typeFilter)}
|
||||
onChange={(e) =>
|
||||
setTypeFilter(e.target.value === 'all' ? 'all' : Number(e.target.value))
|
||||
}
|
||||
className="block h-8 rounded-md border border-input bg-background px-2 text-sm"
|
||||
>
|
||||
<option value="all">All</option>
|
||||
<option value="1">Clients</option>
|
||||
<option value="2">Repeaters</option>
|
||||
<option value="3">Room Servers</option>
|
||||
<option value="4">Sensors</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Created after</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={startDate}
|
||||
onChange={(e) => setStartDate(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
<div className="flex flex-wrap items-end gap-3">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Created after</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={startDate}
|
||||
onChange={(e) => setStartDate(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Created before</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={endDate}
|
||||
onChange={(e) => setEndDate(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Created before</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={endDate}
|
||||
onChange={(e) => setEndDate(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
<div className="flex flex-wrap items-end gap-3">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Last heard after</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={lastHeardAfter}
|
||||
onChange={(e) => setLastHeardAfter(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs text-muted-foreground">Last heard before</label>
|
||||
<Input
|
||||
type="datetime-local"
|
||||
value={lastHeardBefore}
|
||||
onChange={(e) => setLastHeardBefore(e.target.value)}
|
||||
className="w-48 h-8 text-sm"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-1.5">
|
||||
<Button type="button" variant="outline" size="sm" onClick={handleSelectAll}>
|
||||
@@ -211,7 +322,7 @@ export function BulkDeleteContactsModal({
|
||||
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{filteredContacts.length} contact{filteredContacts.length === 1 ? '' : 's'} shown
|
||||
{(startDate || endDate) && ' (filtered)'}
|
||||
{hasFilters && ' (filtered)'}
|
||||
{' · '}
|
||||
{selectedKeys.size} selected
|
||||
</div>
|
||||
@@ -219,17 +330,51 @@ export function BulkDeleteContactsModal({
|
||||
<div className="flex-1 overflow-y-auto min-h-0 border border-border rounded-md">
|
||||
{filteredContacts.length === 0 ? (
|
||||
<div className="p-4 text-center text-sm text-muted-foreground">
|
||||
No contacts match the selected date range.
|
||||
No contacts match the selected filters.
|
||||
</div>
|
||||
) : (
|
||||
<table className="w-full text-sm">
|
||||
<thead className="sticky top-0 bg-muted/90 backdrop-blur-sm">
|
||||
<tr className="text-left text-xs text-muted-foreground">
|
||||
<th className="px-3 py-1.5 w-8" />
|
||||
<th className="px-3 py-1.5">Name</th>
|
||||
<th className="px-3 py-1.5 hidden sm:table-cell">Type</th>
|
||||
<th className="px-3 py-1.5">Key</th>
|
||||
<th className="px-3 py-1.5 hidden sm:table-cell">Created</th>
|
||||
<SortableHeader
|
||||
label="Name"
|
||||
field="name"
|
||||
sortField={sortField}
|
||||
sortDir={sortDir}
|
||||
onSort={handleSort}
|
||||
/>
|
||||
<SortableHeader
|
||||
label="Type"
|
||||
field="type"
|
||||
sortField={sortField}
|
||||
sortDir={sortDir}
|
||||
onSort={handleSort}
|
||||
className="hidden sm:table-cell"
|
||||
/>
|
||||
<SortableHeader
|
||||
label="Key"
|
||||
field="key"
|
||||
sortField={sortField}
|
||||
sortDir={sortDir}
|
||||
onSort={handleSort}
|
||||
/>
|
||||
<SortableHeader
|
||||
label="Created"
|
||||
field="first_seen"
|
||||
sortField={sortField}
|
||||
sortDir={sortDir}
|
||||
onSort={handleSort}
|
||||
className="hidden sm:table-cell"
|
||||
/>
|
||||
<SortableHeader
|
||||
label="Last heard"
|
||||
field="last_seen"
|
||||
sortField={sortField}
|
||||
sortDir={sortDir}
|
||||
onSort={handleSort}
|
||||
className="hidden sm:table-cell"
|
||||
/>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@@ -265,6 +410,9 @@ export function BulkDeleteContactsModal({
|
||||
<td className="px-3 py-1.5 hidden sm:table-cell text-xs text-muted-foreground">
|
||||
{c.first_seen ? formatDate(c.first_seen) : '—'}
|
||||
</td>
|
||||
<td className="px-3 py-1.5 hidden sm:table-cell text-xs text-muted-foreground">
|
||||
{c.last_seen ? formatDate(c.last_seen) : '—'}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
@@ -298,6 +446,7 @@ export function BulkDeleteContactsModal({
|
||||
<th className="px-3 py-1.5">Type</th>
|
||||
<th className="px-3 py-1.5">Key</th>
|
||||
<th className="px-3 py-1.5 hidden sm:table-cell">Created</th>
|
||||
<th className="px-3 py-1.5 hidden sm:table-cell">Last heard</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@@ -315,6 +464,9 @@ export function BulkDeleteContactsModal({
|
||||
<td className="px-3 py-1.5 hidden sm:table-cell text-xs text-muted-foreground">
|
||||
{c.first_seen ? formatDate(c.first_seen) : '—'}
|
||||
</td>
|
||||
<td className="px-3 py-1.5 hidden sm:table-cell text-xs text-muted-foreground">
|
||||
{c.last_seen ? formatDate(c.last_seen) : '—'}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
|
||||
@@ -92,7 +92,11 @@ export function SettingsDatabaseSection({
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [trackedTelemetryRepeaters.length, appSettings.telemetry_interval_hours]);
|
||||
}, [
|
||||
trackedTelemetryRepeaters.length,
|
||||
appSettings.telemetry_interval_hours,
|
||||
appSettings.telemetry_routed_hourly,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (trackedTelemetryRepeaters.length === 0 || telemetryFetchedRef.current) return;
|
||||
@@ -346,13 +350,41 @@ export function SettingsDatabaseSection({
|
||||
restored if you drop back to a supported count.
|
||||
</p>
|
||||
)}
|
||||
{schedule?.next_run_at != null && (
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Next run at {formatTime(schedule.next_run_at)} (UTC top of hour).
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Routed hourly toggle */}
|
||||
<label className="flex items-start gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={appSettings.telemetry_routed_hourly}
|
||||
onChange={() => {
|
||||
const next = !appSettings.telemetry_routed_hourly;
|
||||
void persistAppSettings({ telemetry_routed_hourly: next }, () => {});
|
||||
}}
|
||||
className="w-4 h-4 rounded border-input accent-primary mt-0.5"
|
||||
/>
|
||||
<div>
|
||||
<span className="text-sm">Poll direct/routed-path repeaters hourly</span>
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
When enabled, tracked repeaters with a direct or routed path (not flood) are polled
|
||||
every hour instead of on the scheduled interval above. Flood-only repeaters still
|
||||
follow the normal schedule.
|
||||
</p>
|
||||
</div>
|
||||
</label>
|
||||
|
||||
{schedule?.next_run_at != null && (
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{schedule.routed_hourly ? 'Next flood run at' : 'Next run at'}{' '}
|
||||
{formatTime(schedule.next_run_at)} (UTC top of hour).
|
||||
</p>
|
||||
)}
|
||||
{schedule?.next_routed_run_at != null && (
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Next direct/routed run at {formatTime(schedule.next_routed_run_at)} (UTC top of hour).
|
||||
</p>
|
||||
)}
|
||||
|
||||
{trackedTelemetryRepeaters.length === 0 ? (
|
||||
<p className="text-sm text-muted-foreground italic">
|
||||
No repeaters are being tracked. Enable tracking from a repeater's dashboard.
|
||||
@@ -362,6 +394,21 @@ export function SettingsDatabaseSection({
|
||||
{trackedTelemetryRepeaters.map((key) => {
|
||||
const contact = contacts.find((c) => c.public_key === key);
|
||||
const displayName = contact?.name ?? key.slice(0, 12);
|
||||
const routeSource = contact?.effective_route_source ?? 'flood';
|
||||
// A forced-flood override (path_len < 0) still reports source
|
||||
// "override", but the actual route is flood. Check the real path.
|
||||
const hasRealPath =
|
||||
contact?.effective_route != null && contact.effective_route.path_len >= 0;
|
||||
const routeLabel = !hasRealPath
|
||||
? 'flood'
|
||||
: routeSource === 'override'
|
||||
? 'routed'
|
||||
: routeSource === 'direct'
|
||||
? 'direct'
|
||||
: 'flood';
|
||||
const routeColor = hasRealPath
|
||||
? 'text-primary bg-primary/10'
|
||||
: 'text-muted-foreground bg-muted';
|
||||
const snap = latestTelemetry[key];
|
||||
const d = snap?.data;
|
||||
return (
|
||||
@@ -369,9 +416,16 @@ export function SettingsDatabaseSection({
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
<div className="flex-1 min-w-0">
|
||||
<span className="text-sm truncate block">{displayName}</span>
|
||||
<span className="text-[0.625rem] text-muted-foreground font-mono">
|
||||
{key.slice(0, 12)}
|
||||
</span>
|
||||
<div className="flex items-center gap-1.5">
|
||||
<span className="text-[0.625rem] text-muted-foreground font-mono">
|
||||
{key.slice(0, 12)}
|
||||
</span>
|
||||
<span
|
||||
className={`text-[0.625rem] uppercase tracking-wider px-1.5 py-0.5 rounded font-medium ${routeColor}`}
|
||||
>
|
||||
{routeLabel}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
{onToggleTrackedTelemetry && (
|
||||
<Button
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
import { useState, useEffect, useCallback, useMemo, useRef, lazy, Suspense } from 'react';
|
||||
import {
|
||||
useState,
|
||||
useEffect,
|
||||
useCallback,
|
||||
useMemo,
|
||||
useRef,
|
||||
lazy,
|
||||
Suspense,
|
||||
type ReactNode,
|
||||
} from 'react';
|
||||
import { ChevronDown, Info } from 'lucide-react';
|
||||
import { Input } from '../ui/input';
|
||||
import { Label } from '../ui/label';
|
||||
@@ -278,7 +287,10 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
||||
config: {
|
||||
urls: '',
|
||||
preserve_identity: true,
|
||||
include_path: true,
|
||||
markdown_format: true,
|
||||
body_format_dm: '**DM:** {sender_name}: {text} **via:** [{hops_backticked}]',
|
||||
body_format_channel:
|
||||
'**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]',
|
||||
},
|
||||
scope: { messages: 'all', raw_packets: 'none' },
|
||||
},
|
||||
@@ -2376,6 +2388,116 @@ function ScopeSelector({
|
||||
);
|
||||
}
|
||||
|
||||
const APPRISE_DEFAULT_DM = '**DM:** {sender_name}: {text} **via:** [{hops_backticked}]';
|
||||
const APPRISE_DEFAULT_CHANNEL =
|
||||
'**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]';
|
||||
const APPRISE_DEFAULT_DM_PLAIN = 'DM: {sender_name}: {text} via: [{hops}]';
|
||||
const APPRISE_DEFAULT_CHANNEL_PLAIN = '{channel_name}: {sender_name}: {text} via: [{hops}]';
|
||||
|
||||
const APPRISE_SAMPLE_VARS: Record<string, string> = {
|
||||
type: 'CHAN',
|
||||
text: 'hello world',
|
||||
sender_name: 'Alice',
|
||||
sender_key: 'a1b2c3d4e5f6',
|
||||
channel_name: '#general',
|
||||
conversation_key: 'abcdef1234567890',
|
||||
hops: '2a, 3b',
|
||||
hops_backticked: '`2a`, `3b`',
|
||||
hop_count: '2',
|
||||
rssi: '-95',
|
||||
snr: '6.5',
|
||||
};
|
||||
|
||||
const APPRISE_SAMPLE_VARS_DM: Record<string, string> = {
|
||||
...APPRISE_SAMPLE_VARS,
|
||||
type: 'PRIV',
|
||||
channel_name: '',
|
||||
conversation_key: 'a1b2c3d4e5f6',
|
||||
};
|
||||
|
||||
function appriseApplyFormat(fmt: string, vars: Record<string, string>): string {
|
||||
let result = fmt;
|
||||
for (const [key, value] of Object.entries(vars)) {
|
||||
result = result.split(`{${key}}`).join(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Render a markdown-ish string into inline React elements (bold, italic, code). */
|
||||
function appriseRenderMarkdown(s: string): ReactNode[] {
|
||||
const nodes: ReactNode[] = [];
|
||||
let key = 0;
|
||||
// Split on **bold**, __bold__, *italic*, _italic_, and `code` spans.
|
||||
// Longer delimiters first so ** and __ match before * and _.
|
||||
const parts = s.split(/(\*\*[^*]+\*\*|__[^_]+__|`[^`]+`|\*[^*]+\*|_[^_]+_)/g);
|
||||
for (const part of parts) {
|
||||
if (
|
||||
(part.startsWith('**') && part.endsWith('**')) ||
|
||||
(part.startsWith('__') && part.endsWith('__'))
|
||||
) {
|
||||
nodes.push(
|
||||
<strong key={key++} className="font-bold">
|
||||
{part.slice(2, -2)}
|
||||
</strong>
|
||||
);
|
||||
} else if (
|
||||
(part.startsWith('*') && part.endsWith('*')) ||
|
||||
(part.startsWith('_') && part.endsWith('_'))
|
||||
) {
|
||||
nodes.push(
|
||||
<em key={key++} className="italic">
|
||||
{part.slice(1, -1)}
|
||||
</em>
|
||||
);
|
||||
} else if (part.startsWith('`') && part.endsWith('`')) {
|
||||
nodes.push(
|
||||
<code key={key++} className="rounded bg-muted px-1 py-0.5 text-[0.6875rem] font-mono">
|
||||
{part.slice(1, -1)}
|
||||
</code>
|
||||
);
|
||||
} else if (part) {
|
||||
nodes.push(<span key={key++}>{part}</span>);
|
||||
}
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
|
||||
function AppriseFormatPreview({
|
||||
format,
|
||||
vars,
|
||||
markdown = true,
|
||||
}: {
|
||||
format: string;
|
||||
vars: Record<string, string>;
|
||||
markdown?: boolean;
|
||||
}) {
|
||||
const raw = appriseApplyFormat(format, vars);
|
||||
return (
|
||||
<div className="rounded-md border border-border bg-muted/30 p-2 space-y-1.5">
|
||||
{markdown && (
|
||||
<div>
|
||||
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||
Rendered (Discord, Slack, Telegram)
|
||||
</span>
|
||||
<p className="text-xs break-all">{appriseRenderMarkdown(raw)}</p>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||
{markdown ? 'Raw (email, SMS)' : 'Preview'}
|
||||
</span>
|
||||
<p className="text-xs font-mono break-all text-muted-foreground">{raw}</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function appriseIsDefault(value: unknown, defaultStr: string): boolean {
|
||||
if (value == null) return true;
|
||||
const s = String(value).trim();
|
||||
return s === '' || s === defaultStr;
|
||||
}
|
||||
|
||||
function AppriseConfigEditor({
|
||||
config,
|
||||
scope,
|
||||
@@ -2387,6 +2509,12 @@ function AppriseConfigEditor({
|
||||
onChange: (config: Record<string, unknown>) => void;
|
||||
onScopeChange: (scope: Record<string, unknown>) => void;
|
||||
}) {
|
||||
const markdown = config.markdown_format !== false;
|
||||
const defaultDm = markdown ? APPRISE_DEFAULT_DM : APPRISE_DEFAULT_DM_PLAIN;
|
||||
const defaultChan = markdown ? APPRISE_DEFAULT_CHANNEL : APPRISE_DEFAULT_CHANNEL_PLAIN;
|
||||
const dmFormat = ((config.body_format_dm as string) || '').trim() || defaultDm;
|
||||
const chanFormat = ((config.body_format_channel as string) || '').trim() || defaultChan;
|
||||
|
||||
return (
|
||||
<div className="space-y-3">
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
@@ -2445,16 +2573,145 @@ function AppriseConfigEditor({
|
||||
</div>
|
||||
</label>
|
||||
|
||||
<Separator />
|
||||
|
||||
<h3 className="text-base font-semibold tracking-tight">Message Format</h3>
|
||||
|
||||
<label className="flex items-center gap-3 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={config.include_path !== false}
|
||||
onChange={(e) => onChange({ ...config, include_path: e.target.checked })}
|
||||
checked={markdown}
|
||||
onChange={(e) => {
|
||||
const md = e.target.checked;
|
||||
const updates: Record<string, unknown> = { ...config, markdown_format: md };
|
||||
const curDm = ((config.body_format_dm as string) || '').trim();
|
||||
const curChan = ((config.body_format_channel as string) || '').trim();
|
||||
if (md) {
|
||||
if (!curDm || curDm === APPRISE_DEFAULT_DM_PLAIN)
|
||||
updates.body_format_dm = APPRISE_DEFAULT_DM;
|
||||
if (!curChan || curChan === APPRISE_DEFAULT_CHANNEL_PLAIN)
|
||||
updates.body_format_channel = APPRISE_DEFAULT_CHANNEL;
|
||||
} else {
|
||||
if (!curDm || curDm === APPRISE_DEFAULT_DM)
|
||||
updates.body_format_dm = APPRISE_DEFAULT_DM_PLAIN;
|
||||
if (!curChan || curChan === APPRISE_DEFAULT_CHANNEL)
|
||||
updates.body_format_channel = APPRISE_DEFAULT_CHANNEL_PLAIN;
|
||||
}
|
||||
onChange(updates);
|
||||
}}
|
||||
className="h-4 w-4 rounded border-border"
|
||||
/>
|
||||
<span className="text-sm">Include routing path in notifications</span>
|
||||
<div>
|
||||
<span className="text-sm">Markdown formatting</span>
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
If notifications fail on services like Telegram due to special characters in sender
|
||||
names, disable this option.
|
||||
</p>
|
||||
</div>
|
||||
</label>
|
||||
|
||||
<details className="group">
|
||||
<summary className="text-sm font-medium text-foreground cursor-pointer select-none flex items-center gap-1">
|
||||
<ChevronDown className="h-3 w-3 transition-transform group-open:rotate-0 -rotate-90" />
|
||||
Available variables
|
||||
</summary>
|
||||
<div className="mt-2 rounded-md border border-border bg-muted/30 p-2 text-xs space-y-0.5">
|
||||
<div className="grid grid-cols-[auto_1fr] gap-x-3 gap-y-0.5">
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">{'{text}'}</code>
|
||||
<span className="text-muted-foreground">Message body</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{sender_name}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">Sender display name</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{sender_key}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">Sender public key (hex)</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{channel_name}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">Channel name (channel messages only)</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{conversation_key}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">
|
||||
Contact pubkey (DM) or channel key (channel)
|
||||
</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">{'{type}'}</code>
|
||||
<span className="text-muted-foreground">PRIV or CHAN</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">{'{hops}'}</code>
|
||||
<span className="text-muted-foreground">
|
||||
Comma-separated hop IDs, or "direct"
|
||||
</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{hops_backticked}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">Hops wrapped in backticks for markdown</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">
|
||||
{'{hop_count}'}
|
||||
</code>
|
||||
<span className="text-muted-foreground">Number of hops (0 for direct)</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">{'{rssi}'}</code>
|
||||
<span className="text-muted-foreground">Last-hop RSSI in dBm</span>
|
||||
<code className="text-[0.6875rem] font-mono bg-muted px-1 rounded">{'{snr}'}</code>
|
||||
<span className="text-muted-foreground">Last-hop SNR in dB</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1.5">
|
||||
Empty textareas use the default format. RSSI/SNR may be empty if unavailable.
|
||||
</p>
|
||||
</div>
|
||||
</details>
|
||||
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label htmlFor="fanout-apprise-fmt-dm">DM format</Label>
|
||||
{!appriseIsDefault(config.body_format_dm, defaultDm) && (
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Reset DM format to default"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() => onChange({ ...config, body_format_dm: defaultDm })}
|
||||
>
|
||||
Reset to default
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<textarea
|
||||
id="fanout-apprise-fmt-dm"
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm font-mono min-h-[56px]"
|
||||
placeholder={defaultDm}
|
||||
value={(config.body_format_dm as string) ?? ''}
|
||||
onChange={(e) => onChange({ ...config, body_format_dm: e.target.value })}
|
||||
rows={2}
|
||||
/>
|
||||
<AppriseFormatPreview format={dmFormat} vars={APPRISE_SAMPLE_VARS_DM} markdown={markdown} />
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label htmlFor="fanout-apprise-fmt-chan">Channel format</Label>
|
||||
{!appriseIsDefault(config.body_format_channel, defaultChan) && (
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Reset channel format to default"
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
onClick={() => onChange({ ...config, body_format_channel: defaultChan })}
|
||||
>
|
||||
Reset to default
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<textarea
|
||||
id="fanout-apprise-fmt-chan"
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm font-mono min-h-[56px]"
|
||||
placeholder={defaultChan}
|
||||
value={(config.body_format_channel as string) ?? ''}
|
||||
onChange={(e) => onChange({ ...config, body_format_channel: e.target.value })}
|
||||
rows={2}
|
||||
/>
|
||||
<AppriseFormatPreview format={chanFormat} vars={APPRISE_SAMPLE_VARS} markdown={markdown} />
|
||||
</div>
|
||||
|
||||
<Separator />
|
||||
|
||||
<ScopeSelector scope={scope} onChange={onScopeChange} />
|
||||
|
||||
@@ -33,6 +33,13 @@ import {
|
||||
setSavedFontScale,
|
||||
} from '../../utils/fontScale';
|
||||
import { getAutoFocusInputEnabled, setAutoFocusInputEnabled } from '../../utils/autoFocusInput';
|
||||
import {
|
||||
getTextReplaceEnabled,
|
||||
setTextReplaceEnabled as saveTextReplaceEnabled,
|
||||
getTextReplaceMapJson,
|
||||
setTextReplaceMapJson,
|
||||
DEFAULT_MAP_JSON,
|
||||
} from '../../utils/textReplace';
|
||||
import {
|
||||
BATTERY_DISPLAY_CHANGE_EVENT,
|
||||
getShowBatteryPercent,
|
||||
@@ -232,6 +239,9 @@ export function SettingsLocalSection({
|
||||
const [batteryPercent, setBatteryPercent] = useState(getShowBatteryPercent);
|
||||
const [batteryVoltage, setBatteryVoltage] = useState(getShowBatteryVoltage);
|
||||
const [statusDotPulse, setStatusDotPulse] = useState(getStatusDotPulseEnabled);
|
||||
const [textReplaceEnabled, setTextReplaceEnabled] = useState(getTextReplaceEnabled);
|
||||
const [textReplaceJson, setTextReplaceJson] = useState(getTextReplaceMapJson);
|
||||
const [textReplaceError, setTextReplaceError] = useState<string | null>(null);
|
||||
const [fontScale, setFontScale] = useState(getSavedFontScale);
|
||||
const [fontScaleSlider, setFontScaleSlider] = useState(getSavedFontScale);
|
||||
const [fontScaleInput, setFontScaleInput] = useState(() => String(getSavedFontScale()));
|
||||
@@ -439,6 +449,63 @@ export function SettingsLocalSection({
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-md border border-border/60 p-3 space-y-2">
|
||||
<div className="flex items-start gap-3">
|
||||
<Checkbox
|
||||
id="text-replace"
|
||||
checked={textReplaceEnabled}
|
||||
onCheckedChange={(checked) => {
|
||||
const v = checked === true;
|
||||
setTextReplaceEnabled(v);
|
||||
saveTextReplaceEnabled(v);
|
||||
}}
|
||||
className="mt-0.5"
|
||||
/>
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="text-replace">Replace as you Type</Label>
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
Automatically replace characters as you type in the message input. Define
|
||||
replacements as a JSON object mapping source strings to their replacements.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
{textReplaceEnabled && (
|
||||
<div className="space-y-2 pl-7">
|
||||
<textarea
|
||||
value={textReplaceJson}
|
||||
onChange={(e) => {
|
||||
const val = e.target.value;
|
||||
setTextReplaceJson(val);
|
||||
setTextReplaceError(setTextReplaceMapJson(val));
|
||||
}}
|
||||
spellCheck={false}
|
||||
rows={10}
|
||||
className={cn(
|
||||
'w-full rounded-md border bg-background px-3 py-2 text-sm font-mono',
|
||||
textReplaceError ? 'border-destructive' : 'border-input'
|
||||
)}
|
||||
aria-label="Text replacement map (JSON)"
|
||||
/>
|
||||
{textReplaceError && (
|
||||
<p className="text-xs text-destructive">
|
||||
{textReplaceError} Changes are not saved until this is resolved.
|
||||
</p>
|
||||
)}
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
setTextReplaceJson(DEFAULT_MAP_JSON);
|
||||
setTextReplaceMapJson(DEFAULT_MAP_JSON);
|
||||
setTextReplaceError(null);
|
||||
}}
|
||||
className="inline-flex h-8 items-center justify-center rounded-md border border-input px-3 text-sm font-medium transition-colors hover:bg-accent focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||
>
|
||||
Reset to Default
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
|
||||
@@ -1,11 +1,20 @@
|
||||
import { useState, useEffect, useMemo } from 'react';
|
||||
import { MapPinned } from 'lucide-react';
|
||||
import { useState, useEffect, useMemo, useRef } from 'react';
|
||||
import { ChevronDown, Download, MapPinned, Upload } from 'lucide-react';
|
||||
import { Input } from '../ui/input';
|
||||
import { Label } from '../ui/label';
|
||||
import { Button } from '../ui/button';
|
||||
import { Separator } from '../ui/separator';
|
||||
import { toast } from '../ui/sonner';
|
||||
import { Checkbox } from '../ui/checkbox';
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from '../ui/dialog';
|
||||
import { api } from '../../api';
|
||||
import { RADIO_PRESETS } from '../../utils/radioPresets';
|
||||
import { stripRegionScopePrefix } from '../../utils/regionScope';
|
||||
import type {
|
||||
@@ -17,8 +26,116 @@ import type {
|
||||
RadioConfigUpdate,
|
||||
RadioDiscoveryResponse,
|
||||
RadioDiscoveryTarget,
|
||||
RadioStatsSnapshot,
|
||||
} from '../../types';
|
||||
|
||||
function formatUptime(secs: number): string {
|
||||
const days = Math.floor(secs / 86400);
|
||||
const hours = Math.floor((secs % 86400) / 3600);
|
||||
const minutes = Math.floor((secs % 3600) / 60);
|
||||
if (days > 0) return `${days}d ${hours}h ${minutes}m`;
|
||||
if (hours > 0) return `${hours}h ${minutes}m`;
|
||||
return `${minutes}m`;
|
||||
}
|
||||
|
||||
function formatAirtime(secs: number): string {
|
||||
if (secs < 60) return `${secs}s`;
|
||||
const hours = Math.floor(secs / 3600);
|
||||
const minutes = Math.floor((secs % 3600) / 60);
|
||||
if (hours > 0) return `${hours}h ${minutes}m`;
|
||||
return `${minutes}m`;
|
||||
}
|
||||
|
||||
function StatRow({ label, value, warn }: { label: string; value: string; warn?: boolean }) {
|
||||
return (
|
||||
<div className="flex items-center justify-between gap-2 py-0.5">
|
||||
<span className="text-xs text-muted-foreground">{label}</span>
|
||||
<span
|
||||
className={`text-xs font-mono tabular-nums ${warn ? 'text-warning font-semibold' : ''}`}
|
||||
>
|
||||
{value}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RadioDetailsCollapsible({ stats }: { stats: RadioStatsSnapshot }) {
|
||||
const age = stats.timestamp ? Math.max(0, Math.floor(Date.now() / 1000) - stats.timestamp) : null;
|
||||
const packets = {
|
||||
recv: stats.packets_recv,
|
||||
sent: stats.packets_sent,
|
||||
flood_tx: stats.flood_tx,
|
||||
direct_tx: stats.direct_tx,
|
||||
flood_rx: stats.flood_rx,
|
||||
direct_rx: stats.direct_rx,
|
||||
};
|
||||
|
||||
return (
|
||||
<details className="group">
|
||||
<summary className="text-sm font-medium text-foreground cursor-pointer select-none flex items-center gap-1">
|
||||
<ChevronDown className="h-3 w-3 transition-transform group-open:rotate-0 -rotate-90" />
|
||||
Radio Details
|
||||
</summary>
|
||||
<div className="mt-2 space-y-2 rounded-md border border-input bg-muted/20 p-3">
|
||||
{age !== null && (
|
||||
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||
Updated {age < 5 ? 'just now' : `${age}s ago`}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{/* Core */}
|
||||
{stats.uptime_secs != null && (
|
||||
<StatRow label="Uptime" value={formatUptime(stats.uptime_secs)} />
|
||||
)}
|
||||
{stats.battery_mv != null && stats.battery_mv > 0 && (
|
||||
<StatRow label="Battery" value={`${(stats.battery_mv / 1000).toFixed(2)}V`} />
|
||||
)}
|
||||
{stats.queue_len != null && (
|
||||
<StatRow
|
||||
label="TX Queue"
|
||||
value={`${stats.queue_len} / 16`}
|
||||
warn={stats.queue_len >= 14}
|
||||
/>
|
||||
)}
|
||||
{stats.errors != null && (
|
||||
<StatRow label="Errors" value={String(stats.errors)} warn={stats.errors > 0} />
|
||||
)}
|
||||
|
||||
{/* RF */}
|
||||
{stats.noise_floor != null && (
|
||||
<StatRow label="Noise Floor" value={`${stats.noise_floor} dBm`} />
|
||||
)}
|
||||
{stats.last_rssi != null && <StatRow label="Last RSSI" value={`${stats.last_rssi} dBm`} />}
|
||||
{stats.last_snr != null && <StatRow label="Last SNR" value={`${stats.last_snr} dB`} />}
|
||||
|
||||
{/* Airtime */}
|
||||
{(stats.tx_air_secs != null || stats.rx_air_secs != null) && (
|
||||
<>
|
||||
{stats.tx_air_secs != null && (
|
||||
<StatRow label="TX Airtime" value={formatAirtime(stats.tx_air_secs)} />
|
||||
)}
|
||||
{stats.rx_air_secs != null && (
|
||||
<StatRow label="RX Airtime" value={formatAirtime(stats.rx_air_secs)} />
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Packets */}
|
||||
{packets.recv != null && <StatRow label="Packets Received" value={String(packets.recv)} />}
|
||||
{packets.sent != null && <StatRow label="Packets Sent" value={String(packets.sent)} />}
|
||||
{packets.flood_tx != null && <StatRow label="Flood TX" value={String(packets.flood_tx)} />}
|
||||
{packets.flood_rx != null && <StatRow label="Flood RX" value={String(packets.flood_rx)} />}
|
||||
{packets.direct_tx != null && (
|
||||
<StatRow label="Direct TX" value={String(packets.direct_tx)} />
|
||||
)}
|
||||
{packets.direct_rx != null && (
|
||||
<StatRow label="Direct RX" value={String(packets.direct_rx)} />
|
||||
)}
|
||||
</div>
|
||||
</details>
|
||||
);
|
||||
}
|
||||
|
||||
export function SettingsRadioSection({
|
||||
config,
|
||||
health,
|
||||
@@ -279,11 +396,6 @@ export function SettingsRadioSection({
|
||||
|
||||
try {
|
||||
const update: AppSettingsUpdate = {};
|
||||
const hours = parseInt(advertIntervalHours, 10);
|
||||
const newAdvertInterval = isNaN(hours) ? 0 : hours * 3600;
|
||||
if (newAdvertInterval !== appSettings.advert_interval) {
|
||||
update.advert_interval = newAdvertInterval;
|
||||
}
|
||||
if (floodScope !== stripRegionScopePrefix(appSettings.flood_scope)) {
|
||||
update.flood_scope = floodScope;
|
||||
}
|
||||
@@ -302,6 +414,27 @@ export function SettingsRadioSection({
|
||||
}
|
||||
};
|
||||
|
||||
const [advertIntervalBusy, setAdvertIntervalBusy] = useState(false);
|
||||
const [advertIntervalError, setAdvertIntervalError] = useState<string | null>(null);
|
||||
|
||||
const handleSaveAdvertInterval = async () => {
|
||||
setAdvertIntervalError(null);
|
||||
setAdvertIntervalBusy(true);
|
||||
|
||||
try {
|
||||
const hours = parseInt(advertIntervalHours, 10);
|
||||
const newAdvertInterval = isNaN(hours) ? 0 : hours * 3600;
|
||||
if (newAdvertInterval !== appSettings.advert_interval) {
|
||||
await onSaveAppSettings({ advert_interval: newAdvertInterval });
|
||||
}
|
||||
toast.success('Advertising interval saved');
|
||||
} catch (err) {
|
||||
setAdvertIntervalError(err instanceof Error ? err.message : 'Failed to save');
|
||||
} finally {
|
||||
setAdvertIntervalBusy(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleAdvertise = async (mode: RadioAdvertMode) => {
|
||||
setAdvertisingMode(mode);
|
||||
try {
|
||||
@@ -320,6 +453,169 @@ export function SettingsRadioSection({
|
||||
}
|
||||
};
|
||||
|
||||
const importInputRef = useRef<HTMLInputElement>(null);
|
||||
const [keyImportDialogOpen, setKeyImportDialogOpen] = useState(false);
|
||||
const pendingImportRef = useRef<Record<string, unknown> | null>(null);
|
||||
|
||||
const buildConfigProfile = () => ({
|
||||
version: 1,
|
||||
exported_at: new Date().toISOString(),
|
||||
name: config.name,
|
||||
lat: config.lat,
|
||||
lon: config.lon,
|
||||
tx_power: config.tx_power,
|
||||
radio: { ...config.radio },
|
||||
path_hash_mode: config.path_hash_mode,
|
||||
advert_location_source: config.advert_location_source ?? 'current',
|
||||
multi_acks_enabled: config.multi_acks_enabled ?? false,
|
||||
});
|
||||
|
||||
const downloadJson = (profile: object, suffix: string) => {
|
||||
const blob = new Blob([JSON.stringify(profile, null, 2)], { type: 'application/json' });
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
const safeName = (config.name || 'radio').replace(/[^a-zA-Z0-9_-]/g, '_');
|
||||
const timestamp = new Date()
|
||||
.toLocaleString(undefined, {
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
hour12: false,
|
||||
})
|
||||
.replace(/[/:, ]+/g, '-');
|
||||
a.download = `${safeName}-${suffix}-${timestamp}.json`;
|
||||
a.click();
|
||||
URL.revokeObjectURL(url);
|
||||
};
|
||||
|
||||
const handleExportConfig = async () => {
|
||||
const profile = buildConfigProfile();
|
||||
try {
|
||||
const { private_key } = await api.getPrivateKey();
|
||||
downloadJson({ ...profile, private_key }, 'config');
|
||||
toast.success('Export generated with private key');
|
||||
} catch {
|
||||
downloadJson(profile, 'config');
|
||||
toast.info('Export generated without private key', {
|
||||
description: 'See README_ADVANCED.md for private key export enable',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const validateImportData = (
|
||||
data: unknown
|
||||
): data is {
|
||||
name: string;
|
||||
radio: { freq: number; bw: number; sf: number; cr: number };
|
||||
[k: string]: unknown;
|
||||
} =>
|
||||
typeof data === 'object' &&
|
||||
data !== null &&
|
||||
'name' in data &&
|
||||
typeof (data as Record<string, unknown>).name === 'string' &&
|
||||
'radio' in data &&
|
||||
typeof (data as Record<string, unknown>).radio === 'object' &&
|
||||
(data as Record<string, unknown>).radio !== null &&
|
||||
typeof (data as Record<string, Record<string, unknown>>).radio.freq === 'number' &&
|
||||
typeof (data as Record<string, Record<string, unknown>>).radio.bw === 'number' &&
|
||||
typeof (data as Record<string, Record<string, unknown>>).radio.sf === 'number' &&
|
||||
typeof (data as Record<string, Record<string, unknown>>).radio.cr === 'number';
|
||||
|
||||
const populateFormFromImport = (data: Record<string, unknown>) => {
|
||||
const radio = data.radio as { freq: number; bw: number; sf: number; cr: number };
|
||||
setName(data.name as string);
|
||||
if (typeof data.lat === 'number') setLat(String(data.lat));
|
||||
if (typeof data.lon === 'number') setLon(String(data.lon));
|
||||
if (typeof data.tx_power === 'number') setTxPower(String(data.tx_power));
|
||||
setFreq(String(radio.freq));
|
||||
setBw(String(radio.bw));
|
||||
setSf(String(radio.sf));
|
||||
setCr(String(radio.cr));
|
||||
if (typeof data.path_hash_mode === 'number') setPathHashMode(String(data.path_hash_mode));
|
||||
if (data.advert_location_source === 'off' || data.advert_location_source === 'current')
|
||||
setAdvertLocationSource(data.advert_location_source);
|
||||
if (typeof data.multi_acks_enabled === 'boolean') setMultiAcksEnabled(data.multi_acks_enabled);
|
||||
};
|
||||
|
||||
const buildUpdateFromImport = (data: Record<string, unknown>): RadioConfigUpdate => {
|
||||
const radio = data.radio as { freq: number; bw: number; sf: number; cr: number };
|
||||
const update: RadioConfigUpdate = {
|
||||
name: data.name as string,
|
||||
lat: typeof data.lat === 'number' ? data.lat : config.lat,
|
||||
lon: typeof data.lon === 'number' ? data.lon : config.lon,
|
||||
tx_power: typeof data.tx_power === 'number' ? (data.tx_power as number) : config.tx_power,
|
||||
radio,
|
||||
};
|
||||
if (data.advert_location_source === 'off' || data.advert_location_source === 'current')
|
||||
update.advert_location_source = data.advert_location_source;
|
||||
if (typeof data.multi_acks_enabled === 'boolean')
|
||||
update.multi_acks_enabled = data.multi_acks_enabled;
|
||||
if (config.path_hash_mode_supported && typeof data.path_hash_mode === 'number')
|
||||
update.path_hash_mode = data.path_hash_mode as number;
|
||||
return update;
|
||||
};
|
||||
|
||||
const applyImport = async (data: Record<string, unknown>) => {
|
||||
populateFormFromImport(data);
|
||||
const update = buildUpdateFromImport(data);
|
||||
|
||||
setBusy(true);
|
||||
setRebooting(true);
|
||||
try {
|
||||
if (typeof data.private_key === 'string' && data.private_key) {
|
||||
await onSetPrivateKey(data.private_key);
|
||||
toast.success('Config + private key imported, saving & rebooting...');
|
||||
} else {
|
||||
toast.success('Config imported, saving & rebooting...');
|
||||
}
|
||||
await onSave(update);
|
||||
await onReboot();
|
||||
if (!pageMode) onClose();
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to import config');
|
||||
} finally {
|
||||
setRebooting(false);
|
||||
setBusy(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleImportConfig = async (file: File) => {
|
||||
try {
|
||||
const text = await file.text();
|
||||
const data = JSON.parse(text);
|
||||
|
||||
if (!validateImportData(data)) {
|
||||
toast.error('Invalid config file', {
|
||||
description: 'File must contain name and radio parameters (freq, bw, sf, cr)',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof data.private_key === 'string' && data.private_key) {
|
||||
// Private key present — show warning dialog before applying
|
||||
pendingImportRef.current = data;
|
||||
setKeyImportDialogOpen(true);
|
||||
} else {
|
||||
await applyImport(data);
|
||||
}
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to import config');
|
||||
} finally {
|
||||
if (importInputRef.current) importInputRef.current.value = '';
|
||||
}
|
||||
};
|
||||
|
||||
const handleConfirmKeyImport = async () => {
|
||||
setKeyImportDialogOpen(false);
|
||||
const data = pendingImportRef.current;
|
||||
pendingImportRef.current = null;
|
||||
if (data) await applyImport(data);
|
||||
};
|
||||
|
||||
const radioState =
|
||||
health?.radio_state ?? (health?.radio_initializing ? 'initializing' : 'disconnected');
|
||||
const connectionActionLabel =
|
||||
@@ -414,6 +710,9 @@ export function SettingsRadioSection({
|
||||
</span>
|
||||
</div>
|
||||
{deviceInfoLabel && <p className="text-sm text-muted-foreground">{deviceInfoLabel}</p>}
|
||||
|
||||
{health?.radio_stats && <RadioDetailsCollapsible stats={health.radio_stats} />}
|
||||
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
@@ -678,6 +977,37 @@ export function SettingsRadioSection({
|
||||
Some settings may require a reboot to take effect on some radios.
|
||||
</p>
|
||||
|
||||
<div className="flex gap-2">
|
||||
<Button variant="outline" size="sm" onClick={handleExportConfig} className="flex-1">
|
||||
<Download className="mr-1.5 h-4 w-4" aria-hidden="true" />
|
||||
Export Config
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => importInputRef.current?.click()}
|
||||
disabled={busy || rebooting}
|
||||
className="flex-1"
|
||||
>
|
||||
<Upload className="mr-1.5 h-4 w-4" aria-hidden="true" />
|
||||
Import & Reboot
|
||||
</Button>
|
||||
<input
|
||||
ref={importInputRef}
|
||||
type="file"
|
||||
accept=".json"
|
||||
className="hidden"
|
||||
onChange={(e) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (file) handleImportConfig(file);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
Export saves the current server config to a JSON file. Import loads a config file, applies
|
||||
it, and reboots the radio.
|
||||
</p>
|
||||
|
||||
<Separator />
|
||||
|
||||
{/* ── Messaging ── */}
|
||||
@@ -733,9 +1063,9 @@ export function SettingsRadioSection({
|
||||
placeholder="MyRegion"
|
||||
/>
|
||||
<p className="text-[0.8125rem] text-muted-foreground">
|
||||
Tag outgoing flood messages with a region name (e.g. MyRegion). Repeaters configured for
|
||||
that region can forward the traffic, while repeaters configured to deny other regions may
|
||||
drop it. Leave empty to disable.
|
||||
Tag outgoing messages with a region name (e.g. MyRegion). Repeaters configured for that
|
||||
region can forward the traffic, while repeaters configured to deny other regions may drop
|
||||
it. Leave empty to disable.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -795,6 +1125,18 @@ export function SettingsRadioSection({
|
||||
How often to automatically advertise presence. Set to 0 to disable. Minimum: 1 hour.
|
||||
Recommended: 24 hours or higher.
|
||||
</p>
|
||||
{advertIntervalError && (
|
||||
<div className="text-sm text-destructive" role="alert">
|
||||
{advertIntervalError}
|
||||
</div>
|
||||
)}
|
||||
<Button
|
||||
onClick={handleSaveAdvertInterval}
|
||||
disabled={advertIntervalBusy}
|
||||
className="w-full"
|
||||
>
|
||||
{advertIntervalBusy ? 'Saving...' : 'Save Advertising Interval'}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
@@ -907,6 +1249,44 @@ export function SettingsRadioSection({
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* ── Private Key Import Warning ── */}
|
||||
<Dialog
|
||||
open={keyImportDialogOpen}
|
||||
onOpenChange={(open) => {
|
||||
setKeyImportDialogOpen(open);
|
||||
if (!open) pendingImportRef.current = null;
|
||||
}}
|
||||
>
|
||||
<DialogContent>
|
||||
<DialogHeader>
|
||||
<DialogTitle>Import includes Private Key</DialogTitle>
|
||||
<DialogDescription>
|
||||
This config file contains a private key. Importing it will change your radio's
|
||||
identity — your radio will have a new public key and other nodes will see it as
|
||||
a different device. This cannot be undone without the original key.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogFooter>
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setKeyImportDialogOpen(false);
|
||||
pendingImportRef.current = null;
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleConfirmKeyImport}
|
||||
className="border-destructive/50 text-destructive hover:bg-destructive/10"
|
||||
variant="outline"
|
||||
>
|
||||
Import Config & Key
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -50,8 +50,8 @@ export function useContactsAndChannels({
|
||||
}, []);
|
||||
|
||||
const handleCreateContact = useCallback(
|
||||
async (name: string, publicKey: string, tryHistorical: boolean) => {
|
||||
const created = await api.createContact(publicKey, name || undefined, tryHistorical);
|
||||
async (name: string, publicKey: string, tryHistorical: boolean, type?: number) => {
|
||||
const created = await api.createContact(publicKey, name || undefined, tryHistorical, type);
|
||||
const data = await fetchAllContacts();
|
||||
setContacts(data);
|
||||
|
||||
|
||||
@@ -37,6 +37,33 @@ function urlBase64ToUint8Array(base64String: string): Uint8Array {
|
||||
return arr;
|
||||
}
|
||||
|
||||
/** Race a promise against a timeout; rejects with a descriptive error on expiry. */
|
||||
function withTimeout<T>(promise: Promise<T>, ms: number, label: string): Promise<T> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(
|
||||
() =>
|
||||
reject(
|
||||
new Error(
|
||||
`${label} timed out — the service worker may have failed to install. ` +
|
||||
'Mobile browsers require a trusted TLS certificate for service workers, ' +
|
||||
'even if the page itself loads with a self-signed cert.'
|
||||
)
|
||||
),
|
||||
ms
|
||||
);
|
||||
promise.then(
|
||||
(v) => {
|
||||
clearTimeout(timer);
|
||||
resolve(v);
|
||||
},
|
||||
(e) => {
|
||||
clearTimeout(timer);
|
||||
reject(e);
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function uint8ArraysEqual(a: Uint8Array | null, b: Uint8Array): boolean {
|
||||
if (!a || a.length !== b.length) return false;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
@@ -109,8 +136,9 @@ export function usePushSubscription(): PushSubscriptionState {
|
||||
const subsPromise = api.getPushSubscriptions().catch(() => [] as PushSubscriptionInfo[]);
|
||||
|
||||
// Check if THIS browser has an active push subscription and match it
|
||||
// to a backend record.
|
||||
navigator.serviceWorker.ready
|
||||
// to a backend record. Use a timeout so we don't hang forever when the
|
||||
// service worker failed to install (e.g. mobile + self-signed cert).
|
||||
withTimeout(navigator.serviceWorker.ready, 1_000, 'Service worker activation')
|
||||
.then((reg) => reg.pushManager.getSubscription())
|
||||
.then(async (sub) => {
|
||||
const existing = await subsPromise;
|
||||
@@ -129,7 +157,11 @@ export function usePushSubscription(): PushSubscriptionState {
|
||||
const refreshSubscriptions = useCallback(async () => {
|
||||
try {
|
||||
const subs = await api.getPushSubscriptions();
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
const reg = await withTimeout(
|
||||
navigator.serviceWorker.ready,
|
||||
10_000,
|
||||
'Service worker activation'
|
||||
);
|
||||
const sub = await reg.pushManager.getSubscription();
|
||||
reconcileCurrentSubscription(subs, sub?.endpoint ?? null);
|
||||
return subs;
|
||||
@@ -155,7 +187,11 @@ export function usePushSubscription(): PushSubscriptionState {
|
||||
vapidKeyRef.current = resp.public_key;
|
||||
const vapidKeyBytes = urlBase64ToUint8Array(resp.public_key);
|
||||
|
||||
const reg = await navigator.serviceWorker.ready;
|
||||
const reg = await withTimeout(
|
||||
navigator.serviceWorker.ready,
|
||||
3_000,
|
||||
'Service worker activation'
|
||||
);
|
||||
let pushSub = await reg.pushManager.getSubscription();
|
||||
const existingKeyBytes = getApplicationServerKeyBytes(pushSub?.options?.applicationServerKey);
|
||||
const requiresRecreate =
|
||||
@@ -188,6 +224,7 @@ export function usePushSubscription(): PushSubscriptionState {
|
||||
console.error('Push subscribe failed:', err);
|
||||
toast.error('Failed to enable push notifications', {
|
||||
description: err instanceof Error ? err.message : 'Check that notifications are allowed',
|
||||
duration: 8_000,
|
||||
});
|
||||
return null;
|
||||
} finally {
|
||||
|
||||
@@ -24,5 +24,7 @@ createRoot(document.getElementById('root')!).render(
|
||||
|
||||
// Register service worker for Web Push (requires secure context)
|
||||
if ('serviceWorker' in navigator && window.isSecureContext) {
|
||||
navigator.serviceWorker.register('./sw.js').catch(() => {});
|
||||
navigator.serviceWorker.register('./sw.js').catch((err) => {
|
||||
console.warn('Service worker registration failed:', err);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6,7 +6,10 @@
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
html,
|
||||
html {
|
||||
height: 100dvh;
|
||||
}
|
||||
|
||||
body,
|
||||
#root {
|
||||
height: 100%;
|
||||
|
||||
@@ -111,6 +111,7 @@ beforeEach(() => {
|
||||
tracked_telemetry_repeaters: [],
|
||||
auto_resend_channel: false,
|
||||
telemetry_interval_hours: 8,
|
||||
telemetry_routed_hourly: false,
|
||||
});
|
||||
mockedApi.getRadioConfig.mockResolvedValue({
|
||||
public_key: 'aa'.repeat(32),
|
||||
@@ -1050,6 +1051,7 @@ describe('SettingsFanoutSection', () => {
|
||||
tracked_telemetry_repeaters: ['cc'.repeat(32)],
|
||||
auto_resend_channel: false,
|
||||
telemetry_interval_hours: 8,
|
||||
telemetry_routed_hourly: false,
|
||||
});
|
||||
|
||||
renderSection();
|
||||
|
||||
@@ -51,7 +51,7 @@ describe('MessageInput', () => {
|
||||
}
|
||||
|
||||
function getInput() {
|
||||
return screen.getByPlaceholderText('Type a message...') as HTMLInputElement;
|
||||
return screen.getByPlaceholderText('Type a message...') as HTMLTextAreaElement;
|
||||
}
|
||||
|
||||
function getSendButton() {
|
||||
|
||||
@@ -172,7 +172,7 @@ describe('NewMessageModal form reset', () => {
|
||||
await user.click(screen.getByRole('button', { name: 'Create' }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onCreateContact).toHaveBeenCalledWith('Bob', 'bb'.repeat(32), false);
|
||||
expect(onCreateContact).toHaveBeenCalledWith('Bob', 'bb'.repeat(32), false, 1);
|
||||
});
|
||||
expect(onClose).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -94,6 +94,8 @@ describe('buildRawPacketStatsSnapshot', () => {
|
||||
sender: 'Alpha',
|
||||
channel_key: null,
|
||||
contact_key: '0a'.repeat(32),
|
||||
sender_timestamp: null,
|
||||
message: null,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -145,7 +147,9 @@ describe('buildRawPacketStatsSnapshot', () => {
|
||||
'2-5',
|
||||
'6-10',
|
||||
'11-15',
|
||||
'16+',
|
||||
'16-20',
|
||||
'21-31',
|
||||
'32+',
|
||||
]);
|
||||
expect(stats.hopProfile).toEqual(
|
||||
expect.arrayContaining([
|
||||
@@ -154,7 +158,9 @@ describe('buildRawPacketStatsSnapshot', () => {
|
||||
expect.objectContaining({ label: '2-5', count: 1 }),
|
||||
expect.objectContaining({ label: '6-10', count: 0 }),
|
||||
expect.objectContaining({ label: '11-15', count: 0 }),
|
||||
expect.objectContaining({ label: '16+', count: 0 }),
|
||||
expect.objectContaining({ label: '16-20', count: 0 }),
|
||||
expect.objectContaining({ label: '21-31', count: 0 }),
|
||||
expect.objectContaining({ label: '32+', count: 0 }),
|
||||
])
|
||||
);
|
||||
expect(stats.hopByteWidthProfile).toEqual(
|
||||
|
||||
@@ -438,6 +438,7 @@ describe('RepeaterDashboard', () => {
|
||||
flood_dups: 1,
|
||||
direct_dups: 0,
|
||||
full_events: 0,
|
||||
recv_errors: 5,
|
||||
telemetry_history: [],
|
||||
};
|
||||
|
||||
@@ -707,6 +708,7 @@ describe('RepeaterDashboard', () => {
|
||||
flood_dups: 1,
|
||||
direct_dups: 0,
|
||||
full_events: 0,
|
||||
recv_errors: null,
|
||||
telemetry_history: [liveEntry],
|
||||
};
|
||||
|
||||
@@ -742,6 +744,7 @@ describe('RepeaterDashboard', () => {
|
||||
flood_dups: 1,
|
||||
direct_dups: 0,
|
||||
full_events: 0,
|
||||
recv_errors: null,
|
||||
telemetry_history: [{ timestamp: 1700000000, data: { battery_volts: 4.2 } }],
|
||||
};
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import { SettingsModal } from '../components/SettingsModal';
|
||||
import type {
|
||||
AppSettings,
|
||||
AppSettingsUpdate,
|
||||
Contact,
|
||||
HealthStatus,
|
||||
RadioAdvertMode,
|
||||
RadioConfig,
|
||||
@@ -71,6 +72,7 @@ const baseSettings: AppSettings = {
|
||||
tracked_telemetry_repeaters: [],
|
||||
auto_resend_channel: false,
|
||||
telemetry_interval_hours: 8,
|
||||
telemetry_routed_hourly: false,
|
||||
};
|
||||
|
||||
function renderModal(overrides?: {
|
||||
@@ -89,6 +91,8 @@ function renderModal(overrides?: {
|
||||
meshDiscovery?: RadioDiscoveryResponse | null;
|
||||
meshDiscoveryLoadingTarget?: RadioDiscoveryTarget | null;
|
||||
onDiscoverMesh?: (target: RadioDiscoveryTarget) => Promise<void>;
|
||||
contacts?: Contact[];
|
||||
trackedTelemetryRepeaters?: string[];
|
||||
open?: boolean;
|
||||
pageMode?: boolean;
|
||||
externalSidebarNav?: boolean;
|
||||
@@ -127,6 +131,8 @@ function renderModal(overrides?: {
|
||||
onDiscoverMesh,
|
||||
onHealthRefresh: vi.fn(async () => {}),
|
||||
onRefreshAppSettings,
|
||||
contacts: overrides?.contacts,
|
||||
trackedTelemetryRepeaters: overrides?.trackedTelemetryRepeaters,
|
||||
};
|
||||
|
||||
const view = overrides?.externalSidebarNav
|
||||
@@ -794,4 +800,68 @@ describe('SettingsModal', () => {
|
||||
expect(screen.getByText('Network')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('renders routed hourly checkbox and calls save on toggle', async () => {
|
||||
const onSaveAppSettings = vi.fn(async () => {});
|
||||
|
||||
renderModal({
|
||||
externalSidebarNav: true,
|
||||
desktopSection: 'database',
|
||||
onSaveAppSettings,
|
||||
});
|
||||
|
||||
const checkbox = screen.getByRole('checkbox', {
|
||||
name: /Poll direct\/routed-path repeaters hourly/i,
|
||||
}) as HTMLInputElement;
|
||||
|
||||
expect(checkbox).toBeInTheDocument();
|
||||
expect(checkbox.checked).toBe(false);
|
||||
|
||||
fireEvent.click(checkbox);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onSaveAppSettings).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ telemetry_routed_hourly: true })
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('shows route badge per tracked repeater', async () => {
|
||||
const directKey = 'bb'.repeat(32);
|
||||
|
||||
renderModal({
|
||||
externalSidebarNav: true,
|
||||
desktopSection: 'database',
|
||||
appSettings: {
|
||||
...baseSettings,
|
||||
tracked_telemetry_repeaters: [directKey],
|
||||
},
|
||||
trackedTelemetryRepeaters: [directKey],
|
||||
contacts: [
|
||||
{
|
||||
public_key: directKey,
|
||||
name: 'DirectRepeater',
|
||||
type: 2,
|
||||
flags: 0,
|
||||
direct_path: 'aabb',
|
||||
direct_path_len: 1,
|
||||
direct_path_hash_mode: 1,
|
||||
last_advert: null,
|
||||
lat: null,
|
||||
lon: null,
|
||||
last_seen: null,
|
||||
on_radio: false,
|
||||
favorite: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
effective_route: { path: 'aabb', path_len: 1, path_hash_mode: 1 },
|
||||
effective_route_source: 'direct',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(screen.getByText('DirectRepeater')).toBeInTheDocument();
|
||||
expect(screen.getByText('direct')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,192 @@
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import {
|
||||
getTextReplaceEnabled,
|
||||
setTextReplaceEnabled,
|
||||
getTextReplaceMapJson,
|
||||
setTextReplaceMapJson,
|
||||
applyTextReplacements,
|
||||
DEFAULT_MAP_JSON,
|
||||
} from '../utils/textReplace';
|
||||
|
||||
beforeEach(() => {
|
||||
localStorage.clear();
|
||||
});
|
||||
|
||||
describe('enabled toggle', () => {
|
||||
it('defaults to disabled', () => {
|
||||
expect(getTextReplaceEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it('persists enabled state', () => {
|
||||
setTextReplaceEnabled(true);
|
||||
expect(getTextReplaceEnabled()).toBe(true);
|
||||
setTextReplaceEnabled(false);
|
||||
expect(getTextReplaceEnabled()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('map JSON persistence', () => {
|
||||
it('returns default map when nothing stored', () => {
|
||||
expect(getTextReplaceMapJson()).toBe(DEFAULT_MAP_JSON);
|
||||
});
|
||||
|
||||
it('persists valid JSON and returns null', () => {
|
||||
const json = '{"a":"b"}';
|
||||
expect(setTextReplaceMapJson(json)).toBeNull();
|
||||
expect(getTextReplaceMapJson()).toBe(json);
|
||||
});
|
||||
|
||||
it('rejects invalid JSON with error string', () => {
|
||||
const err = setTextReplaceMapJson('not json');
|
||||
expect(err).toBeTypeOf('string');
|
||||
// localStorage unchanged — still returns default
|
||||
expect(getTextReplaceMapJson()).toBe(DEFAULT_MAP_JSON);
|
||||
});
|
||||
|
||||
it('rejects arrays', () => {
|
||||
expect(setTextReplaceMapJson('["a","b"]')).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
it('rejects non-string values', () => {
|
||||
expect(setTextReplaceMapJson('{"a":123}')).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
it('rejects null', () => {
|
||||
expect(setTextReplaceMapJson('null')).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
it('accepts empty object', () => {
|
||||
expect(setTextReplaceMapJson('{}')).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('re-expansion validation', () => {
|
||||
it('rejects when a key appears in its own replacement', () => {
|
||||
const err = setTextReplaceMapJson(JSON.stringify({ a: 'aa' }));
|
||||
expect(err).toBeTypeOf('string');
|
||||
expect(err).toContain('"a"');
|
||||
expect(err).toContain('"aa"');
|
||||
});
|
||||
|
||||
it('rejects when a key appears in another replacement', () => {
|
||||
const err = setTextReplaceMapJson(JSON.stringify({ a: 'X', b: 'ab' }));
|
||||
expect(err).toBeTypeOf('string');
|
||||
expect(err).toContain('"a"');
|
||||
expect(err).toContain('"ab"');
|
||||
});
|
||||
|
||||
it('allows replacements that do not contain any key', () => {
|
||||
expect(setTextReplaceMapJson(JSON.stringify({ a: 'X', b: 'Y' }))).toBeNull();
|
||||
});
|
||||
|
||||
it('allows the default Cyrillic map', () => {
|
||||
expect(setTextReplaceMapJson(DEFAULT_MAP_JSON)).toBeNull();
|
||||
});
|
||||
|
||||
it('does not check empty keys for re-expansion', () => {
|
||||
// Empty key is silently skipped by buildReplacements, so it should not
|
||||
// cause a re-expansion rejection for other entries.
|
||||
expect(setTextReplaceMapJson(JSON.stringify({ '': 'x', b: 'Y' }))).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('applyTextReplacements', () => {
|
||||
const simpleMap = JSON.stringify({ a: 'X', b: 'Y' });
|
||||
|
||||
it('returns null when no replacements match', () => {
|
||||
expect(applyTextReplacements('hello', 5, simpleMap)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null for empty map', () => {
|
||||
expect(applyTextReplacements('abc', 3, '{}')).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null for invalid JSON', () => {
|
||||
expect(applyTextReplacements('abc', 3, 'broken')).toBeNull();
|
||||
});
|
||||
|
||||
it('replaces a single character with cursor at end', () => {
|
||||
const result = applyTextReplacements('a', 1, simpleMap);
|
||||
expect(result).toEqual({ text: 'X', cursor: 1 });
|
||||
});
|
||||
|
||||
it('replaces multiple characters in one pass', () => {
|
||||
const result = applyTextReplacements('ab', 2, simpleMap);
|
||||
expect(result).toEqual({ text: 'XY', cursor: 2 });
|
||||
});
|
||||
|
||||
it('adjusts cursor when replacement is longer than needle', () => {
|
||||
const map = JSON.stringify({ ':)': 'smiley' });
|
||||
// "hello :)" cursor at end (8)
|
||||
const result = applyTextReplacements('hello :)', 8, map);
|
||||
expect(result).toEqual({ text: 'hello smiley', cursor: 12 });
|
||||
});
|
||||
|
||||
it('adjusts cursor when replacement is shorter than needle', () => {
|
||||
const map = JSON.stringify({ abc: 'Z' });
|
||||
// "abcdef" cursor at end (6)
|
||||
const result = applyTextReplacements('abcdef', 6, map);
|
||||
expect(result).toEqual({ text: 'Zdef', cursor: 4 });
|
||||
});
|
||||
|
||||
it('preserves cursor position when replacement is before cursor', () => {
|
||||
const map = JSON.stringify({ a: 'XX' });
|
||||
// "a_b" cursor at 2 (on 'b'), 'a' replaced with 'XX'
|
||||
const result = applyTextReplacements('a_b', 2, map);
|
||||
expect(result).toEqual({ text: 'XX_b', cursor: 3 });
|
||||
});
|
||||
|
||||
it('does not adjust cursor for replacements after cursor', () => {
|
||||
const map = JSON.stringify({ b: 'YY' });
|
||||
// "ab" cursor at 1 (after 'a'), 'b' is after cursor
|
||||
const result = applyTextReplacements('ab', 1, map);
|
||||
expect(result).toEqual({ text: 'aYY', cursor: 1 });
|
||||
});
|
||||
|
||||
it('places cursor after replacement when cursor is inside a multi-char match', () => {
|
||||
const map = JSON.stringify({ abc: 'Z' });
|
||||
// "abc" cursor at 2 (inside the match)
|
||||
const result = applyTextReplacements('abc', 2, map);
|
||||
expect(result).toEqual({ text: 'Z', cursor: 1 });
|
||||
});
|
||||
|
||||
it('handles multiple replacements with cursor tracking', () => {
|
||||
const map = JSON.stringify({ ':)': 'S' });
|
||||
// ":):)" cursor at end (4) — two replacements, each shrinks by 1
|
||||
const result = applyTextReplacements(':):)', 4, map);
|
||||
expect(result).toEqual({ text: 'SS', cursor: 2 });
|
||||
});
|
||||
|
||||
it('cursor between two replacements stays correct', () => {
|
||||
const map = JSON.stringify({ ':)': 'S' });
|
||||
// ":):)" cursor at 2 (between the two smileys)
|
||||
const result = applyTextReplacements(':):)', 2, map);
|
||||
expect(result).toEqual({ text: 'SS', cursor: 1 });
|
||||
});
|
||||
|
||||
it('uses longest match first', () => {
|
||||
const map = JSON.stringify({ ab: 'LONG', a: 'X' });
|
||||
const result = applyTextReplacements('ab', 2, map);
|
||||
expect(result).toEqual({ text: 'LONG', cursor: 4 });
|
||||
});
|
||||
|
||||
it('ignores empty-string keys (no infinite loop)', () => {
|
||||
const map = JSON.stringify({ '': 'oops', a: 'X' });
|
||||
const result = applyTextReplacements('abc', 3, map);
|
||||
expect(result).toEqual({ text: 'Xbc', cursor: 3 });
|
||||
});
|
||||
|
||||
it('works with the default Cyrillic map', () => {
|
||||
// "Привет" — П has no mapping, р→p, и has no mapping, в has no mapping, е→e, т has no mapping
|
||||
const result = applyTextReplacements('Привет', 6, DEFAULT_MAP_JSON);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.text).toBe('Пpивeт');
|
||||
expect(result!.cursor).toBe(6);
|
||||
});
|
||||
|
||||
it('handles paste with many replacements', () => {
|
||||
const map = JSON.stringify({ А: 'A', В: 'B', С: 'C' });
|
||||
const result = applyTextReplacements('АВС', 3, map);
|
||||
expect(result).toEqual({ text: 'ABC', cursor: 3 });
|
||||
});
|
||||
});
|
||||
@@ -150,6 +150,35 @@ describe('usePushSubscription', () => {
|
||||
expect(result.current.allSubscriptions).toEqual([]);
|
||||
});
|
||||
|
||||
it('times out and shows a toast when service worker never activates', async () => {
|
||||
// Replace serviceWorker.ready with a promise that never resolves
|
||||
Object.defineProperty(navigator, 'serviceWorker', {
|
||||
configurable: true,
|
||||
value: {
|
||||
ready: new Promise(() => {}),
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => usePushSubscription());
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isSupported).toBe(true);
|
||||
});
|
||||
|
||||
// subscribe() will hang on serviceWorker.ready, then the 1s timeout fires
|
||||
await act(async () => {
|
||||
await result.current.subscribe();
|
||||
});
|
||||
|
||||
expect(result.current.loading).toBe(false);
|
||||
expect(mocks.toast.error).toHaveBeenCalledWith(
|
||||
'Failed to enable push notifications',
|
||||
expect.objectContaining({
|
||||
description: expect.stringContaining('trusted TLS certificate for service workers'),
|
||||
})
|
||||
);
|
||||
}, 5_000);
|
||||
|
||||
it('recreates a stale browser subscription when the server VAPID key changed', async () => {
|
||||
const oldSubscription = activeSubscription;
|
||||
mocks.api.getPushSubscriptions
|
||||
|
||||
@@ -66,6 +66,8 @@ export interface RadioStatsSnapshot {
|
||||
timestamp: number | null;
|
||||
battery_mv: number | null;
|
||||
uptime_secs: number | null;
|
||||
queue_len: number | null;
|
||||
errors: number | null;
|
||||
noise_floor: number | null;
|
||||
last_rssi: number | null;
|
||||
last_snr: number | null;
|
||||
@@ -341,6 +343,8 @@ export interface RawPacket {
|
||||
sender: string | null;
|
||||
channel_key: string | null;
|
||||
contact_key: string | null;
|
||||
sender_timestamp: number | null;
|
||||
message: string | null;
|
||||
} | null;
|
||||
}
|
||||
|
||||
@@ -357,6 +361,7 @@ export interface AppSettings {
|
||||
tracked_telemetry_repeaters: string[];
|
||||
auto_resend_channel: boolean;
|
||||
telemetry_interval_hours: number;
|
||||
telemetry_routed_hourly: boolean;
|
||||
}
|
||||
|
||||
export interface AppSettingsUpdate {
|
||||
@@ -369,6 +374,7 @@ export interface AppSettingsUpdate {
|
||||
blocked_names?: string[];
|
||||
discovery_blocked_types?: number[];
|
||||
telemetry_interval_hours?: number;
|
||||
telemetry_routed_hourly?: boolean;
|
||||
}
|
||||
|
||||
export interface TelemetrySchedule {
|
||||
@@ -378,6 +384,8 @@ export interface TelemetrySchedule {
|
||||
tracked_count: number;
|
||||
max_tracked: number;
|
||||
next_run_at: number | null;
|
||||
routed_hourly: boolean;
|
||||
next_routed_run_at: number | null;
|
||||
}
|
||||
|
||||
export interface TrackedTelemetryResponse {
|
||||
@@ -436,6 +444,7 @@ export interface RepeaterStatusResponse {
|
||||
flood_dups: number;
|
||||
direct_dups: number;
|
||||
full_events: number;
|
||||
recv_errors: number | null;
|
||||
telemetry_history: TelemetryHistoryEntry[];
|
||||
}
|
||||
|
||||
|
||||
@@ -324,51 +324,56 @@ export function inspectRawPacketWithOptions(
|
||||
createPacketField('payload', `payload-${index}`, segment, structure.payload.startByte)
|
||||
);
|
||||
|
||||
const enrichedPayloadFields =
|
||||
decoded?.isValid && decoded.payloadType === PayloadType.GroupText && decoded.payload.decoded
|
||||
? payloadFields.map((field) => {
|
||||
if (field.name !== 'Ciphertext') {
|
||||
return field;
|
||||
}
|
||||
const payload = decoded.payload.decoded as {
|
||||
decrypted?: { timestamp?: number; flags?: number; sender?: string; message?: string };
|
||||
};
|
||||
if (!payload.decrypted?.message) {
|
||||
return field;
|
||||
}
|
||||
const detailLines = [
|
||||
payload.decrypted.timestamp != null
|
||||
? `Timestamp: ${formatUnixTimestamp(payload.decrypted.timestamp)}`
|
||||
: null,
|
||||
payload.decrypted.flags != null
|
||||
? `Flags: 0x${payload.decrypted.flags.toString(16).padStart(2, '0')}`
|
||||
: null,
|
||||
payload.decrypted.sender ? `Sender: ${payload.decrypted.sender}` : null,
|
||||
`Message: ${payload.decrypted.message}`,
|
||||
].filter((line): line is string => line !== null);
|
||||
return {
|
||||
...field,
|
||||
description: describeCiphertextStructure(
|
||||
decoded.payloadType,
|
||||
field.endByte - field.startByte + 1,
|
||||
field.description
|
||||
),
|
||||
decryptedMessage: detailLines.join('\n'),
|
||||
};
|
||||
})
|
||||
: payloadFields.map((field) => {
|
||||
if (!decoded?.isValid || field.name !== 'Ciphertext') {
|
||||
return field;
|
||||
}
|
||||
return {
|
||||
...field,
|
||||
description: describeCiphertextStructure(
|
||||
decoded.payloadType,
|
||||
field.endByte - field.startByte + 1,
|
||||
field.description
|
||||
),
|
||||
};
|
||||
});
|
||||
const enrichedPayloadFields = payloadFields.map((field) => {
|
||||
if (!decoded?.isValid || field.name !== 'Ciphertext') {
|
||||
return field;
|
||||
}
|
||||
|
||||
const withStructure = {
|
||||
...field,
|
||||
description: describeCiphertextStructure(
|
||||
decoded.payloadType,
|
||||
field.endByte - field.startByte + 1,
|
||||
field.description
|
||||
),
|
||||
};
|
||||
|
||||
// GroupText: client-side decoder has the decrypted content
|
||||
if (decoded.payloadType === PayloadType.GroupText && decoded.payload.decoded) {
|
||||
const payload = decoded.payload.decoded as {
|
||||
decrypted?: { timestamp?: number; flags?: number; sender?: string; message?: string };
|
||||
};
|
||||
if (!payload.decrypted?.message) {
|
||||
return withStructure;
|
||||
}
|
||||
const detailLines = [
|
||||
payload.decrypted.timestamp != null
|
||||
? `Sent (packet): ${formatUnixTimestamp(payload.decrypted.timestamp)}`
|
||||
: null,
|
||||
payload.decrypted.flags != null
|
||||
? `Flags: 0x${payload.decrypted.flags.toString(16).padStart(2, '0')}`
|
||||
: null,
|
||||
payload.decrypted.sender ? `Sender: ${payload.decrypted.sender}` : null,
|
||||
`Message: ${payload.decrypted.message}`,
|
||||
].filter((line): line is string => line !== null);
|
||||
return { ...withStructure, decryptedMessage: detailLines.join('\n') };
|
||||
}
|
||||
|
||||
// TextMessage (DM): server-side decryption via decrypted_info
|
||||
if (decoded.payloadType === PayloadType.TextMessage && packet.decrypted_info?.message) {
|
||||
const info = packet.decrypted_info;
|
||||
const detailLines = [
|
||||
info.sender_timestamp != null
|
||||
? `Sent (packet): ${formatUnixTimestamp(info.sender_timestamp)}`
|
||||
: null,
|
||||
info.sender ? `Sender: ${info.sender}` : null,
|
||||
`Message: ${info.message}`,
|
||||
].filter((line): line is string => line !== null);
|
||||
return { ...withStructure, decryptedMessage: detailLines.join('\n') };
|
||||
}
|
||||
|
||||
return withStructure;
|
||||
});
|
||||
|
||||
return {
|
||||
decoded,
|
||||
|
||||
@@ -322,7 +322,13 @@ function getHopProfileBucket(pathTokenCount: number): string {
|
||||
if (pathTokenCount <= 15) {
|
||||
return '11-15';
|
||||
}
|
||||
return '16+';
|
||||
if (pathTokenCount <= 20) {
|
||||
return '16-20';
|
||||
}
|
||||
if (pathTokenCount <= 31) {
|
||||
return '21-31';
|
||||
}
|
||||
return '32+';
|
||||
}
|
||||
|
||||
export function buildRawPacketStatsSnapshot(
|
||||
@@ -354,7 +360,9 @@ export function buildRawPacketStatsSnapshot(
|
||||
['2-5', 0],
|
||||
['6-10', 0],
|
||||
['11-15', 0],
|
||||
['16+', 0],
|
||||
['16-20', 0],
|
||||
['21-31', 0],
|
||||
['32+', 0],
|
||||
]);
|
||||
const hopByteWidthCounts = new Map<string, number>([
|
||||
['No path', 0],
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
const ENABLED_KEY = 'remoteterm-text-replace-enabled';
|
||||
const MAP_KEY = 'remoteterm-text-replace-map';
|
||||
|
||||
const DEFAULT_MAP: Record<string, string> = {
|
||||
А: 'A',
|
||||
В: 'B',
|
||||
Е: 'E',
|
||||
Ё: 'E',
|
||||
З: '3',
|
||||
К: 'K',
|
||||
М: 'M',
|
||||
Н: 'H',
|
||||
О: 'O',
|
||||
Р: 'P',
|
||||
С: 'C',
|
||||
Т: 'T',
|
||||
Х: 'X',
|
||||
Ь: 'b',
|
||||
а: 'a',
|
||||
е: 'e',
|
||||
ё: 'e',
|
||||
о: 'o',
|
||||
р: 'p',
|
||||
с: 'c',
|
||||
у: 'y',
|
||||
х: 'x',
|
||||
};
|
||||
|
||||
export const DEFAULT_MAP_JSON = JSON.stringify(DEFAULT_MAP, null, 2);
|
||||
|
||||
export function getTextReplaceEnabled(): boolean {
|
||||
try {
|
||||
return localStorage.getItem(ENABLED_KEY) === 'true';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function setTextReplaceEnabled(enabled: boolean): void {
|
||||
try {
|
||||
if (enabled) {
|
||||
localStorage.setItem(ENABLED_KEY, 'true');
|
||||
} else {
|
||||
localStorage.removeItem(ENABLED_KEY);
|
||||
}
|
||||
} catch {
|
||||
// localStorage may be unavailable
|
||||
}
|
||||
}
|
||||
|
||||
export function getTextReplaceMapJson(): string {
|
||||
try {
|
||||
const raw = localStorage.getItem(MAP_KEY);
|
||||
if (raw !== null) return raw;
|
||||
} catch {
|
||||
// fall through
|
||||
}
|
||||
return DEFAULT_MAP_JSON;
|
||||
}
|
||||
|
||||
/** Persist the map JSON only if it's valid. Returns null on success or an error string. */
|
||||
export function setTextReplaceMapJson(json: string): string | null {
|
||||
try {
|
||||
const parsed = JSON.parse(json);
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed))
|
||||
return 'Must be a JSON object.';
|
||||
const rawEntries = Object.entries(parsed);
|
||||
for (const [k, v] of rawEntries) {
|
||||
if (typeof k !== 'string' || typeof v !== 'string')
|
||||
return 'All keys and values must be strings.';
|
||||
}
|
||||
const entries = rawEntries as [string, string][];
|
||||
// Check for re-expansion: no key may appear as a substring of any replacement value.
|
||||
for (const [needle] of entries) {
|
||||
if (needle.length === 0) continue;
|
||||
for (const [, replacement] of entries) {
|
||||
if (replacement.includes(needle)) {
|
||||
return `Key "${needle}" appears inside replacement "${replacement}" and would re-expand on every keystroke.`;
|
||||
}
|
||||
}
|
||||
}
|
||||
localStorage.setItem(MAP_KEY, json);
|
||||
return null;
|
||||
} catch {
|
||||
return 'Invalid JSON.';
|
||||
}
|
||||
}
|
||||
|
||||
/** Build a sorted-by-length-desc array of [needle, replacement] for efficient matching. */
|
||||
function buildReplacements(json: string): [string, string][] {
|
||||
try {
|
||||
const parsed = JSON.parse(json) as Record<string, string>;
|
||||
return Object.entries(parsed)
|
||||
.filter(([k]) => k.length > 0)
|
||||
.sort((a, b) => b[0].length - a[0].length);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply text replacements and compute the adjusted cursor position.
|
||||
* Returns null if nothing changed.
|
||||
*/
|
||||
export function applyTextReplacements(
|
||||
text: string,
|
||||
cursorPos: number,
|
||||
mapJson: string
|
||||
): { text: string; cursor: number } | null {
|
||||
const replacements = buildReplacements(mapJson);
|
||||
if (replacements.length === 0) return null;
|
||||
|
||||
let result = '';
|
||||
let newCursor = cursorPos;
|
||||
let i = 0;
|
||||
|
||||
while (i < text.length) {
|
||||
let matched = false;
|
||||
for (const [needle, replacement] of replacements) {
|
||||
if (text.startsWith(needle, i)) {
|
||||
result += replacement;
|
||||
// Adjust cursor if this match is before or spans the cursor
|
||||
if (i + needle.length <= cursorPos) {
|
||||
newCursor += replacement.length - needle.length;
|
||||
} else if (i < cursorPos) {
|
||||
// Cursor is inside this match — place it after the replacement
|
||||
newCursor = result.length;
|
||||
}
|
||||
i += needle.length;
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matched) {
|
||||
result += text[i];
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
if (result === text) return null;
|
||||
return { text: result, cursor: newCursor };
|
||||
}
|
||||
@@ -29,3 +29,7 @@ MESHCORE_DISABLE_BOTS=true
|
||||
# HTTP Basic Auth (recommended when bots are enabled)
|
||||
#MESHCORE_BASIC_AUTH_USERNAME=
|
||||
#MESHCORE_BASIC_AUTH_PASSWORD=
|
||||
|
||||
# Enable GET /api/radio/private-key to return the in-memory private key as hex
|
||||
# for backup or migration. Only enable on a trusted network.
|
||||
#MESHCORE_ENABLE_LOCAL_PRIVATE_KEY_EXPORT=false
|
||||
|
||||
+3
-3
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "remoteterm-meshcore"
|
||||
version = "3.12.0"
|
||||
version = "3.13.0"
|
||||
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
@@ -12,7 +12,7 @@ dependencies = [
|
||||
"httpx>=0.28.1",
|
||||
"pycryptodome>=3.20.0",
|
||||
"pynacl>=1.5.0",
|
||||
"meshcore==2.3.2",
|
||||
"meshcore==2.3.7",
|
||||
"aiomqtt>=2.0",
|
||||
"apprise>=1.9.8",
|
||||
"boto3>=1.38.0",
|
||||
@@ -61,7 +61,7 @@ reportMissingTypeStubs = false
|
||||
dev = [
|
||||
"httpx>=0.28.1",
|
||||
"pip-licenses>=5.0.0",
|
||||
"pytest>=9.0.2",
|
||||
"pytest>=9.0.3",
|
||||
"pytest-asyncio>=1.3.0",
|
||||
"pytest-xdist>=3.0",
|
||||
"ruff>=0.8.0",
|
||||
|
||||
@@ -63,9 +63,10 @@ test.describe('Apprise integration settings', () => {
|
||||
const preserveIdentity = page.getByText('Preserve identity on Discord');
|
||||
await expect(preserveIdentity).toBeVisible();
|
||||
|
||||
// Verify include routing path checkbox is checked by default
|
||||
const includePath = page.getByText('Include routing path in notifications');
|
||||
await expect(includePath).toBeVisible();
|
||||
// Verify format textareas are present under Message Format heading
|
||||
await expect(page.getByText('Message Format')).toBeVisible();
|
||||
await expect(page.locator('#fanout-apprise-fmt-dm')).toBeVisible();
|
||||
await expect(page.locator('#fanout-apprise-fmt-chan')).toBeVisible();
|
||||
|
||||
// Rename it
|
||||
const nameInput = page.locator('#fanout-edit-name');
|
||||
@@ -94,7 +95,8 @@ test.describe('Apprise integration settings', () => {
|
||||
config: {
|
||||
urls: `${appriseUrl}\nslack://token_a/token_b/token_c`,
|
||||
preserve_identity: false,
|
||||
include_path: false,
|
||||
body_format_dm: '{sender_name}: {text}',
|
||||
body_format_channel: '{channel_name} | {sender_name}: {text}',
|
||||
},
|
||||
enabled: true,
|
||||
});
|
||||
@@ -113,18 +115,18 @@ test.describe('Apprise integration settings', () => {
|
||||
await expect(urlsTextarea).toHaveValue(new RegExp(appriseUrl.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')));
|
||||
await expect(urlsTextarea).toHaveValue(/slack:\/\/token_a/);
|
||||
|
||||
// Verify checkboxes reflect our config (both unchecked)
|
||||
// Verify preserve identity checkbox reflects our config (unchecked)
|
||||
const preserveCheckbox = page
|
||||
.getByText('Preserve identity on Discord')
|
||||
.locator('xpath=ancestor::label[1]')
|
||||
.locator('input[type="checkbox"]');
|
||||
await expect(preserveCheckbox).not.toBeChecked();
|
||||
|
||||
const pathCheckbox = page
|
||||
.getByText('Include routing path in notifications')
|
||||
.locator('xpath=ancestor::label[1]')
|
||||
.locator('input[type="checkbox"]');
|
||||
await expect(pathCheckbox).not.toBeChecked();
|
||||
// Verify format textareas reflect our custom formats
|
||||
const dmFormat = page.locator('#fanout-apprise-fmt-dm');
|
||||
await expect(dmFormat).toHaveValue('{sender_name}: {text}');
|
||||
const chanFormat = page.locator('#fanout-apprise-fmt-chan');
|
||||
await expect(chanFormat).toHaveValue('{channel_name} | {sender_name}: {text}');
|
||||
|
||||
// Go back
|
||||
page.once('dialog', (dialog) => dialog.accept());
|
||||
|
||||
@@ -52,6 +52,12 @@ test.describe('Favorites persistence', () => {
|
||||
return channels.some((c) => c.key === channelKey && c.favorite);
|
||||
})
|
||||
.toBe(false);
|
||||
await expect(page.getByText('Favorites')).not.toBeVisible();
|
||||
// The test channel should no longer appear under the Favorites header —
|
||||
// but the Favorites section itself may remain if radio-synced contacts are favorited.
|
||||
const channelsSectionHeader = page.getByText('Channels');
|
||||
await expect(channelsSectionHeader).toBeVisible();
|
||||
// Verify the channel now appears in the non-favorites Channels section
|
||||
const channelEntry = page.getByText(channelName, { exact: true }).first();
|
||||
await expect(channelEntry).toBeVisible();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -23,8 +23,9 @@ test.describe('Channel messaging in #flightless', () => {
|
||||
// Send it
|
||||
await page.getByRole('button', { name: 'Send', exact: true }).click();
|
||||
|
||||
// Verify message appears in the message list
|
||||
await expect(page.getByText(testMessage)).toBeVisible({ timeout: 15_000 });
|
||||
// Verify message appears in the message list (use locator('span') to avoid
|
||||
// matching the textarea which may briefly retain the sent text)
|
||||
await expect(page.locator('span', { hasText: testMessage })).toBeVisible({ timeout: 15_000 });
|
||||
});
|
||||
|
||||
test('outgoing message shows ack indicator', async ({ page }) => {
|
||||
@@ -37,8 +38,8 @@ test.describe('Channel messaging in #flightless', () => {
|
||||
await input.fill(testMessage);
|
||||
await page.getByRole('button', { name: 'Send', exact: true }).click();
|
||||
|
||||
// Wait for the message to appear
|
||||
const messageEl = page.getByText(testMessage);
|
||||
// Wait for the message to appear in the message list
|
||||
const messageEl = page.locator('span', { hasText: testMessage });
|
||||
await expect(messageEl).toBeVisible({ timeout: 15_000 });
|
||||
|
||||
// Outgoing messages show either "?" (pending) or "✓" (acked)
|
||||
@@ -58,7 +59,7 @@ test.describe('Channel messaging in #flightless', () => {
|
||||
await input.fill(testMessage);
|
||||
await page.getByRole('button', { name: 'Send', exact: true }).click();
|
||||
|
||||
const messageEl = page.getByText(testMessage).first();
|
||||
const messageEl = page.locator('span', { hasText: testMessage }).first();
|
||||
await expect(messageEl).toBeVisible({ timeout: 15_000 });
|
||||
|
||||
const messageContainer = messageEl.locator(
|
||||
@@ -94,6 +95,6 @@ test.describe('Channel messaging in #flightless', () => {
|
||||
await expect(page.getByText('Message resent')).toBeVisible({ timeout: 10_000 });
|
||||
|
||||
// Byte-perfect resend should not create a second visible row in this conversation.
|
||||
await expect(page.getByText(testMessage)).toHaveCount(1);
|
||||
await expect(page.locator('span', { hasText: testMessage })).toHaveCount(1);
|
||||
});
|
||||
});
|
||||
|
||||
+43
-17
@@ -50,7 +50,7 @@ def _patch_require_connected(mc=None, *, detail="Radio not connected"):
|
||||
if mc is None:
|
||||
return patch(
|
||||
"app.services.radio_runtime.radio_runtime.require_connected",
|
||||
side_effect=HTTPException(status_code=503, detail=detail),
|
||||
side_effect=HTTPException(status_code=423, detail=detail),
|
||||
)
|
||||
return patch("app.services.radio_runtime.radio_runtime.require_connected", return_value=mc)
|
||||
|
||||
@@ -203,6 +203,30 @@ class TestHealthEndpoint:
|
||||
class TestDebugEndpoint:
|
||||
"""Test the debug support snapshot endpoint."""
|
||||
|
||||
def test_build_environment_exposes_env_settings(self):
|
||||
"""_build_environment should expose env config without secrets."""
|
||||
from app.config import Settings
|
||||
from app.routers.debug import _build_environment
|
||||
|
||||
with patch(
|
||||
"app.routers.debug.settings",
|
||||
Settings(
|
||||
serial_port="/dev/ttyUSB0",
|
||||
serial_baudrate=115200,
|
||||
log_level="DEBUG",
|
||||
database_path="data/test.db",
|
||||
),
|
||||
):
|
||||
env = _build_environment()
|
||||
|
||||
assert env.connection_type == "serial"
|
||||
assert env.serial_port == "/dev/ttyUSB0"
|
||||
assert env.log_level == "DEBUG"
|
||||
assert env.database_path == "data/test.db"
|
||||
assert not hasattr(env, "ble_pin")
|
||||
assert not hasattr(env, "basic_auth_password")
|
||||
assert not hasattr(env, "basic_auth_username")
|
||||
|
||||
def test_support_snapshot_sanitizes_radio_probe_location_fields(self):
|
||||
"""Debug radio probe should redact advertised lat/lon from self_info."""
|
||||
from app.routers.debug import _sanitize_radio_probe_self_info
|
||||
@@ -300,6 +324,8 @@ class TestDebugEndpoint:
|
||||
assert "multi_acks_enabled" not in payload["radio_probe"]
|
||||
assert "max_channels" not in payload["runtime"]
|
||||
assert "path_hash_mode" not in payload["runtime"]
|
||||
assert "environment" in payload
|
||||
assert payload["environment"]["connection_type"] in ("serial", "tcp", "ble")
|
||||
assert payload["runtime"]["channels_with_incoming_messages"] == 0
|
||||
assert payload["database"]["total_dms"] == 0
|
||||
assert payload["database"]["total_channel_messages"] == 0
|
||||
@@ -396,11 +422,11 @@ class TestDebugEndpoint:
|
||||
|
||||
|
||||
class TestRadioDisconnectedHandler:
|
||||
"""Test that RadioDisconnectedError maps to 503."""
|
||||
"""Test that RadioDisconnectedError maps to 423."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_disconnect_race_returns_503(self, test_db, client):
|
||||
"""If radio disconnects between require_connected() and lock acquisition, return 503."""
|
||||
async def test_disconnect_race_returns_423(self, test_db, client):
|
||||
"""If radio disconnects between require_connected() and lock acquisition, return 423."""
|
||||
pub_key = "ab" * 32
|
||||
await _insert_contact(pub_key, "Alice")
|
||||
|
||||
@@ -411,7 +437,7 @@ class TestRadioDisconnectedHandler:
|
||||
"/api/messages/direct", json={"destination": pub_key, "text": "Hi"}
|
||||
)
|
||||
|
||||
assert response.status_code == 503
|
||||
assert response.status_code == 423
|
||||
assert "not connected" in response.json()["detail"].lower()
|
||||
|
||||
|
||||
@@ -474,25 +500,25 @@ class TestMessagesEndpoint:
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_direct_message_requires_connection(self, test_db, client):
|
||||
"""Sending message when disconnected returns 503."""
|
||||
"""Sending message when disconnected returns 423."""
|
||||
with _patch_require_connected():
|
||||
response = await client.post(
|
||||
"/api/messages/direct", json={"destination": "abc123", "text": "Hello"}
|
||||
)
|
||||
|
||||
assert response.status_code == 503
|
||||
assert response.status_code == 423
|
||||
assert "not connected" in response.json()["detail"].lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_channel_message_requires_connection(self, test_db, client):
|
||||
"""Sending channel message when disconnected returns 503."""
|
||||
"""Sending channel message when disconnected returns 423."""
|
||||
with _patch_require_connected():
|
||||
response = await client.post(
|
||||
"/api/messages/channel",
|
||||
json={"channel_key": "0123456789ABCDEF0123456789ABCDEF", "text": "Hello"},
|
||||
)
|
||||
|
||||
assert response.status_code == 503
|
||||
assert response.status_code == 423
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_direct_message_emits_websocket_message_event(self, test_db, client):
|
||||
@@ -577,8 +603,8 @@ class TestMessagesEndpoint:
|
||||
assert "not found" in response.json()["detail"].lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_direct_message_duplicate_returns_500(self, test_db):
|
||||
"""If MessageRepository.create returns None (duplicate), returns 500."""
|
||||
async def test_send_direct_message_duplicate_returns_422(self, test_db):
|
||||
"""If MessageRepository.create returns None (duplicate), returns 422."""
|
||||
from app.models import SendDirectMessageRequest
|
||||
from app.routers.messages import send_direct_message
|
||||
|
||||
@@ -610,12 +636,12 @@ class TestMessagesEndpoint:
|
||||
SendDirectMessageRequest(destination=pub_key, text="Hello")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert "unexpected duplicate" in exc_info.value.detail.lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_channel_message_duplicate_returns_500(self, test_db):
|
||||
"""If MessageRepository.create returns None (duplicate), returns 500."""
|
||||
async def test_send_channel_message_duplicate_returns_422(self, test_db):
|
||||
"""If MessageRepository.create returns None (duplicate), returns 422."""
|
||||
from app.models import SendChannelMessageRequest
|
||||
from app.routers.messages import send_channel_message
|
||||
|
||||
@@ -646,16 +672,16 @@ class TestMessagesEndpoint:
|
||||
SendChannelMessageRequest(channel_key=chan_key, text="Hello")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert "unexpected duplicate" in exc_info.value.detail.lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resend_channel_message_requires_connection(self, test_db, client):
|
||||
"""Resend endpoint returns 503 when radio is disconnected."""
|
||||
"""Resend endpoint returns 423 when radio is disconnected."""
|
||||
with _patch_require_connected():
|
||||
response = await client.post("/api/messages/channel/1/resend")
|
||||
|
||||
assert response.status_code == 503
|
||||
assert response.status_code == 423
|
||||
assert "not connected" in response.json()["detail"].lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
|
||||
+1
-1
@@ -709,7 +709,7 @@ class TestBotMessageRateLimiting:
|
||||
patch(
|
||||
"app.routers.messages.send_direct_message",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=HTTPException(status_code=500, detail="Send failed"),
|
||||
side_effect=HTTPException(status_code=422, detail="Send failed"),
|
||||
),
|
||||
):
|
||||
await process_bot_response(
|
||||
|
||||
@@ -812,16 +812,14 @@ class TestLwtAndStatusPublish:
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = MagicMock()
|
||||
mock_radio.meshcore.self_info = {"name": "TestNode"}
|
||||
mock_radio.device_info_loaded = True
|
||||
mock_radio.device_model = "T-Deck"
|
||||
mock_radio.firmware_version = "v2.2.2"
|
||||
mock_radio.firmware_build = "2025-01-15"
|
||||
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
patch("app.radio.radio_manager", mock_radio),
|
||||
patch.object(
|
||||
pub,
|
||||
"_fetch_device_info",
|
||||
new_callable=AsyncMock,
|
||||
return_value={"model": "T-Deck", "firmware_version": "v2.2.2 (Build: 2025-01-15)"},
|
||||
),
|
||||
patch.object(
|
||||
pub, "_fetch_stats", new_callable=AsyncMock, return_value={"battery_mv": 4200}
|
||||
),
|
||||
@@ -852,6 +850,82 @@ class TestLwtAndStatusPublish:
|
||||
assert payload["client_version"] == "RemoteTerm/2.4.0-abcdef"
|
||||
assert payload["stats"] == {"battery_mv": 4200}
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_publish_status_uses_fallback_fetch_when_device_info_not_loaded(self):
|
||||
"""When device_info_loaded is False, _fetch_device_info() should be called as fallback."""
|
||||
pub = CommunityMqttPublisher()
|
||||
private_key, public_key = _make_test_keys()
|
||||
settings = SimpleNamespace(community_mqtt_enabled=True, community_mqtt_iata="LAX")
|
||||
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = MagicMock()
|
||||
mock_radio.meshcore.self_info = {"name": "OldNode"}
|
||||
mock_radio.device_info_loaded = False
|
||||
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
patch("app.radio.radio_manager", mock_radio),
|
||||
patch.object(
|
||||
pub,
|
||||
"_fetch_device_info",
|
||||
new_callable=AsyncMock,
|
||||
return_value={"model": "LegacyBoard", "firmware_version": "v2"},
|
||||
) as mock_fetch,
|
||||
patch.object(pub, "_fetch_stats", new_callable=AsyncMock, return_value=None),
|
||||
patch("app.fanout.community_mqtt._build_radio_info", return_value="0,0,0,0"),
|
||||
patch("app.fanout.community_mqtt._get_client_version", return_value="RemoteTerm/0-x"),
|
||||
patch.object(pub, "publish", new_callable=AsyncMock) as mock_publish,
|
||||
):
|
||||
await pub._publish_status(settings)
|
||||
|
||||
mock_fetch.assert_awaited_once()
|
||||
payload = mock_publish.call_args[0][1]
|
||||
assert payload["model"] == "LegacyBoard"
|
||||
assert payload["firmware_version"] == "v2"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_publish_status_reflects_updated_firmware_version_after_reconnect(self):
|
||||
"""After firmware update + radio reconnect, the published firmware_version must be fresh.
|
||||
|
||||
This is a regression test for the stale-cache bug: previously _cached_device_info
|
||||
was never cleared between reconnects, so a radio firmware update was invisible to
|
||||
the Community MQTT status payload until the fanout module itself restarted.
|
||||
"""
|
||||
pub = CommunityMqttPublisher()
|
||||
private_key, public_key = _make_test_keys()
|
||||
settings = SimpleNamespace(community_mqtt_enabled=True, community_mqtt_iata="LAX")
|
||||
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = MagicMock()
|
||||
mock_radio.meshcore.self_info = {"name": "MyNode"}
|
||||
mock_radio.device_info_loaded = True
|
||||
mock_radio.device_model = "T-Deck"
|
||||
mock_radio.firmware_version = "1.14.1"
|
||||
mock_radio.firmware_build = ""
|
||||
|
||||
async def _publish_once(radio_mock):
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
patch("app.radio.radio_manager", radio_mock),
|
||||
patch.object(pub, "_fetch_stats", new_callable=AsyncMock, return_value=None),
|
||||
patch("app.fanout.community_mqtt._build_radio_info", return_value="0,0,0,0"),
|
||||
patch("app.fanout.community_mqtt._get_client_version", return_value="RT/0-x"),
|
||||
patch.object(pub, "publish", new_callable=AsyncMock) as mock_pub,
|
||||
):
|
||||
await pub._publish_status(settings)
|
||||
return mock_pub.call_args[0][1]
|
||||
|
||||
first_payload = await _publish_once(mock_radio)
|
||||
assert first_payload["firmware_version"] == "1.14.1"
|
||||
|
||||
# Simulate firmware update: radio reboots, radio_lifecycle refreshes the manager fields
|
||||
mock_radio.firmware_version = "1.15.0"
|
||||
|
||||
second_payload = await _publish_once(mock_radio)
|
||||
assert second_payload["firmware_version"] == "1.15.0", (
|
||||
"Expected updated firmware version after reconnect; stale cache bug would return v1.14.1"
|
||||
)
|
||||
|
||||
def test_lwt_and_online_share_same_topic(self):
|
||||
"""LWT and on-connect status should use the same topic path."""
|
||||
pub = CommunityMqttPublisher()
|
||||
@@ -896,6 +970,7 @@ class TestLwtAndStatusPublish:
|
||||
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = None
|
||||
mock_radio.device_info_loaded = False
|
||||
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
@@ -1252,18 +1327,16 @@ class TestPublishStatus:
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = MagicMock()
|
||||
mock_radio.meshcore.self_info = {"name": "TestNode"}
|
||||
mock_radio.device_info_loaded = True
|
||||
mock_radio.device_model = "T-Deck"
|
||||
mock_radio.firmware_version = "v2.2.2"
|
||||
mock_radio.firmware_build = "2025-01-15"
|
||||
|
||||
stats = {"battery_mv": 4200, "uptime_secs": 3600, "noise_floor": -120}
|
||||
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
patch("app.radio.radio_manager", mock_radio),
|
||||
patch.object(
|
||||
pub,
|
||||
"_fetch_device_info",
|
||||
new_callable=AsyncMock,
|
||||
return_value={"model": "T-Deck", "firmware_version": "v2.2.2 (Build: 2025-01-15)"},
|
||||
),
|
||||
patch.object(pub, "_fetch_stats", new_callable=AsyncMock, return_value=stats),
|
||||
patch("app.fanout.community_mqtt._build_radio_info", return_value="915.0,250.0,10,8"),
|
||||
patch(
|
||||
@@ -1294,6 +1367,7 @@ class TestPublishStatus:
|
||||
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = None
|
||||
mock_radio.device_info_loaded = False
|
||||
|
||||
with (
|
||||
patch("app.keystore.get_public_key", return_value=public_key),
|
||||
@@ -1326,6 +1400,7 @@ class TestPublishStatus:
|
||||
|
||||
mock_radio = MagicMock()
|
||||
mock_radio.meshcore = None
|
||||
mock_radio.device_info_loaded = False
|
||||
|
||||
before = time.monotonic()
|
||||
|
||||
|
||||
@@ -317,7 +317,7 @@ class TestPathDiscovery:
|
||||
mock_broadcast.assert_called_once_with("contact", updated.model_dump())
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_504_when_no_response_is_heard(self, test_db, client):
|
||||
async def test_returns_408_when_no_response_is_heard(self, test_db, client):
|
||||
await _insert_contact(KEY_A, "Alice", type=1)
|
||||
mc = MagicMock()
|
||||
mc.commands = MagicMock()
|
||||
@@ -332,7 +332,7 @@ class TestPathDiscovery:
|
||||
mock_rm.radio_operation = _noop_radio_operation(mc)
|
||||
response = await client.post(f"/api/contacts/{KEY_A}/path-discovery")
|
||||
|
||||
assert response.status_code == 504
|
||||
assert response.status_code == 408
|
||||
assert "No path discovery response heard" in response.json()["detail"]
|
||||
|
||||
|
||||
|
||||
+264
-9
@@ -1049,7 +1049,8 @@ class TestAppriseFormatBody:
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"}, include_path=False
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
body_format_dm="**DM:** {sender_name}: {text}",
|
||||
)
|
||||
assert body == "**DM:** Alice: hi"
|
||||
|
||||
@@ -1058,7 +1059,7 @@ class TestAppriseFormatBody:
|
||||
|
||||
body = _format_body(
|
||||
{"type": "CHAN", "text": "hi", "sender_name": "Bob", "channel_name": "#general"},
|
||||
include_path=False,
|
||||
body_format_channel="**{channel_name}:** {sender_name}: {text}",
|
||||
)
|
||||
assert body == "**#general:** Bob: hi"
|
||||
|
||||
@@ -1072,7 +1073,7 @@ class TestAppriseFormatBody:
|
||||
"sender_name": "Bob",
|
||||
"channel_name": "#general",
|
||||
},
|
||||
include_path=False,
|
||||
body_format_channel="**{channel_name}:** {sender_name}: {text}",
|
||||
)
|
||||
assert body == "**#general:** Bob: hi"
|
||||
|
||||
@@ -1086,7 +1087,7 @@ class TestAppriseFormatBody:
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "2027"}],
|
||||
},
|
||||
include_path=True,
|
||||
body_format_dm="**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "**via:**" in body
|
||||
assert "`20`" in body
|
||||
@@ -1097,7 +1098,7 @@ class TestAppriseFormatBody:
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
include_path=True,
|
||||
body_format_dm="**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "`direct`" in body
|
||||
|
||||
@@ -1112,7 +1113,7 @@ class TestAppriseFormatBody:
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "aabbccdd", "path_len": 2}],
|
||||
},
|
||||
include_path=True,
|
||||
body_format_dm="**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "**via:**" in body
|
||||
assert "`aabb`" in body
|
||||
@@ -1129,7 +1130,7 @@ class TestAppriseFormatBody:
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "aabbccddeeff", "path_len": 2}],
|
||||
},
|
||||
include_path=True,
|
||||
body_format_dm="**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "**via:**" in body
|
||||
assert "`aabbcc`" in body
|
||||
@@ -1147,7 +1148,7 @@ class TestAppriseFormatBody:
|
||||
"channel_name": "#general",
|
||||
"paths": [{"path": "aabbccdd", "path_len": 2}],
|
||||
},
|
||||
include_path=True,
|
||||
body_format_channel="**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "**#general:**" in body
|
||||
assert "`aabb`" in body
|
||||
@@ -1164,12 +1165,118 @@ class TestAppriseFormatBody:
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "aabb"}],
|
||||
},
|
||||
include_path=True,
|
||||
body_format_dm="**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
)
|
||||
assert "**via:**" in body
|
||||
assert "`aa`" in body
|
||||
assert "`bb`" in body
|
||||
|
||||
def test_default_format_strings(self):
|
||||
"""Default format strings produce expected output."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{
|
||||
"type": "PRIV",
|
||||
"text": "hi",
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "2a3b"}],
|
||||
},
|
||||
)
|
||||
assert body == "**DM:** Alice: hi **via:** [`2a`, `3b`]"
|
||||
|
||||
def test_custom_format_with_rssi(self):
|
||||
"""Custom format string can include rssi/snr."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{
|
||||
"type": "PRIV",
|
||||
"text": "hi",
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": "2a", "rssi": -95, "snr": 6.5}],
|
||||
},
|
||||
body_format_dm="From {sender_name}: {text} (rssi: {rssi}, snr: {snr})",
|
||||
)
|
||||
assert body == "From Alice: hi (rssi: -95, snr: 6.5)"
|
||||
|
||||
def test_unknown_placeholder_left_as_is(self):
|
||||
"""Unknown {placeholders} pass through unchanged."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
body_format_dm="{sender_name}: {text} {unknown_var}",
|
||||
)
|
||||
assert body == "Alice: hi {unknown_var}"
|
||||
|
||||
def test_none_fields_render_empty(self):
|
||||
"""None optional fields render as empty string, not 'None'."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
body_format_dm="{sender_name}: {text} rssi={rssi}",
|
||||
)
|
||||
assert body == "Alice: hi rssi="
|
||||
assert "None" not in body
|
||||
|
||||
def test_hops_direct_when_no_paths(self):
|
||||
"""hops is 'direct' when no path data exists."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "CHAN", "text": "hi", "sender_name": "Bob", "channel_name": "#gen"},
|
||||
body_format_channel="{channel_name} {hops}",
|
||||
)
|
||||
assert body == "#gen direct"
|
||||
|
||||
def test_hops_direct_when_empty_path(self):
|
||||
"""hops is 'direct' when path string is empty."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{
|
||||
"type": "PRIV",
|
||||
"text": "hi",
|
||||
"sender_name": "Alice",
|
||||
"paths": [{"path": ""}],
|
||||
},
|
||||
body_format_dm="{hops}",
|
||||
)
|
||||
assert body == "direct"
|
||||
|
||||
def test_no_re_expansion_of_substituted_values(self):
|
||||
"""Placeholders in message text must not be expanded by later passes."""
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hello {sender_name}", "sender_name": "Alice"},
|
||||
body_format_dm="{sender_name}: {text}",
|
||||
)
|
||||
assert body == "Alice: hello {sender_name}"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_format_string_uses_default(self):
|
||||
"""Empty format strings in config should produce default output, not blank."""
|
||||
from unittest.mock import patch as _patch
|
||||
|
||||
from app.fanout.apprise_mod import AppriseModule
|
||||
|
||||
mod = AppriseModule(
|
||||
"test",
|
||||
{"urls": "json://localhost", "body_format_dm": "", "body_format_channel": " "},
|
||||
)
|
||||
with _patch("app.fanout.apprise_mod._send_sync", return_value=True) as mock_send:
|
||||
await mod.on_message(
|
||||
{"type": "PRIV", "text": "hi", "outgoing": False, "sender_name": "Alice"}
|
||||
)
|
||||
mock_send.assert_called_once()
|
||||
body = mock_send.call_args[0][1]
|
||||
assert "Alice" in body
|
||||
assert "hi" in body
|
||||
assert body != ""
|
||||
|
||||
|
||||
class TestAppriseNormalizeDiscordUrl:
|
||||
def test_discord_scheme(self):
|
||||
@@ -1233,6 +1340,26 @@ class TestAppriseValidation:
|
||||
|
||||
_validate_apprise_config({"urls": "discord://123/abc"})
|
||||
|
||||
def test_validate_apprise_config_accepts_format_strings(self):
|
||||
from app.routers.fanout import _validate_apprise_config
|
||||
|
||||
_validate_apprise_config(
|
||||
{
|
||||
"urls": "discord://123/abc",
|
||||
"body_format_dm": "DM from {sender_name}: {text}",
|
||||
"body_format_channel": "{channel_name}: {text}",
|
||||
}
|
||||
)
|
||||
|
||||
def test_validate_apprise_config_rejects_non_string_format(self):
|
||||
from fastapi import HTTPException
|
||||
|
||||
from app.routers.fanout import _validate_apprise_config
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
_validate_apprise_config({"urls": "discord://123/abc", "body_format_dm": 123})
|
||||
assert exc_info.value.status_code == 400
|
||||
|
||||
def test_enforce_scope_apprise_strips_raw_packets(self):
|
||||
from app.routers.fanout import _enforce_scope
|
||||
|
||||
@@ -1240,6 +1367,134 @@ class TestAppriseValidation:
|
||||
assert scope["raw_packets"] == "none"
|
||||
assert scope["messages"] == "all"
|
||||
|
||||
def test_validate_apprise_config_accepts_markdown_format_bool(self):
|
||||
from app.routers.fanout import _validate_apprise_config
|
||||
|
||||
_validate_apprise_config({"urls": "discord://123/abc", "markdown_format": False})
|
||||
|
||||
def test_validate_apprise_config_normalizes_markdown_format(self):
|
||||
from app.routers.fanout import _validate_apprise_config
|
||||
|
||||
config: dict = {"urls": "discord://123/abc", "markdown_format": 0}
|
||||
_validate_apprise_config(config)
|
||||
assert config["markdown_format"] is False
|
||||
|
||||
def test_validate_apprise_config_works_without_markdown_format(self):
|
||||
from app.routers.fanout import _validate_apprise_config
|
||||
|
||||
_validate_apprise_config({"urls": "discord://123/abc"})
|
||||
|
||||
|
||||
class TestAppriseMarkdownFormat:
|
||||
def test_format_body_markdown_true_uses_markdown_fallback(self):
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
markdown=True,
|
||||
)
|
||||
assert "**DM:**" in body
|
||||
|
||||
def test_format_body_markdown_false_uses_plain_fallback(self):
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "PRIV", "text": "hi", "sender_name": "Alice"},
|
||||
markdown=False,
|
||||
)
|
||||
assert "**" not in body
|
||||
assert "DM:" in body
|
||||
assert "Alice" in body
|
||||
|
||||
def test_format_body_markdown_false_channel(self):
|
||||
from app.fanout.apprise_mod import _format_body
|
||||
|
||||
body = _format_body(
|
||||
{"type": "CHAN", "text": "hi", "sender_name": "Bob", "channel_name": "#gen"},
|
||||
markdown=False,
|
||||
)
|
||||
assert "**" not in body
|
||||
assert "#gen:" in body
|
||||
|
||||
def test_send_sync_passes_markdown_body_format(self):
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
with patch("app.fanout.apprise_mod.apprise_lib", create=True) as mock_lib:
|
||||
mock_notifier = MagicMock()
|
||||
mock_notifier.notify.return_value = True
|
||||
mock_lib.Apprise.return_value = mock_notifier
|
||||
|
||||
with patch.dict("sys.modules", {"apprise": mock_lib}):
|
||||
from app.fanout.apprise_mod import _send_sync
|
||||
|
||||
_send_sync("json://localhost", "test", preserve_identity=False, markdown=True)
|
||||
call_kwargs = mock_notifier.notify.call_args
|
||||
assert call_kwargs.kwargs.get("body_format") or call_kwargs[1].get("body_format")
|
||||
|
||||
def test_send_sync_passes_text_body_format_when_markdown_false(self):
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
with patch("app.fanout.apprise_mod.apprise_lib", create=True) as mock_lib:
|
||||
mock_notifier = MagicMock()
|
||||
mock_notifier.notify.return_value = True
|
||||
mock_lib.Apprise.return_value = mock_notifier
|
||||
|
||||
with patch.dict("sys.modules", {"apprise": mock_lib}):
|
||||
from app.fanout.apprise_mod import _send_sync
|
||||
|
||||
_send_sync("json://localhost", "test", preserve_identity=False, markdown=False)
|
||||
call_kwargs = mock_notifier.notify.call_args
|
||||
assert call_kwargs.kwargs.get("body_format") or call_kwargs[1].get("body_format")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_message_reads_markdown_format_config(self):
|
||||
from unittest.mock import patch as _patch
|
||||
|
||||
from app.fanout.apprise_mod import AppriseModule
|
||||
|
||||
mod = AppriseModule("test", {"urls": "json://localhost", "markdown_format": False})
|
||||
with _patch("app.fanout.apprise_mod._send_sync", return_value=True) as mock_send:
|
||||
await mod.on_message(
|
||||
{"type": "PRIV", "text": "hello", "outgoing": False, "sender_name": "S_Borkin"}
|
||||
)
|
||||
mock_send.assert_called_once()
|
||||
assert mock_send.call_args.kwargs.get("markdown") is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_message_defaults_markdown_true(self):
|
||||
from unittest.mock import patch as _patch
|
||||
|
||||
from app.fanout.apprise_mod import AppriseModule
|
||||
|
||||
mod = AppriseModule("test", {"urls": "json://localhost"})
|
||||
with _patch("app.fanout.apprise_mod._send_sync", return_value=True) as mock_send:
|
||||
await mod.on_message(
|
||||
{"type": "PRIV", "text": "hello", "outgoing": False, "sender_name": "Alice"}
|
||||
)
|
||||
mock_send.assert_called_once()
|
||||
assert mock_send.call_args.kwargs.get("markdown") is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_message_markdown_false_uses_plain_default_format(self):
|
||||
from unittest.mock import patch as _patch
|
||||
|
||||
from app.fanout.apprise_mod import AppriseModule
|
||||
|
||||
mod = AppriseModule("test", {"urls": "json://localhost", "markdown_format": False})
|
||||
with _patch("app.fanout.apprise_mod._send_sync", return_value=True) as mock_send:
|
||||
await mod.on_message(
|
||||
{
|
||||
"type": "CHAN",
|
||||
"text": "hi",
|
||||
"outgoing": False,
|
||||
"sender_name": "Bob",
|
||||
"channel_name": "#general",
|
||||
}
|
||||
)
|
||||
body = mock_send.call_args[0][1]
|
||||
assert "**" not in body
|
||||
assert "#general:" in body
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Comprehensive scope/filter selection logic tests
|
||||
|
||||
@@ -1171,7 +1171,8 @@ class TestFanoutAppriseIntegration:
|
||||
config={
|
||||
"urls": f"json://127.0.0.1:{apprise_capture_server.port}",
|
||||
"preserve_identity": True,
|
||||
"include_path": False,
|
||||
"body_format_dm": "**DM:** {sender_name}: {text}",
|
||||
"body_format_channel": "**{channel_name}:** {sender_name}: {text}",
|
||||
},
|
||||
scope={"messages": "all", "raw_packets": "none"},
|
||||
enabled=True,
|
||||
@@ -1212,7 +1213,8 @@ class TestFanoutAppriseIntegration:
|
||||
name="Channel Apprise",
|
||||
config={
|
||||
"urls": f"json://127.0.0.1:{apprise_capture_server.port}",
|
||||
"include_path": False,
|
||||
"body_format_dm": "**DM:** {sender_name}: {text}",
|
||||
"body_format_channel": "**{channel_name}:** {sender_name}: {text}",
|
||||
},
|
||||
scope={"messages": "all", "raw_packets": "none"},
|
||||
enabled=True,
|
||||
@@ -1541,13 +1543,14 @@ class TestFanoutAppriseIntegration:
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_apprise_includes_routing_path(self, apprise_capture_server, integration_db):
|
||||
"""Apprise with include_path=True shows routing hops in the body."""
|
||||
"""Apprise with hops in format string shows routing hops in the body."""
|
||||
cfg = await FanoutConfigRepository.create(
|
||||
config_type="apprise",
|
||||
name="Path Apprise",
|
||||
config={
|
||||
"urls": f"json://127.0.0.1:{apprise_capture_server.port}",
|
||||
"include_path": True,
|
||||
"body_format_dm": "**DM:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
"body_format_channel": "**{channel_name}:** {sender_name}: {text} **via:** [{hops_backticked}]",
|
||||
},
|
||||
scope={"messages": "all", "raw_packets": "none"},
|
||||
enabled=True,
|
||||
@@ -1577,6 +1580,46 @@ class TestFanoutAppriseIntegration:
|
||||
assert "Eve" in body_text
|
||||
assert "routed msg" in body_text
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_apprise_markdown_false_delivers_plain_text(
|
||||
self, apprise_capture_server, integration_db
|
||||
):
|
||||
"""Apprise with markdown_format=False delivers without markdown formatting."""
|
||||
cfg = await FanoutConfigRepository.create(
|
||||
config_type="apprise",
|
||||
name="Plain Apprise",
|
||||
config={
|
||||
"urls": f"json://127.0.0.1:{apprise_capture_server.port}",
|
||||
"markdown_format": False,
|
||||
},
|
||||
scope={"messages": "all", "raw_packets": "none"},
|
||||
enabled=True,
|
||||
)
|
||||
|
||||
manager = FanoutManager()
|
||||
try:
|
||||
await manager.load_from_db()
|
||||
assert cfg["id"] in manager._modules
|
||||
|
||||
await manager.broadcast_message(
|
||||
{
|
||||
"type": "PRIV",
|
||||
"conversation_key": "pk1",
|
||||
"text": "hello",
|
||||
"sender_name": "S_Borkin",
|
||||
}
|
||||
)
|
||||
|
||||
results = await apprise_capture_server.wait_for(1)
|
||||
finally:
|
||||
await manager.stop_all()
|
||||
|
||||
assert len(results) >= 1
|
||||
body_text = str(results[0])
|
||||
assert "S_Borkin" in body_text
|
||||
assert "hello" in body_text
|
||||
assert "**" not in body_text
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Bot lifecycle tests
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
# run ``run_migrations`` to completion assert ``get_version == LATEST`` and
|
||||
# ``applied == LATEST - starting_version`` so only this constant needs to
|
||||
# change, not every individual assertion.
|
||||
LATEST_SCHEMA_VERSION = 59
|
||||
LATEST_SCHEMA_VERSION = 61
|
||||
|
||||
+2
-2
@@ -342,8 +342,8 @@ class TestConnectionLoop:
|
||||
assert sleep_args[0] == _BACKOFF_MIN
|
||||
assert sleep_args[1] == _BACKOFF_MIN * 2
|
||||
assert sleep_args[2] == _BACKOFF_MIN * 4
|
||||
# Fourth should be capped at _backoff_max (5*8=40 > 30)
|
||||
assert sleep_args[3] == MqttPublisher._backoff_max
|
||||
# Fourth is still doubling (5*8=40), not yet at _backoff_max
|
||||
assert sleep_args[3] == _BACKOFF_MIN * 8
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_waits_for_settings_when_unconfigured(self):
|
||||
|
||||
+91
-1
@@ -7,6 +7,7 @@ import pytest
|
||||
|
||||
from app.fanout.mqtt_ha import (
|
||||
MqttHaModule,
|
||||
_assign_lpp_keys,
|
||||
_contact_tracker_discovery_config,
|
||||
_device_payload,
|
||||
_lpp_discovery_configs,
|
||||
@@ -124,7 +125,7 @@ class TestRadioDiscovery:
|
||||
class TestRepeaterDiscovery:
|
||||
def test_produces_sensor_per_field(self):
|
||||
configs = _repeater_discovery_configs("mc", "ccdd11223344", "Rep1", "aabb")
|
||||
assert len(configs) == 7 # matches _REPEATER_SENSORS length
|
||||
assert len(configs) == 8 # matches _REPEATER_SENSORS length
|
||||
|
||||
topics = [t for t, _ in configs]
|
||||
assert "homeassistant/sensor/meshcore_ccdd11223344/battery_voltage/config" in topics
|
||||
@@ -552,6 +553,45 @@ class TestLppSensorKey:
|
||||
assert _lpp_sensor_key("humidity", 0) == "lpp_humidity_ch0"
|
||||
|
||||
|
||||
class TestAssignLppKeys:
|
||||
def test_no_duplicates(self):
|
||||
sensors = [
|
||||
{"type_name": "temperature", "channel": 1, "value": 20},
|
||||
{"type_name": "humidity", "channel": 2, "value": 45},
|
||||
]
|
||||
result = _assign_lpp_keys(sensors)
|
||||
assert [(k, n) for _, k, n in result] == [
|
||||
("lpp_temperature_ch1", 1),
|
||||
("lpp_humidity_ch2", 1),
|
||||
]
|
||||
|
||||
def test_duplicate_type_and_channel(self):
|
||||
sensors = [
|
||||
{"type_name": "temperature", "channel": 1, "value": 20},
|
||||
{"type_name": "humidity", "channel": 2, "value": 45},
|
||||
{"type_name": "temperature", "channel": 1, "value": 53},
|
||||
]
|
||||
result = _assign_lpp_keys(sensors)
|
||||
assert [(k, n) for _, k, n in result] == [
|
||||
("lpp_temperature_ch1", 1),
|
||||
("lpp_humidity_ch2", 1),
|
||||
("lpp_temperature_ch1_2", 2),
|
||||
]
|
||||
|
||||
def test_triple_duplicate(self):
|
||||
sensors = [
|
||||
{"type_name": "voltage", "channel": 0, "value": 3.3},
|
||||
{"type_name": "voltage", "channel": 0, "value": 5.0},
|
||||
{"type_name": "voltage", "channel": 0, "value": 12.0},
|
||||
]
|
||||
result = _assign_lpp_keys(sensors)
|
||||
keys = [k for _, k, _ in result]
|
||||
assert keys == ["lpp_voltage_ch0", "lpp_voltage_ch0_2", "lpp_voltage_ch0_3"]
|
||||
|
||||
def test_empty_list(self):
|
||||
assert _assign_lpp_keys([]) == []
|
||||
|
||||
|
||||
class TestLppDiscoveryConfigs:
|
||||
def test_produces_config_per_sensor(self):
|
||||
nid = "ccdd11223344"
|
||||
@@ -583,6 +623,27 @@ class TestLppDiscoveryConfigs:
|
||||
assert cfg["suggested_display_precision"] == 1
|
||||
assert "lpp_temperature_ch1" in cfg["value_template"]
|
||||
|
||||
def test_duplicate_type_channel_gets_indexed_keys(self):
|
||||
nid = "ccdd11223344"
|
||||
device = _device_payload(nid, "Rep1", "Repeater")
|
||||
sensors = [
|
||||
{"channel": 1, "type_name": "temperature", "value": 20.0},
|
||||
{"channel": 2, "type_name": "humidity", "value": 45.0},
|
||||
{"channel": 1, "type_name": "temperature", "value": 53.0},
|
||||
]
|
||||
configs = _lpp_discovery_configs("mc", nid, device, sensors, f"mc/{nid}/telemetry")
|
||||
|
||||
assert len(configs) == 3
|
||||
topics = [t for t, _ in configs]
|
||||
assert f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config" in topics
|
||||
assert f"homeassistant/sensor/meshcore_{nid}/lpp_humidity_ch2/config" in topics
|
||||
assert f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1_2/config" in topics
|
||||
|
||||
# First temperature keeps base name, second gets #2 suffix
|
||||
names = {cfg["unique_id"]: cfg["name"] for _, cfg in configs}
|
||||
assert names[f"meshcore_{nid}_lpp_temperature_ch1"] == "Temperature (Ch 1)"
|
||||
assert names[f"meshcore_{nid}_lpp_temperature_ch1_2"] == "Temperature (Ch 1) #2"
|
||||
|
||||
def test_unknown_sensor_type_no_device_class(self):
|
||||
nid = "ccdd11223344"
|
||||
device = _device_payload(nid, "Rep1", "Repeater")
|
||||
@@ -712,6 +773,35 @@ class TestMqttHaTelemetryWithLpp:
|
||||
|
||||
mod._publish_discovery.assert_not_awaited()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_duplicate_lpp_sensors_not_overwritten(self):
|
||||
"""Two sensors with same (type_name, channel) get distinct keys."""
|
||||
key = "ccdd11223344"
|
||||
nid = _node_id(key)
|
||||
mod = MqttHaModule("test", _base_config(tracked_repeaters=[key]))
|
||||
mod._publisher = MagicMock()
|
||||
mod._publisher.connected = True
|
||||
mod._publisher.publish = AsyncMock()
|
||||
mod._discovery_topics = [
|
||||
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1/config",
|
||||
f"homeassistant/sensor/meshcore_{nid}/lpp_temperature_ch1_2/config",
|
||||
]
|
||||
|
||||
await mod.on_telemetry(
|
||||
{
|
||||
"public_key": key,
|
||||
"battery_volts": 4.1,
|
||||
"lpp_sensors": [
|
||||
{"channel": 1, "type_name": "temperature", "value": 20.0},
|
||||
{"channel": 1, "type_name": "temperature", "value": 53.0},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
payload = mod._publisher.publish.call_args[0][1]
|
||||
assert payload["lpp_temperature_ch1"] == 20.0
|
||||
assert payload["lpp_temperature_ch1_2"] == 53.0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_telemetry_without_lpp_sensors(self):
|
||||
"""Existing behavior: no lpp_sensors key means no LPP fields in payload."""
|
||||
|
||||
@@ -95,6 +95,8 @@ class TestGetRawPacket:
|
||||
"sender": "Alice",
|
||||
"channel_key": channel_key,
|
||||
"contact_key": None,
|
||||
"sender_timestamp": 1700000000,
|
||||
"message": "Alice: hello",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -174,8 +174,8 @@ class TestRadioOperationYield:
|
||||
class TestRequireConnected:
|
||||
"""Test the require_connected() FastAPI dependency."""
|
||||
|
||||
def test_raises_503_when_setup_in_progress(self):
|
||||
"""HTTPException 503 is raised when radio is connected but setup is still in progress."""
|
||||
def test_raises_423_when_setup_in_progress(self):
|
||||
"""HTTPException 423 is raised when radio is connected but setup is still in progress."""
|
||||
from fastapi import HTTPException
|
||||
|
||||
from app.services.radio_runtime import radio_runtime
|
||||
@@ -188,11 +188,11 @@ class TestRequireConnected:
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
radio_runtime.require_connected()
|
||||
|
||||
assert exc_info.value.status_code == 503
|
||||
assert exc_info.value.status_code == 423
|
||||
assert "initializing" in exc_info.value.detail.lower()
|
||||
|
||||
def test_raises_503_when_not_connected(self):
|
||||
"""HTTPException 503 is raised when radio is not connected."""
|
||||
def test_raises_423_when_not_connected(self):
|
||||
"""HTTPException 423 is raised when radio is not connected."""
|
||||
from fastapi import HTTPException
|
||||
|
||||
from app.services.radio_runtime import radio_runtime
|
||||
@@ -205,7 +205,7 @@ class TestRequireConnected:
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
radio_runtime.require_connected()
|
||||
|
||||
assert exc_info.value.status_code == 503
|
||||
assert exc_info.value.status_code == 423
|
||||
|
||||
def test_returns_meshcore_when_connected_and_setup_complete(self):
|
||||
"""Returns meshcore instance when radio is connected and setup is complete."""
|
||||
|
||||
+44
-11
@@ -20,6 +20,7 @@ from app.routers.radio import (
|
||||
RadioSettings,
|
||||
disconnect_radio,
|
||||
discover_mesh,
|
||||
get_private_key,
|
||||
get_radio_config,
|
||||
reboot_radio,
|
||||
reconnect_radio,
|
||||
@@ -130,14 +131,14 @@ class TestGetRadioConfig:
|
||||
assert response.advert_location_source == "current"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_503_when_self_info_missing(self):
|
||||
async def test_returns_423_when_self_info_missing(self):
|
||||
mc = MagicMock()
|
||||
mc.self_info = None
|
||||
with patch("app.routers.radio.radio_manager.require_connected", return_value=mc):
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await get_radio_config()
|
||||
|
||||
assert exc.value.status_code == 503
|
||||
assert exc.value.status_code == 423
|
||||
|
||||
|
||||
class TestUpdateRadioConfig:
|
||||
@@ -277,12 +278,44 @@ class TestUpdateRadioConfig:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await update_radio_config(RadioConfigUpdate(path_hash_mode=1))
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to set path hash mode" in str(exc.value.detail)
|
||||
assert radio_manager.path_hash_mode == 0
|
||||
mc.commands.send_appstart.assert_not_awaited()
|
||||
|
||||
|
||||
class TestPrivateKeyExport:
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_403_when_export_disabled(self):
|
||||
with patch("app.config.settings") as mock_settings:
|
||||
mock_settings.enable_local_private_key_export = False
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await get_private_key()
|
||||
assert exc.value.status_code == 403
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_404_when_no_key_available(self):
|
||||
with (
|
||||
patch("app.config.settings") as mock_settings,
|
||||
patch("app.keystore.get_private_key", return_value=None),
|
||||
):
|
||||
mock_settings.enable_local_private_key_export = True
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await get_private_key()
|
||||
assert exc.value.status_code == 404
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_key_hex_when_enabled_and_available(self):
|
||||
key_bytes = bytes.fromhex("ab" * 64)
|
||||
with (
|
||||
patch("app.config.settings") as mock_settings,
|
||||
patch("app.keystore.get_private_key", return_value=key_bytes),
|
||||
):
|
||||
mock_settings.enable_local_private_key_export = True
|
||||
result = await get_private_key()
|
||||
assert result == {"private_key": "ab" * 64}
|
||||
|
||||
|
||||
class TestPrivateKeyImport:
|
||||
@pytest.mark.asyncio
|
||||
async def test_rejects_invalid_hex(self):
|
||||
@@ -306,7 +339,7 @@ class TestPrivateKeyImport:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await set_private_key(PrivateKeyUpdate(private_key="aa" * 64))
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
|
||||
|
||||
class TestDiscoverMesh:
|
||||
@@ -666,7 +699,7 @@ class TestTracePath:
|
||||
assert "not a repeater" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_504_when_no_trace_response_is_heard(self):
|
||||
async def test_returns_408_when_no_trace_response_is_heard(self):
|
||||
mc = _mock_meshcore_with_info()
|
||||
repeater = Contact(
|
||||
public_key="44" * 32,
|
||||
@@ -708,7 +741,7 @@ class TestTracePath:
|
||||
)
|
||||
)
|
||||
|
||||
assert exc.value.status_code == 504
|
||||
assert exc.value.status_code == 408
|
||||
assert "No trace response heard" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -817,7 +850,7 @@ class TestTracePath:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await discover_mesh(RadioDiscoveryRequest(target="sensors"))
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert exc.value.detail == "Failed to start mesh discovery"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -854,7 +887,7 @@ class TestTracePath:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await set_private_key(PrivateKeyUpdate(private_key="aa" * 64))
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "keystore" in exc.value.detail.lower()
|
||||
# Called twice: initial attempt + one retry
|
||||
assert mock_export.await_count == 2
|
||||
@@ -893,7 +926,7 @@ class TestAdvertise:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await send_advertisement()
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_defaults_to_flood_mode(self):
|
||||
@@ -1026,7 +1059,7 @@ class TestRebootAndReconnect:
|
||||
assert result["connected"] is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reconnect_raises_503_on_failure(self):
|
||||
async def test_reconnect_raises_423_on_failure(self):
|
||||
mock_rm = MagicMock()
|
||||
mock_rm.is_connected = False
|
||||
mock_rm.is_reconnecting = False
|
||||
@@ -1037,7 +1070,7 @@ class TestRebootAndReconnect:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await reconnect_radio()
|
||||
|
||||
assert exc.value.status_code == 503
|
||||
assert exc.value.status_code == 423
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_disconnect_pauses_connection_attempts_and_broadcasts_health(self):
|
||||
|
||||
@@ -57,12 +57,12 @@ def test_require_connected_preserves_http_semantics():
|
||||
)
|
||||
with pytest.raises(HTTPException, match="Radio is initializing") as exc:
|
||||
runtime.require_connected()
|
||||
assert exc.value.status_code == 503
|
||||
assert exc.value.status_code == 423
|
||||
|
||||
runtime = RadioRuntime(_Manager(meshcore=None, is_connected=False, is_setup_in_progress=False))
|
||||
with pytest.raises(HTTPException, match="Radio not connected") as exc:
|
||||
runtime.require_connected()
|
||||
assert exc.value.status_code == 503
|
||||
assert exc.value.status_code == 423
|
||||
|
||||
|
||||
def test_require_connected_returns_fresh_meshcore_after_connectivity_check():
|
||||
|
||||
@@ -2219,6 +2219,262 @@ class TestCollectRepeaterTelemetryLpp:
|
||||
assert "lpp_sensors" not in recorded_data
|
||||
|
||||
|
||||
class TestRunTelemetryCycleRoutedOnly:
|
||||
"""Verify that _run_telemetry_cycle(routed_only=True) skips flood repeaters."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_routed_only_skips_flood_contacts(self):
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from app.models import AppSettings, Contact
|
||||
from app.radio_sync import _run_telemetry_cycle
|
||||
|
||||
flood_key = "aa" * 32
|
||||
direct_key = "bb" * 32
|
||||
override_key = "cc" * 32
|
||||
|
||||
flood_contact = Contact(
|
||||
public_key=flood_key,
|
||||
name="Flood",
|
||||
type=2,
|
||||
direct_path=None,
|
||||
direct_path_len=-1,
|
||||
direct_path_hash_mode=-1,
|
||||
)
|
||||
direct_contact = Contact(
|
||||
public_key=direct_key,
|
||||
name="Direct",
|
||||
type=2,
|
||||
direct_path="aabb",
|
||||
direct_path_len=1,
|
||||
direct_path_hash_mode=1,
|
||||
)
|
||||
override_contact = Contact(
|
||||
public_key=override_key,
|
||||
name="Override",
|
||||
type=2,
|
||||
direct_path=None,
|
||||
direct_path_len=-1,
|
||||
direct_path_hash_mode=-1,
|
||||
route_override_path="ccdd",
|
||||
route_override_len=1,
|
||||
route_override_hash_mode=1,
|
||||
)
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=[flood_key, direct_key, override_key],
|
||||
)
|
||||
|
||||
contact_map = {
|
||||
flood_key: flood_contact,
|
||||
direct_key: direct_contact,
|
||||
override_key: override_contact,
|
||||
}
|
||||
collected_keys: list[str] = []
|
||||
|
||||
async def fake_get_by_key(key):
|
||||
return contact_map.get(key)
|
||||
|
||||
async def fake_collect(mc, contact):
|
||||
collected_keys.append(contact.public_key)
|
||||
return True
|
||||
|
||||
fake_radio_manager = MagicMock()
|
||||
fake_radio_manager.is_connected = True
|
||||
fake_radio_manager.radio_operation = MagicMock()
|
||||
|
||||
# Make radio_operation an async context manager that yields a MagicMock
|
||||
fake_mc = MagicMock()
|
||||
|
||||
class FakeRadioOp:
|
||||
async def __aenter__(self):
|
||||
return fake_mc
|
||||
|
||||
async def __aexit__(self, *args):
|
||||
pass
|
||||
|
||||
fake_radio_manager.radio_operation.return_value = FakeRadioOp()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch(
|
||||
"app.radio_sync.ContactRepository.get_by_key",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=fake_get_by_key,
|
||||
),
|
||||
patch("app.radio_sync._collect_repeater_telemetry", new=fake_collect),
|
||||
patch("app.radio_sync.radio_manager", fake_radio_manager),
|
||||
):
|
||||
await _run_telemetry_cycle(routed_only=True)
|
||||
|
||||
# Flood contact should be skipped; direct and override should be collected
|
||||
assert flood_key not in collected_keys
|
||||
assert direct_key in collected_keys
|
||||
assert override_key in collected_keys
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_routed_only_skips_forced_flood_override(self):
|
||||
"""A contact with a forced-flood override (path_len=-1) should be
|
||||
treated as flood even though effective_route_source is 'override'."""
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from app.models import AppSettings, Contact
|
||||
from app.radio_sync import _run_telemetry_cycle
|
||||
|
||||
forced_flood_key = "aa" * 32
|
||||
direct_key = "bb" * 32
|
||||
|
||||
forced_flood_contact = Contact(
|
||||
public_key=forced_flood_key,
|
||||
name="ForcedFlood",
|
||||
type=2,
|
||||
direct_path=None,
|
||||
direct_path_len=-1,
|
||||
direct_path_hash_mode=-1,
|
||||
route_override_path="",
|
||||
route_override_len=-1,
|
||||
route_override_hash_mode=-1,
|
||||
)
|
||||
direct_contact = Contact(
|
||||
public_key=direct_key,
|
||||
name="Direct",
|
||||
type=2,
|
||||
direct_path="aabb",
|
||||
direct_path_len=1,
|
||||
direct_path_hash_mode=1,
|
||||
)
|
||||
|
||||
# Verify the forced-flood contact reports "override" source
|
||||
assert forced_flood_contact.effective_route_source == "override"
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=[forced_flood_key, direct_key],
|
||||
)
|
||||
|
||||
contact_map = {forced_flood_key: forced_flood_contact, direct_key: direct_contact}
|
||||
collected_keys: list[str] = []
|
||||
|
||||
async def fake_get_by_key(key):
|
||||
return contact_map.get(key)
|
||||
|
||||
async def fake_collect(mc, contact):
|
||||
collected_keys.append(contact.public_key)
|
||||
return True
|
||||
|
||||
fake_radio_manager = MagicMock()
|
||||
fake_radio_manager.is_connected = True
|
||||
|
||||
fake_mc = MagicMock()
|
||||
|
||||
class FakeRadioOp:
|
||||
async def __aenter__(self):
|
||||
return fake_mc
|
||||
|
||||
async def __aexit__(self, *args):
|
||||
pass
|
||||
|
||||
fake_radio_manager.radio_operation.return_value = FakeRadioOp()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch(
|
||||
"app.radio_sync.ContactRepository.get_by_key",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=fake_get_by_key,
|
||||
),
|
||||
patch("app.radio_sync._collect_repeater_telemetry", new=fake_collect),
|
||||
patch("app.radio_sync.radio_manager", fake_radio_manager),
|
||||
):
|
||||
await _run_telemetry_cycle(routed_only=True)
|
||||
|
||||
# Forced-flood override should be excluded; direct should be collected
|
||||
assert forced_flood_key not in collected_keys
|
||||
assert direct_key in collected_keys
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_full_cycle_includes_all_contacts(self):
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from app.models import AppSettings, Contact
|
||||
from app.radio_sync import _run_telemetry_cycle
|
||||
|
||||
flood_key = "aa" * 32
|
||||
direct_key = "bb" * 32
|
||||
|
||||
flood_contact = Contact(
|
||||
public_key=flood_key,
|
||||
name="Flood",
|
||||
type=2,
|
||||
direct_path=None,
|
||||
direct_path_len=-1,
|
||||
direct_path_hash_mode=-1,
|
||||
)
|
||||
direct_contact = Contact(
|
||||
public_key=direct_key,
|
||||
name="Direct",
|
||||
type=2,
|
||||
direct_path="aabb",
|
||||
direct_path_len=1,
|
||||
direct_path_hash_mode=1,
|
||||
)
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=[flood_key, direct_key],
|
||||
)
|
||||
|
||||
contact_map = {flood_key: flood_contact, direct_key: direct_contact}
|
||||
collected_keys: list[str] = []
|
||||
|
||||
async def fake_get_by_key(key):
|
||||
return contact_map.get(key)
|
||||
|
||||
async def fake_collect(mc, contact):
|
||||
collected_keys.append(contact.public_key)
|
||||
return True
|
||||
|
||||
fake_radio_manager = MagicMock()
|
||||
fake_radio_manager.is_connected = True
|
||||
|
||||
fake_mc = MagicMock()
|
||||
|
||||
class FakeRadioOp:
|
||||
async def __aenter__(self):
|
||||
return fake_mc
|
||||
|
||||
async def __aexit__(self, *args):
|
||||
pass
|
||||
|
||||
fake_radio_manager.radio_operation.return_value = FakeRadioOp()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch(
|
||||
"app.radio_sync.ContactRepository.get_by_key",
|
||||
new_callable=AsyncMock,
|
||||
side_effect=fake_get_by_key,
|
||||
),
|
||||
patch("app.radio_sync._collect_repeater_telemetry", new=fake_collect),
|
||||
patch("app.radio_sync.radio_manager", fake_radio_manager),
|
||||
):
|
||||
await _run_telemetry_cycle(routed_only=False)
|
||||
|
||||
# Full cycle collects both
|
||||
assert flood_key in collected_keys
|
||||
assert direct_key in collected_keys
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _telemetry_collect_loop — UTC modulo scheduler
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -2518,6 +2774,113 @@ class TestTelemetryCollectSchedulerDecision:
|
||||
)
|
||||
|
||||
|
||||
class TestRoutedHourlySchedulerDecision:
|
||||
"""Verify the routed_hourly feature in _maybe_run_scheduled_cycle."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_routed_hourly_fires_on_non_modulo_hour(self):
|
||||
"""At 09:00 UTC with 8h interval and routed_hourly=True, the scheduler
|
||||
should call _run_telemetry_cycle(routed_only=True)."""
|
||||
import datetime as real_datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=["aa" * 32],
|
||||
telemetry_interval_hours=8,
|
||||
telemetry_routed_hourly=True,
|
||||
)
|
||||
calls = []
|
||||
|
||||
async def fake_cycle(*, routed_only=False):
|
||||
calls.append({"routed_only": routed_only})
|
||||
|
||||
now = real_datetime.datetime(2026, 4, 16, 9, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
):
|
||||
await radio_sync._maybe_run_scheduled_cycle(now)
|
||||
|
||||
assert len(calls) == 1
|
||||
assert calls[0]["routed_only"] is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_routed_hourly_disabled_skips_non_modulo_hour(self):
|
||||
"""At 09:00 UTC with 8h interval and routed_hourly=False, nothing runs."""
|
||||
import datetime as real_datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=["aa" * 32],
|
||||
telemetry_interval_hours=8,
|
||||
telemetry_routed_hourly=False,
|
||||
)
|
||||
calls = []
|
||||
|
||||
async def fake_cycle(*, routed_only=False):
|
||||
calls.append({"routed_only": routed_only})
|
||||
|
||||
now = real_datetime.datetime(2026, 4, 16, 9, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
):
|
||||
await radio_sync._maybe_run_scheduled_cycle(now)
|
||||
|
||||
assert len(calls) == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_modulo_hour_runs_full_cycle_even_with_routed_hourly(self):
|
||||
"""At 16:00 UTC with 8h interval, a normal full cycle runs regardless
|
||||
of whether routed_hourly is enabled — it covers all repeaters."""
|
||||
import datetime as real_datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app import radio_sync
|
||||
from app.models import AppSettings
|
||||
|
||||
settings = AppSettings(
|
||||
tracked_telemetry_repeaters=["aa" * 32],
|
||||
telemetry_interval_hours=8,
|
||||
telemetry_routed_hourly=True,
|
||||
)
|
||||
calls = []
|
||||
|
||||
async def fake_cycle(*, routed_only=False):
|
||||
calls.append({"routed_only": routed_only})
|
||||
|
||||
now = real_datetime.datetime(2026, 4, 16, 16, 0, 0, tzinfo=real_datetime.UTC)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.radio_sync.AppSettingsRepository.get",
|
||||
new_callable=AsyncMock,
|
||||
return_value=settings,
|
||||
),
|
||||
patch("app.radio_sync._run_telemetry_cycle", new=fake_cycle),
|
||||
):
|
||||
await radio_sync._maybe_run_scheduled_cycle(now)
|
||||
|
||||
assert len(calls) == 1
|
||||
assert calls[0]["routed_only"] is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# get_contacts_selected_for_radio_sync — DM-active prioritization
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@@ -302,7 +302,7 @@ class TestRepeaterCommandRoute:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await send_repeater_command(KEY_A, CommandRequest(command="ver"))
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
mc.start_auto_message_fetching.assert_awaited_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -502,7 +502,7 @@ class TestTraceRoute:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await request_trace(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
mc.commands.send_trace.assert_awaited_once_with(
|
||||
path=KEY_A[:8],
|
||||
tag=1234,
|
||||
@@ -510,7 +510,7 @@ class TestTraceRoute:
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_wait_timeout_returns_504(self, test_db):
|
||||
async def test_wait_timeout_returns_408(self, test_db):
|
||||
mc = _mock_mc()
|
||||
await _insert_contact(KEY_A, name="Client", contact_type=1)
|
||||
mc.commands.send_trace = AsyncMock(return_value=_radio_result(EventType.OK))
|
||||
@@ -524,7 +524,7 @@ class TestTraceRoute:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await request_trace(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 504
|
||||
assert exc.value.status_code == 408
|
||||
mc.commands.send_trace.assert_awaited_once_with(
|
||||
path=KEY_A[:8],
|
||||
tag=1234,
|
||||
@@ -722,6 +722,7 @@ class TestRepeaterStatus:
|
||||
"flood_dups": 10,
|
||||
"direct_dups": 5,
|
||||
"full_evts": 0,
|
||||
"recv_errors": 42,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -741,9 +742,10 @@ class TestRepeaterStatus:
|
||||
assert response.uptime_seconds == 86400
|
||||
assert response.sent_flood == 100
|
||||
assert response.recv_direct == 700
|
||||
assert response.recv_errors == 42
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_504_on_timeout(self, test_db):
|
||||
async def test_408_on_timeout(self, test_db):
|
||||
mc = _mock_mc()
|
||||
await _insert_contact(KEY_A, name="Repeater", contact_type=2)
|
||||
mc.commands.req_status_sync = AsyncMock(return_value=None)
|
||||
@@ -754,7 +756,7 @@ class TestRepeaterStatus:
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_status(KEY_A)
|
||||
assert exc.value.status_code == 504
|
||||
assert exc.value.status_code == 408
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_400_not_repeater(self, test_db):
|
||||
@@ -817,7 +819,7 @@ class TestRepeaterLppTelemetry:
|
||||
assert response.sensors == []
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_504_on_timeout(self, test_db):
|
||||
async def test_408_on_timeout(self, test_db):
|
||||
mc = _mock_mc()
|
||||
await _insert_contact(KEY_A, name="Repeater", contact_type=2)
|
||||
mc.commands.req_telemetry_sync = AsyncMock(return_value=None)
|
||||
@@ -828,7 +830,7 @@ class TestRepeaterLppTelemetry:
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_lpp_telemetry(KEY_A)
|
||||
assert exc.value.status_code == 504
|
||||
assert exc.value.status_code == 408
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_400_not_repeater(self, test_db):
|
||||
@@ -1232,7 +1234,7 @@ class TestBatchCliFetch:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await _batch_cli_fetch(contact, "test_op", [("ver", "firmware_version")])
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to add contact to radio" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -1305,7 +1307,7 @@ class TestRepeaterAddContactError:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_status(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to add contact to radio" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -1323,7 +1325,7 @@ class TestRepeaterAddContactError:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_lpp_telemetry(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to add contact to radio" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -1341,7 +1343,7 @@ class TestRepeaterAddContactError:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_neighbors(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to add contact to radio" in exc.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -1359,5 +1361,5 @@ class TestRepeaterAddContactError:
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
await repeater_acl(KEY_A)
|
||||
|
||||
assert exc.value.status_code == 500
|
||||
assert exc.value.status_code == 422
|
||||
assert "Failed to add contact to radio" in exc.value.detail
|
||||
|
||||
@@ -31,6 +31,7 @@ SAMPLE_STATUS = {
|
||||
"flood_dups": 5,
|
||||
"direct_dups": 2,
|
||||
"full_events": 0,
|
||||
"recv_errors": None,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -135,6 +135,7 @@ class TestRoomStatus:
|
||||
"flood_dups": 2,
|
||||
"direct_dups": 1,
|
||||
"full_evts": 0,
|
||||
"recv_errors": 7,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -147,6 +148,7 @@ class TestRoomStatus:
|
||||
assert response.battery_volts == 4.025
|
||||
assert response.packets_received == 80
|
||||
assert response.recv_direct == 73
|
||||
assert response.recv_errors == 7
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_acl_maps_entries(self, test_db):
|
||||
|
||||
+10
-10
@@ -646,7 +646,7 @@ class TestOutgoingChannelBroadcast:
|
||||
request = SendChannelMessageRequest(channel_key=chan_key, text="hello")
|
||||
await send_channel_message(request)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert "regional override" in exc_info.value.detail.lower()
|
||||
mc.commands.set_channel.assert_not_awaited()
|
||||
mc.commands.send_chan_msg.assert_not_awaited()
|
||||
@@ -790,7 +790,7 @@ class TestOutgoingChannelBroadcast:
|
||||
SendChannelMessageRequest(channel_key=chan_key, text="this will fail")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert radio_manager.get_cached_channel_slot(chan_key) is None
|
||||
|
||||
|
||||
@@ -969,7 +969,7 @@ class TestResendChannelMessage:
|
||||
assert sent_timestamp == now + 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resend_no_radio_response_returns_504_and_creates_no_new_row(self, test_db):
|
||||
async def test_resend_no_radio_response_returns_408_and_creates_no_new_row(self, test_db):
|
||||
"""When resend returns None, report unknown outcome and create no new message row."""
|
||||
mc = _make_mc(name="MyNode")
|
||||
chan_key = "c1" * 16
|
||||
@@ -995,7 +995,7 @@ class TestResendChannelMessage:
|
||||
):
|
||||
await resend_channel_message(msg_id, new_timestamp=True)
|
||||
|
||||
assert exc_info.value.status_code == 504
|
||||
assert exc_info.value.status_code == 408
|
||||
assert exc_info.value.detail == NO_RADIO_RESPONSE_AFTER_SEND_DETAIL
|
||||
|
||||
messages = await MessageRepository.get_all(
|
||||
@@ -1317,7 +1317,7 @@ class TestPathHashModeOverride:
|
||||
SendChannelMessageRequest(channel_key=chan_key, text="hello")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert "path hash mode" in exc_info.value.detail.lower()
|
||||
mc.commands.send_chan_msg.assert_not_awaited()
|
||||
|
||||
@@ -1567,7 +1567,7 @@ class TestRadioExceptionMidSend:
|
||||
assert len(messages) == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_dm_send_no_radio_response_returns_504_without_storing_message(self, test_db):
|
||||
async def test_dm_send_no_radio_response_returns_408_without_storing_message(self, test_db):
|
||||
"""When mc.commands.send_msg() returns None, report unknown outcome and store nothing."""
|
||||
mc = _make_mc()
|
||||
pub_key = "ac" * 32
|
||||
@@ -1584,7 +1584,7 @@ class TestRadioExceptionMidSend:
|
||||
SendDirectMessageRequest(destination=pub_key, text="Did this send?")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 504
|
||||
assert exc_info.value.status_code == 408
|
||||
assert exc_info.value.detail == NO_RADIO_RESPONSE_AFTER_SEND_DETAIL
|
||||
|
||||
messages = await MessageRepository.get_all(
|
||||
@@ -1593,7 +1593,7 @@ class TestRadioExceptionMidSend:
|
||||
assert len(messages) == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_channel_send_no_radio_response_returns_504_without_storing_message(
|
||||
async def test_channel_send_no_radio_response_returns_408_without_storing_message(
|
||||
self, test_db
|
||||
):
|
||||
"""When mc.commands.send_chan_msg() returns None, report unknown outcome and store nothing."""
|
||||
@@ -1612,7 +1612,7 @@ class TestRadioExceptionMidSend:
|
||||
SendChannelMessageRequest(channel_key=chan_key, text="Did this send?")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 504
|
||||
assert exc_info.value.status_code == 408
|
||||
assert exc_info.value.detail == NO_RADIO_RESPONSE_AFTER_SEND_DETAIL
|
||||
|
||||
messages = await MessageRepository.get_all(
|
||||
@@ -1733,7 +1733,7 @@ class TestRadioExceptionMidSend:
|
||||
SendChannelMessageRequest(channel_key=chan_key_b, text="Never sent")
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
assert exc_info.value.status_code == 422
|
||||
assert radio_manager.get_cached_channel_slot(chan_key_a) is None
|
||||
assert radio_manager.get_cached_channel_slot(chan_key_b) is None
|
||||
mc.commands.send_chan_msg.assert_not_called()
|
||||
|
||||
@@ -330,3 +330,66 @@ class TestTelemetryScheduleEndpoint:
|
||||
assert schedule.tracked_count == 5
|
||||
assert schedule.options == [6, 8, 12, 24]
|
||||
assert schedule.next_run_at is not None
|
||||
|
||||
|
||||
class TestRoutedHourlySetting:
|
||||
"""Tests for the telemetry_routed_hourly setting."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_defaults_to_false(self, test_db):
|
||||
settings = await AppSettingsRepository.get()
|
||||
assert settings.telemetry_routed_hourly is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_round_trip_via_patch(self, test_db):
|
||||
result = await update_settings(AppSettingsUpdate(telemetry_routed_hourly=True))
|
||||
assert result.telemetry_routed_hourly is True
|
||||
|
||||
result = await update_settings(AppSettingsUpdate(telemetry_routed_hourly=False))
|
||||
assert result.telemetry_routed_hourly is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_schedule_includes_routed_fields_when_enabled(self, test_db):
|
||||
key = "aa" * 32
|
||||
await ContactRepository.upsert(
|
||||
ContactUpsert(public_key=key, name="R1", type=CONTACT_TYPE_REPEATER)
|
||||
)
|
||||
await AppSettingsRepository.update(
|
||||
tracked_telemetry_repeaters=[key],
|
||||
telemetry_routed_hourly=True,
|
||||
)
|
||||
|
||||
schedule = await get_telemetry_schedule()
|
||||
|
||||
assert schedule.routed_hourly is True
|
||||
assert schedule.next_routed_run_at is not None
|
||||
assert schedule.next_run_at is not None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_schedule_omits_routed_run_when_disabled(self, test_db):
|
||||
key = "aa" * 32
|
||||
await ContactRepository.upsert(
|
||||
ContactUpsert(public_key=key, name="R1", type=CONTACT_TYPE_REPEATER)
|
||||
)
|
||||
await AppSettingsRepository.update(
|
||||
tracked_telemetry_repeaters=[key],
|
||||
telemetry_routed_hourly=False,
|
||||
)
|
||||
|
||||
schedule = await get_telemetry_schedule()
|
||||
|
||||
assert schedule.routed_hourly is False
|
||||
assert schedule.next_routed_run_at is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_toggle_response_carries_routed_hourly(self, test_db):
|
||||
key = "bb" * 32
|
||||
await ContactRepository.upsert(
|
||||
ContactUpsert(public_key=key, name="R2", type=CONTACT_TYPE_REPEATER)
|
||||
)
|
||||
await AppSettingsRepository.update(telemetry_routed_hourly=True)
|
||||
|
||||
result = await toggle_tracked_telemetry(TrackedTelemetryRequest(public_key=key))
|
||||
|
||||
assert result.schedule.routed_hourly is True
|
||||
assert result.schedule.next_routed_run_at is not None
|
||||
|
||||
@@ -768,7 +768,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "meshcore"
|
||||
version = "2.3.2"
|
||||
version = "2.3.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "bleak" },
|
||||
@@ -776,9 +776,9 @@ dependencies = [
|
||||
{ name = "pycryptodome" },
|
||||
{ name = "pyserial-asyncio-fast" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4c/32/6e7a3e7dcc379888bc2bfcbbdf518af89e47b3697977cbfefd0b87fdf333/meshcore-2.3.2.tar.gz", hash = "sha256:98ceb8c28a8abe5b5b77f0941b30f99ba3d4fc2350f76de99b6c8a4e778dad6f", size = 69871 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/d1/e45d8fa3cac24d58c3bc2523fe67b8cd00c05ea68e1704fbbaf56cb19753/meshcore-2.3.7.tar.gz", hash = "sha256:267107e09a96f7d0d63f4bdb1402d033a724baadd9c9becf9b71a458170f60bb", size = 90787 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/db/e4/9aafcd70315e48ca1bbae2f4ad1e00a13d5ef00019c486f964b31c34c488/meshcore-2.3.2-py3-none-any.whl", hash = "sha256:7b98e6d71f2c1e1ee146dd2fe96da40eb5bf33077e34ca840557ee53b192e322", size = 53325 },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/3d/ff4b5971a3210da07dc793b54af9b1231fea42dfb87e2818fdcc83e10d72/meshcore-2.3.7-py3-none-any.whl", hash = "sha256:952f028b25527155e78103d01598fa3897cccfa793ba2028a32bc36c86759f14", size = 60352 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1399,7 +1399,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.2"
|
||||
version = "9.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
@@ -1408,9 +1408,9 @@ dependencies = [
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1453,11 +1453,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.2.1"
|
||||
version = "1.2.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1533,7 +1533,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "remoteterm-meshcore"
|
||||
version = "3.12.0"
|
||||
version = "3.13.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aiomqtt" },
|
||||
@@ -1569,7 +1569,7 @@ requires-dist = [
|
||||
{ name = "boto3", specifier = ">=1.38.0" },
|
||||
{ name = "fastapi", specifier = ">=0.115.0" },
|
||||
{ name = "httpx", specifier = ">=0.28.1" },
|
||||
{ name = "meshcore", specifier = "==2.3.2" },
|
||||
{ name = "meshcore", specifier = "==2.3.7" },
|
||||
{ name = "pycryptodome", specifier = ">=3.20.0" },
|
||||
{ name = "pydantic-settings", specifier = ">=2.0.0" },
|
||||
{ name = "pynacl", specifier = ">=1.5.0" },
|
||||
@@ -1582,7 +1582,7 @@ dev = [
|
||||
{ name = "httpx", specifier = ">=0.28.1" },
|
||||
{ name = "pip-licenses", specifier = ">=5.0.0" },
|
||||
{ name = "pyright", specifier = ">=1.1.390" },
|
||||
{ name = "pytest", specifier = ">=9.0.2" },
|
||||
{ name = "pytest", specifier = ">=9.0.3" },
|
||||
{ name = "pytest-asyncio", specifier = ">=1.3.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.0" },
|
||||
{ name = "ruff", specifier = ">=0.8.0" },
|
||||
@@ -1590,7 +1590,7 @@ dev = [
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
version = "2.33.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
@@ -1598,9 +1598,9 @@ dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
Reference in New Issue
Block a user