Compare commits

..

43 Commits

Author SHA1 Message Date
Jack Kingsman 0d6d287aa9 Overhaul radio responsibility 2026-04-10 16:48:20 -07:00
Jack Kingsman 53a4d8186a Updating changelog + build for 3.11.0 2026-04-10 16:12:27 -07:00
Jack Kingsman 70e1669113 Improve test coverage 2026-04-10 16:04:02 -07:00
Jack Kingsman 3b1a292507 Docs updates and be consistent about node >=20 2026-04-10 15:57:47 -07:00
Jack Kingsman 4f19e1ec9a Fix races and stale things 2026-04-10 15:54:03 -07:00
Jack Kingsman 59601bb98e Assume that a same-second same-message same-first-byte-key DM is more likely an echo than them sending the same message, and multi-retry for flood scope restoration 2026-04-10 15:50:45 -07:00
Jack Kingsman f6b0fd21fb Don't consume DM resend attempt on busy radio 2026-04-10 15:46:19 -07:00
Jack Kingsman 8a4858a313 Don't consume DM resend attempt on busy radio 2026-04-10 15:44:50 -07:00
Jack Kingsman 442c2fad20 Fix some frontend display/quality/doc issues 2026-04-10 15:43:08 -07:00
Jack Kingsman 8cc542ce23 Fix same-second same-message collision in room servers with per-sender disambiguation at DB level 2026-04-10 15:36:53 -07:00
Jack Kingsman a7258c120e Merge pull request #177 from YourSandwich/feature/battery-status
Add optional battery display to status bar
2026-04-10 14:55:39 -07:00
Jack Kingsman 8752320f52 Add some tests and move the helpers into their own TS file 2026-04-10 14:53:57 -07:00
Jack Kingsman f9f046a05f Fix inversion of const definition location 2026-04-10 14:51:19 -07:00
Jack Kingsman 390c0624ea IIFE => memo for battery color/styling conversion 2026-04-10 14:49:05 -07:00
YourSandwich 2f55d11b0b Add battery display toggles to Local Configuration 2026-04-10 23:38:29 +02:00
YourSandwich fa0be24990 Add battery indicator to status bar 2026-04-10 23:38:29 +02:00
Jack Kingsman 1e22a21445 Add radio health &c. to fanout bus 2026-04-10 14:31:45 -07:00
YourSandwich e09a3a01f7 Add localStorage helpers for battery display settings 2026-04-10 22:25:17 +02:00
Jack Kingsman 3bd756ee4e Pluck in HA radio stats into the WS fanout endpoint 2026-04-10 12:39:37 -07:00
Jack Kingsman 43c5e0f67d Improve e2e testing posture to make it sliiiightly less unfriendly for others to get working 2026-04-10 11:36:26 -07:00
Jack Kingsman c0fc5fbba2 Add AUR download and test script 2026-04-10 11:30:05 -07:00
Jack Kingsman c7248222dd Updating changelog + build for 3.10.0 2026-04-10 11:16:16 -07:00
Jack Kingsman 1e18a91f12 Merge pull request #172 from YourSandwich/aur-install-instructions
Add Arch Linux (AUR) packaging infrastructure
2026-04-10 10:54:49 -07:00
Jack Kingsman 18db6e4dd8 Make test script executable 2026-04-10 10:49:49 -07:00
Jack Kingsman 2393dadf1b Unload the service on uninstall 2026-04-10 10:48:38 -07:00
Jack Kingsman fd26576e0d Use correct email 2026-04-10 10:47:21 -07:00
Sandwich cb5a76eb5f Replace manual user/group creation with sysusers.d and tmpfiles.d 2026-04-10 19:23:01 +02:00
Jack Kingsman 7f5dde119f Update AGENTS.md 2026-04-10 00:15:57 -07:00
Jack Kingsman 799a721761 Be more defensive about systemd detection 2026-04-10 00:10:53 -07:00
Jack Kingsman 152a584f35 Fix TCP host 2026-04-10 00:10:41 -07:00
Jack Kingsman 5cc0476426 Fix port numbering 2026-04-10 00:06:22 -07:00
Jack Kingsman e468c6c161 Change command palette shortcut 2026-04-09 23:45:16 -07:00
Jack Kingsman e33537018b Fix AUR username 2026-04-09 23:11:02 -07:00
Jack Kingsman 0727793560 Add test script 2026-04-09 23:08:32 -07:00
Jack Kingsman 5c4e04e024 Skip daemon reload if systemctl isn't around 2026-04-09 23:08:26 -07:00
Jack Kingsman 967269ef7d Initial AUR work 2026-04-09 23:08:22 -07:00
Jack Kingsman 1903797d0d Fix broken statistics pane e2e test 2026-04-09 22:30:12 -07:00
Jack Kingsman bb5af5ba82 Bump apprise to 1.9.9. Closes #173. 2026-04-09 17:20:57 -07:00
Sandwich 424da7e232 Add Arch Linux (AUR) install instructions to README
Adds "Install Path 3: Arch Linux (AUR)" section covering both AUR
helper and manual makepkg installation, linking to the published
remoteterm-meshcore AUR package.

Closes #171
2026-04-09 03:51:39 +02:00
Jack Kingsman 159df1ec5b Revert "Add debug lines for fav click"
This reverts commit 8e2e039985.
2026-04-08 16:33:44 -07:00
Jack Kingsman 8e2e039985 Add debug lines for fav click 2026-04-08 16:18:46 -07:00
Jack Kingsman 01c86a486e Add packet feed filters; closes #169. 2026-04-08 14:44:41 -07:00
Jack Kingsman 7d5cfdec26 Add note about startup on windows 2026-04-07 22:07:31 -07:00
83 changed files with 3620 additions and 588 deletions
+73
View File
@@ -0,0 +1,73 @@
name: Publish AUR package
# Pushes the contents of pkg/aur/ to the remoteterm-meshcore AUR repository
# whenever a GitHub release is published. Can also be triggered manually for
# testing or out-of-band republishes.
#
# Required secrets:
# AUR_SSH_PRIVATE_KEY Private SSH key registered with the AUR maintainer
# account that owns the remoteterm-meshcore package.
# AUR_COMMIT_EMAIL Email used for the AUR git commit identity.
on:
release:
types: [published]
workflow_dispatch:
inputs:
version:
description: 'Version to publish (no v prefix, e.g. 3.9.1)'
required: true
concurrency:
# Serialize publishes so a fast back-to-back release sequence cannot race
# two pushes against the AUR repo. The later one wins by virtue of being
# the final state.
group: publish-aur
cancel-in-progress: false
jobs:
publish-aur:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Resolve version from event
id: version
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
VERSION="${{ inputs.version }}"
else
VERSION="${{ github.event.release.tag_name }}"
fi
VERSION="${VERSION#v}"
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "Publishing AUR package for version $VERSION"
- name: Stamp pkgver into PKGBUILD
run: |
sed -i "s/^pkgver=.*/pkgver=${{ steps.version.outputs.version }}/" pkg/aur/PKGBUILD
sed -i "s/^pkgrel=.*/pkgrel=1/" pkg/aur/PKGBUILD
- name: Publish to AUR
uses: KSXGitHub/github-actions-deploy-aur@v4.1.2
with:
pkgname: remoteterm-meshcore
pkgbuild: pkg/aur/PKGBUILD
assets: |
pkg/aur/remoteterm-meshcore.install
pkg/aur/remoteterm-meshcore.service
pkg/aur/remoteterm-meshcore.sysusers
pkg/aur/remoteterm-meshcore.tmpfiles
pkg/aur/remoteterm.env
commit_username: jackkingsman
commit_email: ${{ secrets.AUR_COMMIT_EMAIL }}
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
commit_message: "Update to ${{ steps.version.outputs.version }}"
# Recompute sha256sums from the live release tarball + the bundled
# service/env files. The committed PKGBUILD has SKIP placeholders.
updpkgsums: true
# Validate the PKGBUILD parses and sources download, but skip the
# actual build (which would run uv sync + npm install for several
# minutes of CI time on every release).
test: true
test_flags: --clean --cleanbuild --nodeps --nobuild
+5 -3
View File
@@ -209,6 +209,7 @@ This message-layer echo/path handling is independent of raw-packet storage dedup
│ │ ├── MapView.tsx # Leaflet map showing node locations
│ │ └── ...
│ └── vite.config.ts
├── pkg/aur/ # AUR package files (PKGBUILD, systemd service, env, install hooks)
├── scripts/ # Quality / release helpers (listing below is representative, not exhaustive)
│ ├── build/
│ │ ├── collect_licenses.sh # Gather third-party license attributions
@@ -216,7 +217,8 @@ This message-layer echo/path handling is independent of raw-packet storage dedup
│ ├── quality/
│ │ ├── all_quality.sh # Repo-standard autofix + validate gate
│ │ ├── e2e.sh # End-to-end test runner
│ │ ── extended_quality.sh # Quality gate plus e2e and Docker matrix
│ │ ── extended_quality.sh # Quality gate plus e2e and Docker matrix
│ │ └── test_aur_package.sh # Build + install AUR package in Arch Docker containers
│ └── setup/
│ ├── fetch_prebuilt_frontend.py # Download release frontend fallback
│ └── install_service.sh # Install/configure Linux systemd service
@@ -371,7 +373,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
| POST | `/api/settings/favorites/toggle` | Toggle favorite status |
| POST | `/api/settings/blocked-keys/toggle` | Toggle blocked key |
| POST | `/api/settings/blocked-names/toggle` | Toggle blocked name |
| POST | `/api/settings/migrate` | One-time migration from frontend localStorage |
| POST | `/api/settings/tracked-telemetry/toggle` | Toggle tracked telemetry repeater |
| GET | `/api/fanout` | List all fanout configs |
| POST | `/api/fanout` | Create new fanout config |
| PATCH | `/api/fanout/{id}` | Update fanout config (triggers module reload) |
@@ -478,7 +480,7 @@ mc.subscribe(EventType.ACK, handler)
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `advert_interval`, `last_advert_time`, `favorites`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, and `discovery_blocked_types`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `advert_interval`, `last_advert_time`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, `discovery_blocked_types`, `tracked_telemetry_repeaters`, and `auto_resend_channel`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
Byte-perfect channel retries are user-triggered via `POST /api/messages/channel/{message_id}/resend` and are allowed for 30 seconds after the original send.
+23 -7
View File
@@ -1,3 +1,23 @@
## [3.11.0] - 2026-04-10
* Feature: Radio health and contact data accessible on fanout bus
* Feature: Local node radio stats (voltage etc.) on WS health bus
* Feature: Battery indicator optional in status bar (configured in Local Settings)
* Bugfix: Fix same-second same-message collision in room servers
* Bugfix: Don't consume DM resend attempt if the radio was just busy
* Bugfix: Assume that a same-second same-message same-first-byte-key DM is more likely an echo than them sending the same message
* Bugfix: Multi-retry for flood scope restoration
* Misc: Testing & documentation improvements
## [3.10.0] - 2026-04-10
* Feature: Add Arch AUR package
* Feature: 72hr packet density view in statistics
* Feature: Add warnings for event loop selection for MQTT on Windows startup
* Bugfix: Bump Apprise to 1.9.9 to fix Matrix bug
* Misc: More memory-conscious on recent contact fetch
* Misc: Fix statistics pane e2e test
## [3.9.0] - 2026-04-06
* Feature: Add hop counts to hop-width selection options
@@ -136,7 +156,7 @@
* Bugfix: Fix Apprise duplicate names
* Bugfix: Be better about identity resolution in the stats pane
* Misc: Docs, test, and performance enhancements
* Misc: Don't prompt "Are you sure" when leaving an unedited interation
* Misc: Don't prompt "Are you sure" when leaving an unedited integration
* Misc: Log node time on startup
* Misc: Improve community MQTT error bubble-up
* Misc: Unread DMs always have a red unread counter
@@ -163,7 +183,7 @@
## [3.3.0] - 2026-03-13
* Feature: Use dashed lines to show collapsed ambiguous router results
* Feature: Jump to unred
* Feature: Jump to unread
* Feature: Local channel management to prevent need to reload channel every time
* Feature: Debug endpoint
* Feature: Force-singleton channel management
@@ -226,7 +246,7 @@
* Feature: Massive codebase refactor and overhaul
* Bugfix: Fix packet parsing for trace packets
* Bugfix: Refetch channels on reconnect
* Bugfix: Load All on repeater pane on mobile doesn't etend into lower text
* Bugfix: Load All on repeater pane on mobile doesn't extend into lower text
* Bugfix: Timestamps in logs
* Bugfix: Correct wrong clock sync command
* Misc: Improve bot error bubble up
@@ -243,10 +263,6 @@
* Bugfix: Don't obscure new integration dropdown on session boundary
## [2.7.8] - 2026-03-08
## [2.7.8] - 2026-03-08
* Bugfix: Improve frontend asset resolution and fixup the build/push script
+98 -4
View File
@@ -70,17 +70,111 @@ npm run test:run
npm run build
```
## Quality + Publishing Scripts
<details>
<summary>scripts/quality/</summary>
| Script | Purpose |
|--------|---------|
| `all_quality.sh` | Repo-standard gate: autofix (ruff, eslint, prettier), then pyright, pytest, vitest, and frontend build. Run before finishing any code change. |
| `extended_quality.sh` | `all_quality.sh` plus e2e tests and Docker build matrix. Used for release validation. |
| `e2e.sh` | Thin wrapper that runs Playwright e2e tests from `tests/e2e/`. |
| `docker_ci.sh` | Builds the Docker image and runs a smoke test against it. |
| `test_aur_package.sh` | Builds the AUR package in an Arch container, then installs and boots it in a second container with port 8000 exposed (hang finish). |
| `run_aur_with_radio.sh` | Like `test_aur_package.sh` but passes through the host serial device for testing with a real radio (hang finish). |
</details>
<details>
<summary>scripts/build/</summary>
| Script | Purpose |
|--------|---------|
| `publish.sh` | Full release ceremony: quality gate, version bump, changelog, frontend build, Docker multi-arch push, GitHub release. |
| `release_common.sh` | Shared shell helpers (version validation, formatting) sourced by other build scripts. |
| `package_release_artifact.sh` | Builds the prebuilt-frontend release zip attached to GitHub releases. |
| `push_docker_multiarch.sh` | Builds and pushes multi-arch Docker images (amd64 + arm64). |
| `create_github_release.sh` | Creates a GitHub release with changelog notes and the release artifact. |
| `extract_release_notes.sh` | Extracts the latest version's notes from `CHANGELOG.md` for the release body. |
| `collect_licenses.sh` | Gathers third-party license attributions into `LICENSES.md`. |
| `print_frontend_licenses.cjs` | Helper that extracts frontend npm dependency licenses. |
| `dump_api_specs.py` | Dumps the OpenAPI spec from the running backend (developer utility). |
</details>
## E2E Testing
E2E coverage exists, but it is intentionally not part of the normal development path.
E2E tests exercise the full stack (backend + frontend + real radio hardware) via Playwright.
These tests are only guaranteed to run correctly in a narrow subset of environments; they require a busy mesh with messages arriving constantly, an available autodetect-able radio, and a contact in the test database (which you can provide in `tests/e2e/.tmp/e2e-test.db` after an initial run). E2E tests are generally not necessary to run for normal development work.
> [!WARNING]
> E2E tests are **not part of the normal development path** — most contributors will never need to run them. They exist to catch integration issues that unit tests can't and generally only need to be run by maintainers.
### Hardware requirements
- A MeshCore radio connected via serial (auto-detected, or set `MESHCORE_SERIAL_PORT`)
- The radio must be powered on and past its startup sequence before tests begin
### Running
```bash
cd tests/e2e
npm install
npx playwright test # headless
npx playwright test --headed # you can probably guess
npx playwright install chromium # first time only
npx playwright test # headless
npx playwright test --headed # watch it run
```
The test harness starts its own uvicorn instance on port 8001 with a fresh temporary database. Your development server (port 8000) is unaffected.
### Test tiers
**Most tests (22 of 28) are fully self-contained.** They seed their own data via API calls or direct DB writes and need only a connected radio. These cover messaging, pagination, search, favorites, settings, fanout integrations, historical decryption, and all UI-only views.
**Mesh-traffic tests (tagged `@mesh-traffic`)** wait up to 3 minutes for an incoming message from another node on the network. If no traffic arrives, they fail with an advisory that the failure may be RF conditions, not a bug. These are: `incoming-message` and `packet-feed` (second test only).
**The partner-radio DM ACK test (tagged `@partner-radio`)** validates direct-route learning by sending a DM and waiting for an ACK. It requires a second radio in range that has your test radio in its contacts. Configure the partner node's public key and name via `E2E_PARTNER_RADIO_PUBKEY` and `E2E_PARTNER_RADIO_NAME`.
### Making mesh-traffic tests reliable: the echo bot
The most practical way to guarantee incoming traffic is to run an **echo bot on a second radio** monitoring a known channel. When the test suite starts a `@mesh-traffic` test, it sends a trigger message to that channel. If a bot on another radio is listening, it replies — generating the incoming RF packet the test needs within seconds instead of waiting for organic mesh traffic.
The test suite sends `!echo please give incoming message` to the echo channel (default `#flightless`) at the start of each `@mesh-traffic` test. The trigger message is configurable via `E2E_ECHO_TRIGGER_MESSAGE`.
Setup:
1. Set up a second MeshCore radio within RF range of your test radio
2. Run a RemoteTerm instance on the second radio
3. Configure a bot on the second radio that monitors the echo channel and replies when it sees the trigger. Example bot code:
```python
def bot(sender_name, sender_key, message_text, is_dm,
channel_key, channel_name, sender_timestamp, path):
if "!echo" in message_text.lower():
return f"[ECHO] {message_text}"
return None
```
4. The test suite calls `nudgeEchoBot()` automatically — no manual intervention needed
Without the echo bot, `@mesh-traffic` tests rely on organic traffic from other nodes. In a quiet RF environment they will time out.
### Environment variables
All E2E environment configuration is centralized in `tests/e2e/helpers/env.ts` with defaults that work for the maintainer's test rig. Override via environment variables:
| Variable | Default | Purpose |
|----------|---------|---------|
| `MESHCORE_SERIAL_PORT` | auto-detect | Serial port for the test radio |
| `E2E_ECHO_CHANNEL` | `#flightless` | Channel the echo bot monitors for traffic generation |
| `E2E_ECHO_TRIGGER_MESSAGE` | `!echo please give incoming message` | Message sent to nudge the echo bot |
| `E2E_PARTNER_RADIO_PUBKEY` | *(maintainer's test node)* | 64-char hex public key of a node that will ACK DMs from your radio |
| `E2E_PARTNER_RADIO_NAME` | *(maintainer's test node)* | Display name of that node (used in UI assertions) |
Example for a contributor with their own two-radio setup:
```bash
E2E_ECHO_CHANNEL="#mytest" \
E2E_PARTNER_RADIO_PUBKEY="abcd1234...full64charhexkey..." \
E2E_PARTNER_RADIO_NAME="MyTestNode" \
npx playwright test
```
## Pull Request Expectations
+2 -2
View File
@@ -56,7 +56,7 @@ SOFTWARE.
</details>
### apprise (1.9.7) — BSD-2-Clause
### apprise (1.9.9) — BSD-2-Clause
<details>
<summary>Full license text</summary>
@@ -64,7 +64,7 @@ SOFTWARE.
```
BSD 2-Clause License
Copyright (c) 2025, Chris Caron <lead2gold@gmail.com>
Copyright (c) 2026, Chris Caron <lead2gold@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
+37 -1
View File
@@ -23,7 +23,7 @@ For advanced setup and troubleshooting see [README_ADVANCED.md](README_ADVANCED.
## Requirements
- Python 3.10+
- Python 3.11+
- Node.js LTS or current (20, 22, 24, 25) if you're not using a prebuilt release
- [UV](https://astral.sh/uv) package manager: `curl -LsSf https://astral.sh/uv/install.sh | sh`
- MeshCore radio connected via USB serial, TCP, or BLE
@@ -116,6 +116,8 @@ cp docker-compose.example.yml docker-compose.yml
bash scripts/setup/install_docker.sh
```
> The interactive generator enables a self-signed (snakeoil) TLS certificate by default. If you accept the default, the app will be served over HTTPS and the generated compose file will include certificate mounts and an SSL command override. Decline if you prefer plain HTTP or plan to terminate TLS externally.
Your local `docker-compose.yml` is gitignored so future pulls do not overwrite your Docker settings.
The guided Docker flow can collect BLE settings, but BLE access from Docker still needs manual compose customization such as Bluetooth passthrough and possibly privileged mode or host networking. If you want the simpler path for BLE, use the regular Python launch flow instead.
@@ -135,6 +137,8 @@ sudo docker compose pull
sudo docker compose up -d
```
> If you switched to a local build (`build: .` instead of `image:`), use `sudo docker compose up -d --build` instead — `pull` only fetches remote images.
The example file and setup script default to the published Docker Hub image. To build locally from your checkout instead, replace:
```yaml
@@ -161,6 +165,29 @@ To stop:
sudo docker compose down
```
## Install Path 3: Arch Linux (AUR)
A [`remoteterm-meshcore`](https://aur.archlinux.org/packages/remoteterm-meshcore) package is available in the AUR. Install it with an AUR helper or build it manually:
```bash
# with an AUR helper
yay -S remoteterm-meshcore
# or manually
git clone https://aur.archlinux.org/remoteterm-meshcore.git
cd remoteterm-meshcore
makepkg -si
```
Configure your radio connection, then start the service:
```bash
sudo vi /etc/remoteterm-meshcore/remoteterm.env
sudo systemctl enable --now remoteterm-meshcore
```
Access the app at http://localhost:8000.
## Standard Environment Variables
Only one transport may be active at a time. If multiple are set, the server will refuse to start.
@@ -199,6 +226,15 @@ $env:MESHCORE_SERIAL_PORT="COM8" # or your COM port
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
```
> [!WARNING]
> **Windows + MQTT fanout:** Python's default Windows event loop (ProactorEventLoop) is not compatible with the MQTT libraries used by RemoteTerm. If you configure any MQTT integration, add `--loop none` to your uvicorn command:
>
> ```powershell
> uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --loop none
> ```
>
> If you forget, the app will start normally but MQTT connections will fail and you'll see a toast in the UI with this same guidance.
If you enable Basic Auth, protect the app with HTTPS. HTTP Basic credentials are not safe on plain HTTP. Also note that the app's permissive CORS policy is a deliberate trusted-network tradeoff, so cross-origin browser JavaScript is not a reliable way to use that Basic Auth gate.
## Where To Go Next
+13 -9
View File
@@ -40,8 +40,8 @@ app/
│ ├── contact_reconciliation.py # Prefix-claim, sender-key backfill, name-history wiring
│ ├── radio_lifecycle.py # Post-connect setup and reconnect/setup helpers
│ ├── radio_commands.py # Radio config/private-key command workflows
│ ├── radio_noise_floor.py # In-memory local radio noise-floor sampling/history
│ └── radio_runtime.py # Router/dependency seam over the global RadioManager
│ ├── radio_stats.py # In-memory local radio stats sampling and noise-floor history
│ └── radio_runtime.py # Explicit router/dependency seam over the live radio manager + runtime state
├── radio.py # RadioManager transport/session state + lock management
├── radio_sync.py # Polling, sync, periodic advertisement loop
├── decoder.py # Packet parsing/decryption
@@ -95,7 +95,7 @@ app/
- `RadioManager.start_connection_monitor()` checks health every 5s.
- `RadioManager.post_connect_setup()` delegates to `services/radio_lifecycle.py`.
- Routers, startup/lifespan code, fanout helpers, and `radio_sync.py` should reach radio state through `services/radio_runtime.py`, not by importing `app.radio.radio_manager` directly.
- Routers, startup/lifespan code, fanout helpers, and `radio_sync.py` should reach radio state through `services/radio_runtime.py`, not by importing `app.radio.radio_manager` directly. `RadioManager` owns transport/session operations; mutable runtime metadata and caches now live in its composed runtime-state object.
- Shared reconnect/setup helpers in `services/radio_lifecycle.py` are used by startup, the monitor, and manual reconnect/reboot flows before broadcasting healthy state.
- Setup still includes handler registration, key export, time sync, contact/channel sync, and advertisement tasks. The message-poll task always starts: by default it runs as a low-frequency hourly audit, and `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK=true` switches it to aggressive 10-second polling. That audit checks both missed-radio-message drift and channel-slot cache drift; cache mismatches are logged, toasted, and the send-slot cache is reset.
- Post-connect setup is timeout-bounded. If initial radio offload/setup hangs too long, the backend logs the failure and broadcasts an `error` toast telling the operator to reboot the radio and restart the server.
@@ -161,10 +161,12 @@ app/
- All external integrations (MQTT, bots, webhooks, Apprise, SQS) are managed through the fanout bus (`app/fanout/`).
- Configs stored in `fanout_configs` table, managed via `GET/POST/PATCH/DELETE /api/fanout`.
- `broadcast_event()` in `websocket.py` dispatches to the fanout manager for `message` and `raw_packet` events.
- Each integration is a `FanoutModule` with scope-based filtering.
- `broadcast_event()` in `websocket.py` dispatches to the fanout manager for `message`, `raw_packet`, and `contact` events.
- `on_message` and `on_raw` are scope-gated. `on_contact`, `on_telemetry`, and `on_health` are dispatched to all modules unconditionally (modules filter internally).
- Repeater telemetry broadcasts are emitted after `RepeaterTelemetryRepository.record()` in both `radio_sync.py` (auto-collect) and `routers/repeaters.py` (manual fetch).
- The 60-second radio stats sampling loop in `radio_stats.py` dispatches an enriched health snapshot (radio identity + full stats) to all fanout modules after each sample.
- Community MQTT publishes raw packets only, but its derived `path` field for direct packets is emitted as comma-separated hop identifiers, not flat path bytes.
- See `app/fanout/AGENTS_fanout.md` for full architecture details.
- See `app/fanout/AGENTS_fanout.md` for full architecture details and event payload shapes.
## API Surface (all under `/api`)
@@ -244,7 +246,7 @@ app/
- `POST /settings/favorites/toggle`
- `POST /settings/blocked-keys/toggle`
- `POST /settings/blocked-names/toggle`
- `POST /settings/migrate`
- `POST /settings/tracked-telemetry/toggle`
### Fanout
- `GET /fanout` — list all fanout configs
@@ -286,6 +288,8 @@ Main tables:
- `raw_packets`
- `contact_advert_paths` (recent unique advertisement paths per contact, keyed by contact + path bytes + hop count)
- `contact_name_history` (tracks name changes over time)
- `repeater_telemetry_history` (time-series telemetry snapshots for tracked repeaters)
- `fanout_configs` (MQTT, bot, webhook, Apprise, SQS integration configs)
- `app_settings`
Contact route state is canonicalized on the backend:
@@ -301,14 +305,14 @@ Repository writes should prefer typed models such as `ContactUpsert` over ad hoc
`app_settings` fields in active model:
- `max_radio_contacts`
- `favorites`
- `auto_decrypt_dm_on_advert`
- `last_message_times`
- `preferences_migrated`
- `advert_interval`
- `last_advert_time`
- `flood_scope`
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
- `tracked_telemetry_repeaters`
- `auto_resend_channel`
Note: MQTT, community MQTT, and bot configs were migrated to the `fanout_configs` table (migrations 36-38).
+1 -1
View File
@@ -136,7 +136,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
WHERE type = 'CHAN';
CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_incoming_priv_dedup
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0), COALESCE(sender_key, ''))
WHERE type = 'PRIV' AND outgoing = 0;
CREATE INDEX IF NOT EXISTS idx_messages_sender_key ON messages(sender_key);
CREATE INDEX IF NOT EXISTS idx_messages_pagination
+2 -2
View File
@@ -2,10 +2,10 @@
import json
import logging
from typing import Any, Literal
from typing import Any, Literal, NotRequired
from pydantic import TypeAdapter
from typing_extensions import NotRequired, TypedDict
from typing_extensions import TypedDict
from app.models import Channel, Contact, Message, MessagePath, RawPacketBroadcast
from app.routers.health import HealthResponse
+61 -7
View File
@@ -1,6 +1,6 @@
# Fanout Bus Architecture
The fanout bus is a unified system for dispatching mesh radio events (decoded messages and raw packets) to external integrations. It replaces the previous scattered singleton MQTT publishers with a modular, configurable framework.
The fanout bus is a unified system for dispatching mesh radio events to external integrations. It replaces the previous scattered singleton MQTT publishers with a modular, configurable framework.
## Core Concepts
@@ -8,10 +8,15 @@ The fanout bus is a unified system for dispatching mesh radio events (decoded me
Base class that all integration modules extend:
- `__init__(config_id, config, *, name="")` — constructor; receives the config UUID, the type-specific config dict, and the user-assigned name
- `start()` / `stop()` — async lifecycle (e.g. open/close connections)
- `on_message(data)` — receive decoded messages (DM/channel)
- `on_raw(data)` — receive raw RF packets
- `on_message(data)` — receive decoded messages (scope-gated)
- `on_raw(data)` — receive raw RF packets (scope-gated)
- `on_contact(data)` — receive contact upserts; dispatched to all modules
- `on_telemetry(data)` — receive repeater telemetry snapshots; dispatched to all modules
- `on_health(data)` — receive periodic radio health snapshots; dispatched to all modules
- `status` property (**must override**) — return `"connected"`, `"disconnected"`, or `"error"`
All five event hooks are no-ops by default; modules override only the ones they care about.
### FanoutManager (manager.py)
Singleton that owns all active modules and dispatches events:
- `load_from_db()` — startup: load enabled configs, instantiate modules
@@ -19,6 +24,9 @@ Singleton that owns all active modules and dispatches events:
- `remove_config(id)` — delete: stop and remove
- `broadcast_message(data)` — scope-check + dispatch `on_message`
- `broadcast_raw(data)` — scope-check + dispatch `on_raw`
- `broadcast_contact(data)` — dispatch `on_contact` to all modules
- `broadcast_telemetry(data)` — dispatch `on_telemetry` to all modules
- `broadcast_health_fanout(data)` — dispatch `on_health` to all modules
- `stop_all()` — shutdown
- `get_statuses()` — health endpoint data
@@ -33,19 +41,65 @@ Each config has a `scope` JSON blob controlling what events reach it:
```
Community MQTT always enforces `{"messages": "none", "raw_packets": "all"}`.
Scope only gates `on_message` and `on_raw`. The `on_contact`, `on_telemetry`, and `on_health` hooks are dispatched to all modules unconditionally — modules that care about specific contacts or repeaters filter internally based on their own config.
## Event Flow
```
Radio Event -> packet_processor / event_handler
-> broadcast_event("message"|"raw_packet", data, realtime=True)
-> broadcast_event("message"|"raw_packet"|"contact", data, realtime=True)
-> WebSocket broadcast (always)
-> FanoutManager.broadcast_message/raw (only if realtime=True)
-> scope check per module
-> module.on_message / on_raw
-> FanoutManager.broadcast_message/raw/contact (only if realtime=True)
-> scope check per module (message/raw only)
-> module.on_message / on_raw / on_contact
Telemetry collect (radio_sync.py / routers/repeaters.py)
-> RepeaterTelemetryRepository.record(...)
-> FanoutManager.broadcast_telemetry(data)
-> module.on_telemetry (all modules, unconditional)
Health fanout (radio_stats.py, piggybacks on 60s stats sampling loop)
-> FanoutManager.broadcast_health_fanout(data)
-> module.on_health (all modules, unconditional)
```
Setting `realtime=False` (used during historical decryption) skips fanout dispatch entirely.
## Event Payloads
### on_message(data)
`Message.model_dump()` — the full Pydantic message model. Key fields:
- `type` (`"PRIV"` | `"CHAN"`), `conversation_key`, `text`, `sender_name`, `sender_key`
- `outgoing`, `acked`, `paths`, `sender_timestamp`, `received_at`
### on_raw(data)
Raw packet dict from `packet_processor.py`. Key fields:
- `id` (storage row ID), `observation_id` (per-arrival), `raw` (hex), `timestamp`
- `decrypted_info` (optional: `channel_key`, `contact_key`, `text`)
### on_contact(data)
`Contact.model_dump()` — the full Pydantic contact model. Key fields:
- `public_key`, `name`, `type` (0=unknown, 1=client, 2=repeater, 3=room, 4=sensor)
- `lat`, `lon`, `last_seen`, `first_seen`, `on_radio`
### on_telemetry(data)
Repeater telemetry snapshot, broadcast after successful `RepeaterTelemetryRepository.record()`.
Identical shape from both auto-collect (`radio_sync.py`) and manual fetch (`routers/repeaters.py`):
- `public_key`, `name`, `timestamp`
- `battery_volts`, `noise_floor_dbm`, `last_rssi_dbm`, `last_snr_db`
- `packets_received`, `packets_sent`, `airtime_seconds`, `rx_airtime_seconds`
- `uptime_seconds`, `sent_flood`, `sent_direct`, `recv_flood`, `recv_direct`
- `flood_dups`, `direct_dups`, `full_events`, `tx_queue_len`
### on_health(data)
Radio health + stats snapshot, broadcast every 60s by the stats sampling loop in `radio_stats.py`:
- `connected` (bool), `connection_info` (str | None)
- `public_key` (str | None), `name` (str | None)
- `noise_floor_dbm`, `battery_mv`, `uptime_secs` (int | None)
- `last_rssi` (int | None), `last_snr` (float | None)
- `tx_air_secs`, `rx_air_secs` (int | None)
- `packets_recv`, `packets_sent`, `flood_tx`, `direct_tx`, `flood_rx`, `direct_rx` (int | None)
## Current Module Types
### mqtt_private (mqtt_private.py)
+9
View File
@@ -38,6 +38,15 @@ class FanoutModule:
async def on_raw(self, data: dict) -> None:
"""Called for raw RF packets. Override if needed."""
async def on_contact(self, data: dict) -> None:
"""Called for contact upserts (adverts, sync). Override if needed."""
async def on_telemetry(self, data: dict) -> None:
"""Called for repeater telemetry snapshots. Override if needed."""
async def on_health(self, data: dict) -> None:
"""Called for periodic radio health snapshots. Override if needed."""
@property
def status(self) -> str:
"""Return 'connected', 'disconnected', or 'error'."""
+1 -1
View File
@@ -164,7 +164,7 @@ class BotModule(FanoutModule):
),
timeout=BOT_EXECUTION_TIMEOUT,
)
except asyncio.TimeoutError:
except TimeoutError:
logger.warning("Bot '%s' execution timed out", self.name)
return
except Exception:
+1 -1
View File
@@ -538,7 +538,7 @@ class CommunityMqttPublisher(BaseMqttPublisher):
self._version_event.clear()
try:
await asyncio.wait_for(self._version_event.wait(), timeout=30)
except asyncio.TimeoutError:
except TimeoutError:
pass
return False
return True
+33 -1
View File
@@ -86,6 +86,11 @@ def _scope_matches_raw(scope: dict, _data: dict) -> bool:
return scope.get("raw_packets", "none") == "all"
def _always_match(_scope: dict, _data: dict) -> bool:
"""Match all modules unconditionally (filtering is module-internal)."""
return True
class FanoutManager:
"""Owns all active fanout modules and dispatches events."""
@@ -220,7 +225,7 @@ class FanoutManager:
handler = getattr(module, handler_name)
await asyncio.wait_for(handler(data), timeout=_DISPATCH_TIMEOUT_SECONDS)
self._clear_module_error(config_id)
except asyncio.TimeoutError:
except TimeoutError:
timeout_error = f"{handler_name} timed out after {_DISPATCH_TIMEOUT_SECONDS:.1f}s"
self._set_module_error(config_id, timeout_error)
logger.error(
@@ -270,6 +275,33 @@ class FanoutManager:
log_label="on_raw",
)
async def broadcast_contact(self, data: dict) -> None:
"""Dispatch a contact upsert to all modules."""
await self._dispatch_matching(
data,
matcher=_always_match,
handler_name="on_contact",
log_label="on_contact",
)
async def broadcast_telemetry(self, data: dict) -> None:
"""Dispatch a repeater telemetry snapshot to all modules."""
await self._dispatch_matching(
data,
matcher=_always_match,
handler_name="on_telemetry",
log_label="on_telemetry",
)
async def broadcast_health_fanout(self, data: dict) -> None:
"""Dispatch a radio health snapshot to all modules."""
await self._dispatch_matching(
data,
matcher=_always_match,
handler_name="on_health",
log_label="on_health",
)
async def stop_all(self) -> None:
"""Shutdown all modules."""
for config_id, (module, _) in list(self._modules.items()):
+15 -7
View File
@@ -196,7 +196,7 @@ class BaseMqttPublisher(ABC):
self._version_event.wait(),
timeout=self._not_configured_timeout,
)
except asyncio.TimeoutError:
except TimeoutError:
continue
except asyncio.CancelledError:
return
@@ -231,7 +231,7 @@ class BaseMqttPublisher(ABC):
self._version_event.clear()
try:
await asyncio.wait_for(self._version_event.wait(), timeout=60)
except asyncio.TimeoutError:
except TimeoutError:
elapsed = time.monotonic() - connect_time
await self._on_periodic_wake(elapsed)
if self._should_break_wait(elapsed):
@@ -254,14 +254,22 @@ class BaseMqttPublisher(ABC):
self._last_error = _format_error_detail(e)
# Windows ProactorEventLoop does not implement add_reader /
# add_writer, which paho-mqtt requires. Give a specific,
# actionable toast instead of the generic connection error.
if isinstance(e, NotImplementedError) and sys.platform == "win32":
# add_writer, which paho-mqtt requires. The failure can
# surface as a direct NotImplementedError (add_writer in
# __aenter__) or as a generic timeout (add_reader fails
# inside an event-loop callback, so paho never hears back).
# Either way, if we're on Windows with Proactor the root
# cause is the same and retrying won't help.
_on_proactor = (
sys.platform == "win32"
and type(asyncio.get_event_loop()).__name__ == "ProactorEventLoop"
)
if _on_proactor:
broadcast_error(
"MQTT unavailable — Windows event loop incompatible",
"The default Windows event loop (ProactorEventLoop) does "
"not support MQTT. Restart with: uv run uvicorn "
"app.main:app --loop none",
"not support MQTT. Add --loop none to your uvicorn "
"command and restart. See README.md for details.",
)
_broadcast_health()
logger.error(
+1 -1
View File
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
NO_EVENT_RECEIVED_GUIDANCE = (
"Radio command channel is unresponsive (no_event_received). Ensure that your firmware is not "
"incompatible, outdated, or wrong-mode (e.g. repeater, not client), and that"
"incompatible, outdated, or wrong-mode (e.g. repeater, not client), and that "
"serial/TCP/BLE connectivity is successful (try another app and see if that one works?). The app cannot proceed because it cannot "
"issue commands to the radio."
)
+22 -103
View File
@@ -1,122 +1,41 @@
import os
import logging
import sys
# ---------------------------------------------------------------------------
# Windows event-loop fix for MQTT fanout (aiomqtt / paho-mqtt) compatibility
# Windows event-loop advisory for MQTT fanout
# ---------------------------------------------------------------------------
# On Windows, uvicorn's default "auto" loop explicitly creates ProactorEventLoop,
# which does NOT implement add_reader()/add_writer() — calls that paho-mqtt
# requires internally. Setting the event loop *policy* alone is not enough
# because uvicorn's "auto" factory bypasses it.
#
# The fix: re-exec the current process with "--loop none", which tells uvicorn
# to let asyncio.run() create the loop through the standard policy (where we
# have just installed WindowsSelectorEventLoopPolicy).
#
# Guards:
# - "--loop" already in argv → we (or the operator) already handled it
# - MESHCORE_NO_AUTO_LOOP_ON_WIN32=true → operator opt-out for custom
# runners, test harnesses, or other non-uvicorn invocations
# On Windows, uvicorn's default event loop (ProactorEventLoop) does not
# implement add_reader()/add_writer(), which paho-mqtt (via aiomqtt) requires.
# We cannot fix this from inside the app — the loop is already created by the
# time this module is imported. Log a prominent warning so Windows operators
# who want MQTT know to add ``--loop none`` to their uvicorn command.
# ---------------------------------------------------------------------------
_win32_needs_reexec = (
sys.platform == "win32"
and os.environ.get("MESHCORE_NO_AUTO_LOOP_ON_WIN32", "").lower() not in ("true", "1")
and "--loop" not in sys.argv
)
# Skip re-exec when --reload is active: on Windows os.execv spawns a new
# process and exits, so the reloader's child dies and a fresh uvicorn
# (with its own reloader) starts — creating doubled watchers or a loop.
# Also skip if sys.executable is missing (embedded / frozen Python).
if _win32_needs_reexec and "--reload" in sys.argv:
print(
"\n" + "!" * 78 + "\n"
" WINDOWS + --reload DETECTED\n" + "!" * 78 + "\n"
"\n"
" We can't auto-fix the event loop when --reload is active because\n"
" the re-exec would fight with uvicorn's reloader process.\n"
"\n"
" If you need MQTT fanout, add --loop none to your command:\n"
"\n"
" uv run uvicorn app.main:app --reload \033[1m--loop none\033[0m [... other options ...]\n"
"\n"
" Everything else works fine as-is.\n"
"\n" + "!" * 78 + "\n",
file=sys.stderr,
flush=True,
)
_win32_needs_reexec = False
if _win32_needs_reexec and not sys.executable:
# Embedded or frozen Python — can't re-exec, just warn.
_win32_needs_reexec = False
if _win32_needs_reexec:
if sys.platform == "win32":
import asyncio as _asyncio
_asyncio.set_event_loop_policy(
_asyncio.WindowsSelectorEventLoopPolicy() # type: ignore[attr-defined]
)
print(
"\n" + "=" * 78 + "\n"
" HALLO FRIEND WINDOWS USER <3 WE GOTTA ADJUST THINGS BEFORE YOU STARTUP\n"
+ "="
* 78
+ "\n"
"\n"
" uvicorn's default event loop on Windows (ProactorEventLoop) is not\n"
" compatible with aiomqtt/paho-mqtt, which require add_reader() /\n"
" add_writer(). Re-executing with '--loop none' so uvicorn honours\n"
" WindowsSelectorEventLoopPolicy and MQTT fanout can function.\n"
""
" In English: The code we use for MQTT is fussy. We're restarting\n"
" the server with the right settings for MQTT to work.\n"
"\n"
" This may or may not work :) If the app starts up after this without a warning, you're good to go.\n"
"\n" + "=" * 78 + "\n",
file=sys.stderr,
flush=True,
)
# sys.argv[0] on Windows is typically a .exe console-script launcher
# (e.g. .venv\Scripts\uvicorn.exe) which Python can't open as a script.
# use "python -m uvicorn" instead, forwarding the original arguments.
# yes, this is brittle as all hell.
try:
os.execv(
sys.executable,
[sys.executable, "-m", "uvicorn"] + sys.argv[1:] + ["--loop", "none"],
)
except Exception:
# execv failed — fall through and let the app start normally.
# MQTT fanout will not work, but everything else will.
_loop = _asyncio.get_event_loop()
_is_proactor = type(_loop).__name__ == "ProactorEventLoop"
if _is_proactor:
print(
"\n" + "!" * 78 + "\n"
" AUTO-RESTART FAILED :<\n" + "!" * 78 + "\n"
" NOTE FOR WINDOWS USERS\n" + "!" * 78 + "\n"
"\n"
" We tried to restart uvicorn with the necessary settings\n"
" automatically, but there was a problem with the invocation\n"
" (not shocking; this is a fragile system).\n"
" The running event loop is ProactorEventLoop, which is not\n"
" compatible with MQTT fanout (aiomqtt / paho-mqtt).\n"
"\n"
" Please rerun RemoteTerm with a command like:\n"
" If you use MQTT integrations, restart with --loop none:\n"
"\n"
" uv run uvicorn app.main:app \033[1m--loop none\033[0m [... other options ...]\n"
" uv run uvicorn app.main:app \033[1m--loop none\033[0m"
" [... other options ...]\n"
"\n"
" Setting '--loop none' on uvicorn startup will put you in a good\n"
" state for MQTT and bypass this self-repair.\n"
"\n"
" The server is starting anyway -- everything except MQTT fanout\n"
" will work normally. If you want to suppress this attempt, \n"
" set the env var MESHCORE_NO_AUTO_LOOP_ON_WIN32=true\n"
" Everything else works fine as-is.\n"
"\n" + "!" * 78 + "\n",
file=sys.stderr,
flush=True,
)
# ---------------------------------------------------------------------------
del _loop, _is_proactor
import asyncio
import logging
from contextlib import asynccontextmanager
from pathlib import Path
@@ -157,8 +76,8 @@ from app.routers import (
ws,
)
from app.security import add_optional_basic_auth_middleware
from app.services.radio_noise_floor import start_noise_floor_sampling, stop_noise_floor_sampling
from app.services.radio_runtime import radio_runtime as radio_manager
from app.services.radio_stats import start_radio_stats_sampling, stop_radio_stats_sampling
from app.version_info import get_app_build_info
setup_logging()
@@ -189,7 +108,7 @@ async def lifespan(app: FastAPI):
from app.radio_sync import ensure_default_channels
await ensure_default_channels()
await start_noise_floor_sampling()
await start_radio_stats_sampling()
# Always start connection monitor (even if initial connection failed)
await radio_manager.start_connection_monitor()
@@ -218,7 +137,7 @@ async def lifespan(app: FastAPI):
await radio_manager.stop_connection_monitor()
await stop_background_contact_reconciliation()
await stop_message_polling()
await stop_noise_floor_sampling()
await stop_radio_stats_sampling()
await stop_periodic_advert()
await stop_periodic_sync()
await stop_telemetry_collect()
+44
View File
@@ -419,6 +419,12 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
await set_version(conn, 55)
applied += 1
if version < 56:
logger.info("Applying migration 56: add sender_key to incoming PRIV dedup index")
await _migrate_056_priv_dedup_include_sender_key(conn)
await set_version(conn, 56)
applied += 1
if applied > 0:
logger.info(
"Applied %d migration(s), schema now at version %d", applied, await get_version(conn)
@@ -3307,3 +3313,41 @@ async def _migrate_055_favorites_to_columns(conn: aiosqlite.Connection) -> None:
await conn.commit()
else:
raise
async def _migrate_056_priv_dedup_include_sender_key(conn: aiosqlite.Connection) -> None:
    """Add sender_key to the incoming PRIV dedup index.

    Room-server posts are stored as PRIV messages sharing one conversation_key
    (the room contact). Without sender_key in the uniqueness constraint, two
    different room participants sending identical text in the same clock second
    collide and the second message is silently dropped.

    Adding COALESCE(sender_key, '') is strictly more permissive — no existing
    rows can conflict — so the migration only needs to rebuild the index.

    Args:
        conn: Open aiosqlite connection; this function commits on every path.
    """
    # Guard: the messages table may not exist yet (e.g. a brand-new or
    # minimal database). Commit and bail out — nothing to rebuild.
    cursor = await conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='messages'"
    )
    if await cursor.fetchone() is None:
        await conn.commit()
        return
    # The index references type, conversation_key, sender_timestamp, outgoing,
    # and sender_key. Some migration tests create minimal messages tables that
    # lack these columns. Skip gracefully when the schema is too old.
    col_cursor = await conn.execute("PRAGMA table_info(messages)")
    columns = {row[1] for row in await col_cursor.fetchall()}
    required = {"type", "conversation_key", "sender_timestamp", "outgoing", "sender_key"}
    if not required.issubset(columns):
        await conn.commit()
        return
    # Drop-and-recreate: SQLite cannot alter an index in place. COALESCE
    # keeps NULL sender_timestamp / sender_key values comparable, and the
    # partial-index WHERE clause limits dedup to incoming PRIV rows.
    await conn.execute("DROP INDEX IF EXISTS idx_messages_incoming_priv_dedup")
    await conn.execute(
        """CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_incoming_priv_dedup
        ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0),
        COALESCE(sender_key, ''))
        WHERE type = 'PRIV' AND outgoing = 0"""
    )
    await conn.commit()
-4
View File
@@ -877,10 +877,6 @@ class NoiseFloorHistoryStats(BaseModel):
latest_timestamp: int | None = Field(
default=None, description="Unix timestamp of the most recent sample"
)
supported: bool | None = Field(
default=None,
description="Whether the connected radio appears to support radio stats sampling",
)
samples: list[NoiseFloorSample] = Field(default_factory=list)
+24 -3
View File
@@ -39,6 +39,7 @@ from app.repository import (
ChannelRepository,
ContactAdvertPathRepository,
ContactRepository,
MessageRepository,
RawPacketRepository,
)
from app.services.contact_reconciliation import (
@@ -645,10 +646,30 @@ async def _process_direct_message(
)
if result is not None:
# Successfully decrypted!
# In the ambiguous direction case (both first bytes match), we
# defaulted to incoming. Check if a matching outgoing message
# already exists — if so, this is actually our own outgoing echo
# and should be treated as such instead of creating a duplicate
# incoming row.
effective_outgoing = is_outgoing
if not is_outgoing and dest_hash == src_hash:
existing_outgoing = await MessageRepository.get_by_content(
msg_type="PRIV",
conversation_key=contact.public_key.lower(),
text=result.message,
sender_timestamp=result.timestamp,
outgoing=True,
)
if existing_outgoing is not None:
effective_outgoing = True
logger.debug(
"Ambiguous DM resolved as outgoing echo (matched existing sent msg %d)",
existing_outgoing.id,
)
logger.debug(
"Decrypted DM %s contact %s: %s",
"to" if is_outgoing else "from",
"to" if effective_outgoing else "from",
contact.name or contact.public_key[:12],
result.message[:50] if result.message else "",
)
@@ -664,7 +685,7 @@ async def _process_direct_message(
path_len=packet_info.path_length if packet_info else None,
rssi=rssi,
snr=snr,
outgoing=is_outgoing,
outgoing=effective_outgoing,
)
return {
+207 -64
View File
@@ -3,7 +3,6 @@ import glob
import logging
import platform
import re
from collections import OrderedDict
from contextlib import asynccontextmanager, nullcontext
from pathlib import Path
@@ -12,22 +11,23 @@ from serial.serialutil import SerialException
from app.config import settings
from app.keystore import clear_keys
from app.radio_runtime_state import (
RadioDisconnectedError,
RadioOperationBusyError,
RadioOperationError,
RadioRuntimeState,
)
logger = logging.getLogger(__name__)
MAX_FRONTEND_RECONNECT_ERROR_BROADCASTS = 3
_SERIAL_PORT_ERROR_RE = re.compile(r"could not open port (?P<port>.+?):")
class RadioOperationError(RuntimeError):
"""Base class for shared radio operation lock errors."""
class RadioOperationBusyError(RadioOperationError):
"""Raised when a non-blocking radio operation cannot acquire the lock."""
class RadioDisconnectedError(RadioOperationError):
"""Raised when the radio disconnects between pre-check and lock acquisition."""
__all__ = [
"RadioDisconnectedError",
"RadioManager",
"RadioOperationBusyError",
"RadioOperationError",
"radio_manager",
]
def detect_serial_devices() -> list[str]:
@@ -118,7 +118,7 @@ async def test_serial_device(port: str, baudrate: int, timeout: float = 3.0) ->
return True
return False
except asyncio.TimeoutError:
except TimeoutError:
logger.debug("Device %s timed out", port)
return False
except Exception as e:
@@ -154,29 +154,189 @@ async def find_radio_port(baudrate: int) -> str | None:
class RadioManager:
"""Manages the MeshCore radio connection."""
def __init__(self):
def __init__(self, runtime_state: RadioRuntimeState | None = None):
self._meshcore: MeshCore | None = None
self._connection_info: str | None = None
self._connection_desired: bool = True
self._reconnect_task: asyncio.Task | None = None
self._last_connected: bool = False
self._reconnect_lock: asyncio.Lock | None = None
self._operation_lock: asyncio.Lock | None = None
self._setup_lock: asyncio.Lock | None = None
self._setup_in_progress: bool = False
self._setup_complete: bool = False
self._frontend_reconnect_error_broadcasts: int = 0
self.device_info_loaded: bool = False
self.max_contacts: int | None = None
self.device_model: str | None = None
self.firmware_build: str | None = None
self.firmware_version: str | None = None
self.max_channels: int = 40
self.path_hash_mode: int = 0
self.path_hash_mode_supported: bool = False
self._channel_slot_by_key: OrderedDict[str, int] = OrderedDict()
self._channel_key_by_slot: dict[int, str] = {}
self._pending_message_channel_key_by_slot: dict[int, str] = {}
self._state = runtime_state or RadioRuntimeState()
@property
def state(self) -> RadioRuntimeState:
return self._state
@property
def _connection_info(self) -> str | None:
return self._state.connection_info
@_connection_info.setter
def _connection_info(self, value: str | None) -> None:
self._state.connection_info = value
@property
def _connection_desired(self) -> bool:
return self._state.connection_desired
@_connection_desired.setter
def _connection_desired(self, value: bool) -> None:
self._state.connection_desired = value
@property
def _reconnect_task(self) -> asyncio.Task | None:
return self._state.reconnect_task
@_reconnect_task.setter
def _reconnect_task(self, value: asyncio.Task | None) -> None:
self._state.reconnect_task = value
@property
def _last_connected(self) -> bool:
return self._state.last_connected
@_last_connected.setter
def _last_connected(self, value: bool) -> None:
self._state.last_connected = value
@property
def _reconnect_lock(self) -> asyncio.Lock | None:
return self._state.reconnect_lock
@_reconnect_lock.setter
def _reconnect_lock(self, value: asyncio.Lock | None) -> None:
self._state.reconnect_lock = value
@property
def _operation_lock(self) -> asyncio.Lock | None:
return self._state.operation_lock
@_operation_lock.setter
def _operation_lock(self, value: asyncio.Lock | None) -> None:
self._state.operation_lock = value
@property
def _setup_lock(self) -> asyncio.Lock | None:
return self._state.setup_lock
@_setup_lock.setter
def _setup_lock(self, value: asyncio.Lock | None) -> None:
self._state.setup_lock = value
@property
def _setup_in_progress(self) -> bool:
return self._state.setup_in_progress
@_setup_in_progress.setter
def _setup_in_progress(self, value: bool) -> None:
self._state.setup_in_progress = value
@property
def _setup_complete(self) -> bool:
return self._state.setup_complete
@_setup_complete.setter
def _setup_complete(self, value: bool) -> None:
self._state.setup_complete = value
@property
def _frontend_reconnect_error_broadcasts(self) -> int:
return self._state.frontend_reconnect_error_broadcasts
@_frontend_reconnect_error_broadcasts.setter
def _frontend_reconnect_error_broadcasts(self, value: int) -> None:
self._state.frontend_reconnect_error_broadcasts = value
@property
def device_info_loaded(self) -> bool:
return self._state.device_info_loaded
@device_info_loaded.setter
def device_info_loaded(self, value: bool) -> None:
self._state.device_info_loaded = value
@property
def max_contacts(self) -> int | None:
return self._state.max_contacts
@max_contacts.setter
def max_contacts(self, value: int | None) -> None:
self._state.max_contacts = value
@property
def device_model(self) -> str | None:
return self._state.device_model
@device_model.setter
def device_model(self, value: str | None) -> None:
self._state.device_model = value
@property
def firmware_build(self) -> str | None:
return self._state.firmware_build
@firmware_build.setter
def firmware_build(self, value: str | None) -> None:
self._state.firmware_build = value
@property
def firmware_version(self) -> str | None:
return self._state.firmware_version
@firmware_version.setter
def firmware_version(self, value: str | None) -> None:
self._state.firmware_version = value
@property
def max_channels(self) -> int:
return self._state.max_channels
@max_channels.setter
def max_channels(self, value: int) -> None:
self._state.max_channels = value
@property
def path_hash_mode(self) -> int:
return self._state.path_hash_mode
@path_hash_mode.setter
def path_hash_mode(self, value: int) -> None:
self._state.path_hash_mode = value
@path_hash_mode.deleter
def path_hash_mode(self) -> None:
self._state.path_hash_mode = 0
@property
def path_hash_mode_supported(self) -> bool:
return self._state.path_hash_mode_supported
@path_hash_mode_supported.setter
def path_hash_mode_supported(self, value: bool) -> None:
self._state.path_hash_mode_supported = value
@path_hash_mode_supported.deleter
def path_hash_mode_supported(self) -> None:
self._state.path_hash_mode_supported = False
@property
def _channel_slot_by_key(self):
return self._state.channel_slot_by_key
@_channel_slot_by_key.setter
def _channel_slot_by_key(self, value) -> None:
self._state.channel_slot_by_key = value
@property
def _channel_key_by_slot(self):
return self._state.channel_key_by_slot
@_channel_key_by_slot.setter
def _channel_key_by_slot(self, value) -> None:
self._state.channel_key_by_slot = value
@property
def _pending_message_channel_key_by_slot(self):
return self._state.pending_message_channel_key_by_slot
@_pending_message_channel_key_by_slot.setter
def _pending_message_channel_key_by_slot(self, value) -> None:
self._state.pending_message_channel_key_by_slot = value
async def _acquire_operation_lock(
self,
@@ -185,40 +345,23 @@ class RadioManager:
blocking: bool,
) -> None:
"""Acquire the shared radio operation lock."""
if self._operation_lock is None:
self._operation_lock = asyncio.Lock()
if not blocking:
if self._operation_lock.locked():
raise RadioOperationBusyError(f"Radio is busy (operation: {name})")
await self._operation_lock.acquire()
else:
await self._operation_lock.acquire()
logger.debug("Acquired radio operation lock (%s)", name)
await self._state.acquire_operation_lock(name, blocking=blocking)
def _release_operation_lock(self, name: str) -> None:
"""Release the shared radio operation lock."""
if self._operation_lock and self._operation_lock.locked():
self._operation_lock.release()
logger.debug("Released radio operation lock (%s)", name)
else:
logger.error("Attempted to release unlocked radio operation lock (%s)", name)
self._state.release_operation_lock(name)
async def acquire_operation_lock(self, name: str, *, blocking: bool = True) -> None:
"""Acquire the shared radio operation lock."""
await self._acquire_operation_lock(name, blocking=blocking)
def release_operation_lock(self, name: str) -> None:
"""Release the shared radio operation lock."""
self._release_operation_lock(name)
def _reset_connected_runtime_state(self) -> None:
"""Clear cached runtime state after a transport teardown completes."""
self._setup_complete = False
self.device_info_loaded = False
self.max_contacts = None
self.device_model = None
self.firmware_build = None
self.firmware_version = None
self.max_channels = 40
self.path_hash_mode = 0
self.path_hash_mode_supported = False
self.reset_channel_send_cache()
self.clear_pending_message_channel_slots()
self._state.reset_connected_runtime_state()
@asynccontextmanager
async def radio_operation(
+187
View File
@@ -0,0 +1,187 @@
import asyncio
import logging
from collections import OrderedDict
from app.config import settings
logger = logging.getLogger(__name__)
class RadioOperationError(RuntimeError):
    """Base class for shared radio operation lock errors."""


class RadioOperationBusyError(RadioOperationError):
    """Raised when a non-blocking radio operation cannot acquire the lock."""


class RadioDisconnectedError(RadioOperationError):
    """Raised when the radio disconnects between pre-check and lock acquisition."""
class RadioRuntimeState:
"""Mutable runtime state for one live radio session manager."""
def __init__(self) -> None:
self.connection_info: str | None = None
self.connection_desired: bool = True
self.reconnect_task: asyncio.Task | None = None
self.last_connected: bool = False
self.reconnect_lock: asyncio.Lock | None = None
self.operation_lock: asyncio.Lock | None = None
self.setup_lock: asyncio.Lock | None = None
self.setup_in_progress: bool = False
self.setup_complete: bool = False
self.frontend_reconnect_error_broadcasts: int = 0
self.device_info_loaded: bool = False
self.max_contacts: int | None = None
self.device_model: str | None = None
self.firmware_build: str | None = None
self.firmware_version: str | None = None
self.max_channels: int = 40
self.path_hash_mode: int = 0
self.path_hash_mode_supported: bool = False
self.channel_slot_by_key: OrderedDict[str, int] = OrderedDict()
self.channel_key_by_slot: dict[int, str] = {}
self.pending_message_channel_key_by_slot: dict[int, str] = {}
@property
def is_reconnecting(self) -> bool:
return self.reconnect_lock is not None and self.reconnect_lock.locked()
async def acquire_operation_lock(self, name: str, *, blocking: bool) -> None:
if self.operation_lock is None:
self.operation_lock = asyncio.Lock()
if not blocking:
if self.operation_lock.locked():
raise RadioOperationBusyError(f"Radio is busy (operation: {name})")
# No coroutine can acquire the lock between the check above and
# this await because we have not yielded yet.
await self.operation_lock.acquire()
else:
await self.operation_lock.acquire()
logger.debug("Acquired radio operation lock (%s)", name)
def release_operation_lock(self, name: str) -> None:
if self.operation_lock and self.operation_lock.locked():
self.operation_lock.release()
logger.debug("Released radio operation lock (%s)", name)
else:
logger.error("Attempted to release unlocked radio operation lock (%s)", name)
def reset_connected_runtime_state(self) -> None:
self.setup_complete = False
self.device_info_loaded = False
self.max_contacts = None
self.device_model = None
self.firmware_build = None
self.firmware_version = None
self.max_channels = 40
self.path_hash_mode = 0
self.path_hash_mode_supported = False
self.reset_channel_send_cache()
self.clear_pending_message_channel_slots()
def reset_channel_send_cache(self) -> None:
self.channel_slot_by_key.clear()
self.channel_key_by_slot.clear()
def remember_pending_message_channel_slot(self, channel_key: str, slot: int) -> None:
self.pending_message_channel_key_by_slot[slot] = channel_key.upper()
def get_pending_message_channel_key(self, slot: int) -> str | None:
return self.pending_message_channel_key_by_slot.get(slot)
def clear_pending_message_channel_slots(self) -> None:
self.pending_message_channel_key_by_slot.clear()
def channel_slot_reuse_enabled(self) -> bool:
if settings.force_channel_slot_reconfigure:
return False
if self.connection_info:
return not self.connection_info.startswith("TCP:")
return settings.connection_type != "tcp"
def get_channel_send_cache_capacity(self) -> int:
try:
return max(1, int(self.max_channels))
except (TypeError, ValueError):
return 1
def get_cached_channel_slot(self, channel_key: str) -> int | None:
return self.channel_slot_by_key.get(channel_key.upper())
def plan_channel_send_slot(
self,
channel_key: str,
*,
preferred_slot: int = 0,
) -> tuple[int, bool, str | None]:
if not self.channel_slot_reuse_enabled():
return preferred_slot, True, None
normalized_key = channel_key.upper()
cached_slot = self.channel_slot_by_key.get(normalized_key)
if cached_slot is not None:
return cached_slot, False, None
capacity = self.get_channel_send_cache_capacity()
if len(self.channel_slot_by_key) < capacity:
slot = self._find_first_free_channel_slot(capacity, preferred_slot)
return slot, True, None
evicted_key, slot = next(iter(self.channel_slot_by_key.items()))
return slot, True, evicted_key
def note_channel_slot_loaded(self, channel_key: str, slot: int) -> None:
    """Record that `channel_key` has been programmed into radio `slot`.

    Keeps the two direction maps consistent: removes any stale mapping this
    key had to a different slot, and evicts whichever key the slot held.
    """
    if not self.channel_slot_reuse_enabled():
        return
    normalized_key = channel_key.upper()
    previous_slot = self.channel_slot_by_key.pop(normalized_key, None)
    if previous_slot is not None and previous_slot != slot:
        # Key moved slots: drop the reverse entry for its old slot.
        self.channel_key_by_slot.pop(previous_slot, None)
    displaced_key = self.channel_key_by_slot.get(slot)
    if displaced_key is not None and displaced_key != normalized_key:
        # Slot now holds a new key: forget the key it displaced.
        self.channel_slot_by_key.pop(displaced_key, None)
    self.channel_key_by_slot[slot] = normalized_key
    self.channel_slot_by_key[normalized_key] = slot
def note_channel_slot_used(self, channel_key: str) -> None:
    """Refresh `channel_key` as most-recently-used in the slot cache."""
    if not self.channel_slot_reuse_enabled():
        return
    normalized = channel_key.upper()
    cached_slot = self.channel_slot_by_key.get(normalized)
    if cached_slot is None:
        # Nothing cached for this key; nothing to refresh.
        return
    # move_to_end keeps eviction order LRU-first.
    self.channel_slot_by_key.move_to_end(normalized)
    self.channel_key_by_slot[cached_slot] = normalized
def invalidate_cached_channel_slot(self, channel_key: str) -> None:
    """Remove `channel_key` from the slot cache, keeping both maps consistent."""
    normalized = channel_key.upper()
    slot = self.channel_slot_by_key.pop(normalized, None)
    if slot is None:
        return
    # Only drop the reverse mapping if it still points at this key;
    # another key may have claimed the slot in the meantime.
    if self.channel_key_by_slot.get(slot) == normalized:
        del self.channel_key_by_slot[slot]
def get_channel_send_cache_snapshot(self) -> list[tuple[str, int]]:
    """Return the cached (channel_key, slot) pairs in cache order."""
    return [(key, slot) for key, slot in self.channel_slot_by_key.items()]
def reset_reconnect_error_broadcasts(self) -> None:
    """Reset the counter that limits reconnect-error broadcasts to the frontend."""
    self.frontend_reconnect_error_broadcasts = 0
def _find_first_free_channel_slot(self, capacity: int, preferred_slot: int) -> int:
if preferred_slot < capacity and preferred_slot not in self.channel_key_by_slot:
return preferred_slot
for slot in range(capacity):
if slot not in self.channel_key_by_slot:
return slot
return preferred_slot
+19 -3
View File
@@ -480,7 +480,7 @@ async def drain_pending_messages(mc: MeshCore) -> int:
# Small delay between fetches
await asyncio.sleep(0.1)
except asyncio.TimeoutError:
except TimeoutError:
break
except Exception as e:
logger.warning("Error draining messages: %s", e, exc_info=True)
@@ -518,7 +518,7 @@ async def poll_for_messages(mc: MeshCore) -> int:
# If we got a message, there might be more - drain them
count += await drain_pending_messages(mc)
except asyncio.TimeoutError:
except TimeoutError:
pass
except Exception as e:
logger.warning("Message poll exception: %s", e, exc_info=True)
@@ -1585,9 +1585,10 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
}
try:
timestamp = int(time.time())
await RepeaterTelemetryRepository.record(
public_key=contact.public_key,
timestamp=int(time.time()),
timestamp=timestamp,
data=data,
)
logger.info(
@@ -1595,6 +1596,21 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
contact.name or contact.public_key[:12],
contact.public_key[:12],
)
# Dispatch to fanout modules (e.g. HA MQTT discovery)
from app.fanout.manager import fanout_manager
asyncio.create_task(
fanout_manager.broadcast_telemetry(
{
"public_key": contact.public_key,
"name": contact.name or contact.public_key[:12],
"timestamp": timestamp,
**data,
}
)
)
return True
except Exception as e:
logger.warning(
+4 -3
View File
@@ -557,10 +557,11 @@ class MessageRepository:
@staticmethod
async def increment_ack_count(message_id: int) -> int:
"""Increment ack count and return the new value."""
await db.conn.execute("UPDATE messages SET acked = acked + 1 WHERE id = ?", (message_id,))
await db.conn.commit()
cursor = await db.conn.execute("SELECT acked FROM messages WHERE id = ?", (message_id,))
cursor = await db.conn.execute(
"UPDATE messages SET acked = acked + 1 WHERE id = ? RETURNING acked", (message_id,)
)
row = await cursor.fetchone()
await db.conn.commit()
return row["acked"] if row else 1
@staticmethod
+2 -2
View File
@@ -4,7 +4,7 @@ import os
import platform
import struct
import sys
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any, Literal
from fastapi import APIRouter
@@ -390,7 +390,7 @@ async def debug_support_snapshot() -> DebugSnapshotResponse:
is_reconnecting=is_reconnecting,
)
return DebugSnapshotResponse(
captured_at=datetime.now(timezone.utc).isoformat(),
captured_at=datetime.now(UTC).isoformat(),
system=_build_system_info(),
application=_build_application_info(),
health=_build_debug_health_summary(health_data, radio_state=radio_state),
+47
View File
@@ -7,6 +7,7 @@ from pydantic import BaseModel, Field
from app.config import settings
from app.repository import RawPacketRepository
from app.services.radio_runtime import radio_runtime as radio_manager
from app.services.radio_stats import get_latest_radio_stats
from app.version_info import get_app_build_info
router = APIRouter(tags=["health"])
@@ -32,6 +33,28 @@ class FanoutStatusResponse(BaseModel):
last_error: str | None = None
class RadioStatsSnapshot(BaseModel):
"""Latest cached stats from the local radio's periodic 60s poll."""
timestamp: int | None = None
# Core stats
battery_mv: int | None = None
uptime_secs: int | None = None
# Radio stats
noise_floor: int | None = None
last_rssi: int | None = None
last_snr: float | None = None
tx_air_secs: int | None = None
rx_air_secs: int | None = None
# Packet stats
packets_recv: int | None = None
packets_sent: int | None = None
flood_tx: int | None = None
direct_tx: int | None = None
flood_rx: int | None = None
direct_rx: int | None = None
class HealthResponse(BaseModel):
status: str
radio_connected: bool
@@ -40,6 +63,7 @@ class HealthResponse(BaseModel):
connection_info: str | None
app_info: AppInfoResponse | None = None
radio_device_info: RadioDeviceInfoResponse | None = None
radio_stats: RadioStatsSnapshot | None = None
database_size_mb: float
oldest_undecrypted_timestamp: int | None
fanout_statuses: dict[str, FanoutStatusResponse] = Field(default_factory=dict)
@@ -122,6 +146,28 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
"max_channels": getattr(radio_manager, "max_channels", None),
}
# Local radio stats from the 60s background sampler
raw_stats = get_latest_radio_stats()
radio_stats = None
if raw_stats:
packets = raw_stats.get("packets") or {}
radio_stats = {
"timestamp": raw_stats.get("timestamp"),
"battery_mv": raw_stats.get("battery_mv"),
"uptime_secs": raw_stats.get("uptime_secs"),
"noise_floor": raw_stats.get("noise_floor"),
"last_rssi": raw_stats.get("last_rssi"),
"last_snr": raw_stats.get("last_snr"),
"tx_air_secs": raw_stats.get("tx_air_secs"),
"rx_air_secs": raw_stats.get("rx_air_secs"),
"packets_recv": packets.get("recv"),
"packets_sent": packets.get("sent"),
"flood_tx": packets.get("flood_tx"),
"direct_tx": packets.get("direct_tx"),
"flood_rx": packets.get("flood_rx"),
"direct_rx": packets.get("direct_rx"),
}
return {
"status": "ok" if radio_connected and not radio_initializing else "degraded",
"radio_connected": radio_connected,
@@ -133,6 +179,7 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
"commit_hash": app_build_info.commit_hash,
},
"radio_device_info": radio_device_info,
"radio_stats": radio_stats,
"database_size_mb": db_size_mb,
"oldest_undecrypted_timestamp": oldest_ts,
"fanout_statuses": fanout_statuses,
+2 -2
View File
@@ -473,7 +473,7 @@ async def discover_mesh(request: RadioDiscoveryRequest) -> RadioDiscoveryRespons
break
try:
event = await asyncio.wait_for(events.get(), timeout=remaining)
except asyncio.TimeoutError:
except TimeoutError:
break
merged = _merge_discovery_result(
@@ -536,7 +536,7 @@ async def trace_path(request: RadioTraceRequest) -> RadioTraceResponse:
timeout_seconds = _trace_timeout_seconds(send_result)
try:
event = await asyncio.wait_for(response_task, timeout=timeout_seconds)
except asyncio.TimeoutError as exc:
except TimeoutError as exc:
raise HTTPException(status_code=504, detail="No trace response heard") from exc
finally:
if not response_task.done():
+15
View File
@@ -1,3 +1,4 @@
import asyncio
import logging
import time
@@ -133,6 +134,20 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
timestamp=now,
data=status_dict,
)
# Dispatch to fanout modules (e.g. HA MQTT discovery)
from app.fanout.manager import fanout_manager
asyncio.create_task(
fanout_manager.broadcast_telemetry(
{
"public_key": contact.public_key,
"name": contact.name or contact.public_key[:12],
"timestamp": now,
**status_dict,
}
)
)
except Exception as e:
logger.warning("Failed to record telemetry history: %s", e)
+2 -2
View File
@@ -94,7 +94,7 @@ async def fetch_contact_cli_response(
while _monotonic() < deadline:
try:
result = await mc.commands.get_msg(timeout=2.0)
except asyncio.TimeoutError:
except TimeoutError:
continue
except Exception as exc:
logger.debug("get_msg() exception: %s", exc)
@@ -196,7 +196,7 @@ async def prepare_authenticated_contact_connection(
login_future,
timeout=response_timeout,
)
except asyncio.TimeoutError:
except TimeoutError:
logger.warning(
"No login response from %s %s within %.1fs",
contact_label,
+2 -2
View File
@@ -2,7 +2,7 @@ from fastapi import APIRouter
from app.models import StatisticsResponse
from app.repository import StatisticsRepository
from app.services.radio_noise_floor import get_noise_floor_history
from app.services.radio_stats import get_noise_floor_history
router = APIRouter(prefix="/statistics", tags=["statistics"])
@@ -10,5 +10,5 @@ router = APIRouter(prefix="/statistics", tags=["statistics"])
@router.get("", response_model=StatisticsResponse)
async def get_statistics() -> StatisticsResponse:
data = await StatisticsRepository.get_all()
data["noise_floor_24h"] = await get_noise_floor_history()
data["noise_floor_24h"] = get_noise_floor_history()
return StatisticsResponse(**data)
+45 -26
View File
@@ -264,38 +264,43 @@ async def send_channel_message_with_effective_scope(
return send_result
finally:
if override_scope and override_scope != baseline_scope:
try:
restore_result = await mc.commands.set_flood_scope(
baseline_scope if baseline_scope else ""
)
if restore_result is not None and restore_result.type == EventType.ERROR:
logger.error(
"Failed to restore baseline flood_scope after sending to %s: %s",
restored = False
for attempt in range(3):
try:
restore_result = await mc.commands.set_flood_scope(
baseline_scope if baseline_scope else ""
)
if restore_result is not None and restore_result.type == EventType.ERROR:
logger.warning(
"Attempt %d/3: failed to restore flood_scope after sending to %s: %s",
attempt + 1,
channel.name,
restore_result.payload,
)
else:
logger.debug(
"Restored baseline flood_scope after channel send: %r",
baseline_scope or "(disabled)",
)
restored = True
break
except Exception:
logger.exception(
"Attempt %d/3: exception restoring flood_scope after sending to %s",
attempt + 1,
channel.name,
restore_result.payload,
)
error_broadcast_fn(
"Regional override restore failed",
(
f"Sent to {channel.name}, but restoring flood scope failed. "
"The radio may still be region-scoped. Consider rebooting the radio."
),
)
else:
logger.debug(
"Restored baseline flood_scope after channel send: %r",
baseline_scope or "(disabled)",
)
except Exception:
logger.exception(
"Failed to restore baseline flood_scope after sending to %s",
if not restored:
logger.error(
"All 3 attempts to restore flood_scope failed for %s",
channel.name,
)
error_broadcast_fn(
"Regional override restore failed",
(
f"Sent to {channel.name}, but restoring flood scope failed. "
"The radio may still be region-scoped. Consider rebooting the radio."
f"Sent to {channel.name}, but restoring flood scope failed "
f"after 3 attempts. The radio may still be region-scoped. "
f"Consider rebooting the radio."
),
)
@@ -421,7 +426,8 @@ async def _retry_direct_message_until_acked(
message_repository,
) -> None:
next_wait_timeout_ms = wait_timeout_ms
for attempt in range(1, DM_SEND_MAX_ATTEMPTS):
attempt = 1
while attempt < DM_SEND_MAX_ATTEMPTS:
await sleep_fn((next_wait_timeout_ms / 1000) * DM_RETRY_WAIT_MARGIN)
if await _is_message_acked(message_id=message_id, message_repository=message_repository):
return
@@ -463,6 +469,14 @@ async def _retry_direct_message_until_acked(
timestamp=sender_timestamp,
attempt=attempt,
)
except RadioOperationBusyError:
logger.debug(
"Radio busy during DM retry attempt %d/%d for %s, will retry without consuming attempt",
attempt + 1,
DM_SEND_MAX_ATTEMPTS,
contact.public_key[:12],
)
continue
except Exception:
logger.exception(
"Background DM retry attempt %d/%d failed for %s",
@@ -470,6 +484,7 @@ async def _retry_direct_message_until_acked(
DM_SEND_MAX_ATTEMPTS,
contact.public_key[:12],
)
attempt += 1
continue
if result is None:
@@ -479,6 +494,7 @@ async def _retry_direct_message_until_acked(
DM_SEND_MAX_ATTEMPTS,
contact.public_key[:12],
)
attempt += 1
continue
if result.type == EventType.ERROR:
@@ -489,6 +505,7 @@ async def _retry_direct_message_until_acked(
contact.public_key[:12],
result.payload,
)
attempt += 1
continue
if await _is_message_acked(message_id=message_id, message_repository=message_repository):
@@ -516,6 +533,8 @@ async def _retry_direct_message_until_acked(
if ack_count > 0:
return
attempt += 1
async def send_direct_message_to_contact(
*,
+3 -3
View File
@@ -1,6 +1,6 @@
import asyncio
import logging
from datetime import datetime, timezone
from datetime import UTC, datetime
logger = logging.getLogger(__name__)
@@ -193,7 +193,7 @@ async def run_post_connect_setup(radio_manager) -> None:
logger.info(
"Radio clock at connect: epoch=%d utc=%s",
radio_time,
datetime.fromtimestamp(radio_time, timezone.utc).strftime(
datetime.fromtimestamp(radio_time, UTC).strftime(
"%Y-%m-%d %H:%M:%S UTC"
),
)
@@ -274,7 +274,7 @@ async def prepare_connected_radio(radio_manager, *, broadcast_on_success: bool =
try:
await radio_manager.post_connect_setup()
break
except asyncio.TimeoutError as exc:
except TimeoutError as exc:
if attempt < POST_CONNECT_SETUP_MAX_ATTEMPTS:
logger.warning(
"Post-connect setup timed out after %ds on attempt %d/%d; retrying once",
-119
View File
@@ -1,119 +0,0 @@
"""In-memory local-radio noise floor history sampling."""
import asyncio
import logging
import time
from collections import deque
from meshcore import EventType
from app.radio import RadioDisconnectedError, RadioOperationBusyError
from app.services.radio_runtime import radio_runtime as radio_manager
logger = logging.getLogger(__name__)
NOISE_FLOOR_SAMPLE_INTERVAL_SECONDS = 300
NOISE_FLOOR_WINDOW_SECONDS = 24 * 60 * 60
MAX_NOISE_FLOOR_SAMPLES = 300
_noise_floor_task: asyncio.Task | None = None
_noise_floor_samples: deque[tuple[int, int]] = deque(maxlen=MAX_NOISE_FLOOR_SAMPLES)
_noise_floor_supported: bool | None = None
_samples_lock = asyncio.Lock()
async def _append_sample(timestamp: int, noise_floor_dbm: int) -> None:
    """Append one (timestamp, dBm) sample under the lock; the deque caps history size."""
    async with _samples_lock:
        _noise_floor_samples.append((timestamp, noise_floor_dbm))
async def sample_noise_floor_once(*, blocking: bool = False) -> None:
    """Fetch the current radio noise floor once and record it when available.

    blocking: forwarded to radio_operation(); when False the sample is
    skipped if the radio lock is busy.

    Silently returns when the radio is disconnected/busy, when the stats
    command errors (marks noise floor unsupported), or when the payload
    lacks an integer noise_floor value.
    """
    global _noise_floor_supported
    if not radio_manager.is_connected:
        return
    try:
        async with radio_manager.radio_operation("noise_floor_sample", blocking=blocking) as mc:
            event = await mc.commands.get_stats_radio()
    except (RadioDisconnectedError, RadioOperationBusyError):
        # Expected contention/teardown; try again on the next interval.
        return
    except Exception as exc:
        logger.debug("Noise floor sampling failed: %s", exc)
        return
    if event.type == EventType.ERROR:
        # Firmware rejected the stats command; remember it as unsupported.
        _noise_floor_supported = False
        return
    if event.type != EventType.STATS_RADIO:
        return
    noise_floor = event.payload.get("noise_floor")
    if not isinstance(noise_floor, int):
        return
    _noise_floor_supported = True
    await _append_sample(int(time.time()), noise_floor)
async def _noise_floor_sampling_loop() -> None:
    """Sample forever at NOISE_FLOOR_SAMPLE_INTERVAL_SECONDS; survive sample errors."""
    while True:
        try:
            await sample_noise_floor_once()
        except asyncio.CancelledError:
            # Propagate cancellation so stop() can join the task.
            raise
        except Exception:
            logger.exception("Noise floor sampling loop crashed during sample")
        try:
            await asyncio.sleep(NOISE_FLOOR_SAMPLE_INTERVAL_SECONDS)
        except asyncio.CancelledError:
            raise
async def start_noise_floor_sampling() -> None:
    """Start the background sampling task; no-op if one is already running."""
    global _noise_floor_task
    if _noise_floor_task is not None and not _noise_floor_task.done():
        return
    _noise_floor_task = asyncio.create_task(_noise_floor_sampling_loop())
async def stop_noise_floor_sampling() -> None:
    """Cancel the background sampling task and wait for it to finish."""
    global _noise_floor_task
    if _noise_floor_task is None:
        return
    if not _noise_floor_task.done():
        _noise_floor_task.cancel()
    try:
        # Await so cancellation completes before we drop the reference.
        await _noise_floor_task
    except asyncio.CancelledError:
        pass
    _noise_floor_task = None
async def get_noise_floor_history() -> dict:
    """Return the current 24-hour in-memory noise floor history snapshot.

    Returns sample metadata, the latest reading, a supported flag
    (None until the first probe), and the window-filtered sample list.
    """
    now = int(time.time())
    cutoff = now - NOISE_FLOOR_WINDOW_SECONDS
    async with _samples_lock:
        samples = [
            {"timestamp": timestamp, "noise_floor_dbm": noise_floor_dbm}
            for timestamp, noise_floor_dbm in _noise_floor_samples
            if timestamp >= cutoff
        ]
    latest = samples[-1] if samples else None
    oldest_timestamp = samples[0]["timestamp"] if samples else None
    # max(0, ...) guards against clock skew making "now" precede a sample.
    coverage_seconds = 0 if oldest_timestamp is None else max(0, now - oldest_timestamp)
    return {
        "sample_interval_seconds": NOISE_FLOOR_SAMPLE_INTERVAL_SECONDS,
        "coverage_seconds": coverage_seconds,
        "latest_noise_floor_dbm": latest["noise_floor_dbm"] if latest else None,
        "latest_timestamp": latest["timestamp"] if latest else None,
        "supported": _noise_floor_supported,
        "samples": samples,
    }
+133 -20
View File
@@ -1,8 +1,8 @@
"""Shared access seam over the global RadioManager instance.
"""Shared access seam over the process-global radio runtime.
This module deliberately keeps behavior thin and forwarding-only. The goal is
to reduce direct `app.radio.radio_manager` imports across routers and helpers
without changing radio lifecycle, lock, or connection semantics.
The runtime object is the public boundary for application code. It exposes the
current manager plus its mutable session state through an explicit API instead
of forwarding arbitrary attribute access to the manager instance.
"""
from collections.abc import Callable
@@ -15,7 +15,7 @@ import app.radio as radio_module
class RadioRuntime:
"""Thin forwarding wrapper around the process-global RadioManager."""
"""Explicit access seam over the process-global RadioManager."""
def __init__(self, manager_or_getter=None):
if manager_or_getter is None:
@@ -30,24 +30,90 @@ class RadioRuntime:
return self._manager_getter()
def __getattr__(self, name: str) -> Any:
"""Forward unknown attributes to the current global manager."""
return getattr(self.manager, name)
raise AttributeError(
f"{type(self).__name__!s} does not expose attribute {name!r}. "
"Use an explicit RadioRuntime property or method."
)
@staticmethod
def _is_local_runtime_attr(name: str) -> bool:
return name.startswith("_") or hasattr(RadioRuntime, name)
@property
def state(self) -> Any:
return self.manager.state
def __setattr__(self, name: str, value: Any) -> None:
if self._is_local_runtime_attr(name):
object.__setattr__(self, name, value)
return
setattr(self.manager, name, value)
@property
def meshcore(self) -> Any:
return self.manager.meshcore
def __delattr__(self, name: str) -> None:
if self._is_local_runtime_attr(name):
object.__delattr__(self, name)
return
delattr(self.manager, name)
@property
def connection_info(self) -> str | None:
return self.manager.connection_info
@property
def is_connected(self) -> bool:
return self.manager.is_connected
@property
def is_reconnecting(self) -> bool:
return self.manager.is_reconnecting
@property
def is_setup_in_progress(self) -> bool:
return self.manager.is_setup_in_progress
@property
def is_setup_complete(self) -> bool:
return self.manager.is_setup_complete
@property
def connection_desired(self) -> bool:
return self.manager.connection_desired
@property
def max_contacts(self) -> int | None:
return self.state.max_contacts
@max_contacts.setter
def max_contacts(self, value: int | None) -> None:
self.state.max_contacts = value
@property
def max_channels(self) -> int:
return self.state.max_channels
@max_channels.setter
def max_channels(self, value: int) -> None:
self.state.max_channels = value
@property
def path_hash_mode(self) -> int:
return self.state.path_hash_mode
@path_hash_mode.setter
def path_hash_mode(self, value: int) -> None:
self.state.path_hash_mode = value
@property
def path_hash_mode_supported(self) -> bool:
return self.state.path_hash_mode_supported
@path_hash_mode_supported.setter
def path_hash_mode_supported(self, value: bool) -> None:
self.state.path_hash_mode_supported = value
@property
def device_info_loaded(self) -> bool:
return self.state.device_info_loaded
@property
def device_model(self) -> str | None:
return self.state.device_model
@property
def firmware_build(self) -> str | None:
return self.state.firmware_build
@property
def firmware_version(self) -> str | None:
return self.state.firmware_version
def require_connected(self):
"""Return MeshCore when available, mirroring existing HTTP semantics."""
@@ -89,5 +155,52 @@ class RadioRuntime:
broadcast_on_success=broadcast_on_success,
)
def reset_channel_send_cache(self) -> None:
self.state.reset_channel_send_cache()
def remember_pending_message_channel_slot(self, channel_key: str, slot: int) -> None:
self.state.remember_pending_message_channel_slot(channel_key, slot)
def get_pending_message_channel_key(self, slot: int) -> str | None:
return self.state.get_pending_message_channel_key(slot)
def clear_pending_message_channel_slots(self) -> None:
self.state.clear_pending_message_channel_slots()
def channel_slot_reuse_enabled(self) -> bool:
return self.state.channel_slot_reuse_enabled()
def get_channel_send_cache_capacity(self) -> int:
return self.state.get_channel_send_cache_capacity()
def get_cached_channel_slot(self, channel_key: str) -> int | None:
return self.state.get_cached_channel_slot(channel_key)
def plan_channel_send_slot(
self,
channel_key: str,
*,
preferred_slot: int = 0,
) -> tuple[int, bool, str | None]:
return self.state.plan_channel_send_slot(channel_key, preferred_slot=preferred_slot)
def note_channel_slot_loaded(self, channel_key: str, slot: int) -> None:
self.state.note_channel_slot_loaded(channel_key, slot)
def note_channel_slot_used(self, channel_key: str) -> None:
self.state.note_channel_slot_used(channel_key)
def invalidate_cached_channel_slot(self, channel_key: str) -> None:
self.state.invalidate_cached_channel_slot(channel_key)
def get_channel_send_cache_snapshot(self) -> list[tuple[str, int]]:
return self.state.get_channel_send_cache_snapshot()
def resume_connection(self) -> None:
self.manager.resume_connection()
async def pause_connection(self) -> None:
await self.manager.pause_connection()
radio_runtime = RadioRuntime()
+195
View File
@@ -0,0 +1,195 @@
"""In-memory local-radio stats sampling.
A single 60s loop fetches core, radio, and packet stats from the connected
radio in one radio-lock acquisition. The noise-floor 24h history deque is
maintained as a side effect.
After each sample the loop:
1. Broadcasts a WS ``health`` frame so frontend dashboards refresh.
2. Dispatches a ``broadcast_health_fanout`` event carrying the full stats
snapshot plus radio identity, so fanout modules (e.g. HA MQTT) can
publish sensor state without a second radio poll.
Consumers:
- GET /api/health      -> get_latest_radio_stats()   (battery, uptime, etc.)
- GET /api/statistics  -> get_noise_floor_history()  (24h noise-floor chart)
- Fanout on_health     -> _build_fanout_payload()    (identity + stats)
"""
import asyncio
import logging
import time
from collections import deque
from typing import Any
from meshcore import EventType
from app.radio import RadioDisconnectedError, RadioOperationBusyError
from app.services.radio_runtime import radio_runtime as radio_manager
logger = logging.getLogger(__name__)
STATS_SAMPLE_INTERVAL_SECONDS = 60
NOISE_FLOOR_WINDOW_SECONDS = 24 * 60 * 60
MAX_NOISE_FLOOR_SAMPLES = 1500 # 24h at 60s intervals = 1440
_stats_task: asyncio.Task | None = None
_noise_floor_samples: deque[tuple[int, int]] = deque(maxlen=MAX_NOISE_FLOOR_SAMPLES)
_latest_stats: dict[str, Any] = {}
async def _sample_all_stats() -> dict[str, Any]:
    """Fetch core, radio, and packet stats in one radio operation.

    Returns the snapshot dict (may be empty if the radio is disconnected or
    all commands errored).
    """
    if not radio_manager.is_connected:
        return {}
    try:
        # One non-blocking lock acquisition covers all three commands.
        async with radio_manager.radio_operation("radio_stats_sample", blocking=False) as mc:
            core_event = await mc.commands.get_stats_core()
            radio_event = await mc.commands.get_stats_radio()
            packet_event = await mc.commands.get_stats_packets()
    except (RadioDisconnectedError, RadioOperationBusyError):
        # Expected contention/teardown; retry on the next interval.
        return {}
    except Exception as exc:
        logger.debug("Radio stats sampling failed: %s", exc)
        return {}
    now = int(time.time())
    snapshot: dict[str, Any] = {"timestamp": now}
    if getattr(core_event, "type", None) == EventType.STATS_CORE:
        snapshot.update(core_event.payload)
    if getattr(radio_event, "type", None) == EventType.STATS_RADIO:
        snapshot.update(radio_event.payload)
        noise_floor = radio_event.payload.get("noise_floor")
        if isinstance(noise_floor, int):
            # Side effect: maintain the 24h noise-floor history deque.
            _noise_floor_samples.append((now, noise_floor))
    if getattr(packet_event, "type", None) == EventType.STATS_PACKETS:
        snapshot["packets"] = packet_event.payload
    # Only "timestamp" present means every command failed — treat as empty.
    has_any_data = len(snapshot) > 1
    return snapshot if has_any_data else {}
def _build_fanout_payload(stats: dict[str, Any]) -> dict:
    """Build the health fanout payload from a stats snapshot + radio identity.

    Identity and connection fields are always present; stat fields are only
    added when a snapshot exists, so fanout modules can publish rich sensor
    data without a second radio poll.
    """
    mc = radio_manager.meshcore
    self_info = mc.self_info if mc else None

    def _identity(field: str) -> Any:
        # Coerce empty-string identity values to None.
        return (self_info.get(field) or None) if self_info else None

    payload: dict = {
        "connected": radio_manager.is_connected,
        "connection_info": radio_manager.connection_info,
        "public_key": _identity("public_key"),
        "name": _identity("name"),
    }
    if stats:
        payload["noise_floor_dbm"] = stats.get("noise_floor")
        for field in (
            "battery_mv",
            "uptime_secs",
            "last_rssi",
            "last_snr",
            "tx_air_secs",
            "rx_air_secs",
        ):
            payload[field] = stats.get(field)
        packets = stats.get("packets") or {}
        payload["packets_recv"] = packets.get("recv")
        payload["packets_sent"] = packets.get("sent")
        for field in ("flood_tx", "direct_tx", "flood_rx", "direct_rx"):
            payload[field] = packets.get(field)
    return payload
async def _stats_sampling_loop() -> None:
    """Every STATS_SAMPLE_INTERVAL_SECONDS: sample stats, refresh the cache,
    broadcast a WS health frame, and dispatch the fanout health snapshot."""
    global _latest_stats
    while True:
        try:
            snapshot = await _sample_all_stats()
            if snapshot:
                _latest_stats = snapshot
            elif not radio_manager.is_connected:
                # Drop stale stats once the radio is gone, but keep the last
                # good snapshot through transient sample failures.
                _latest_stats = {}
            # Imported here, not at module top — presumably to avoid an
            # import cycle; confirm before hoisting.
            from app.websocket import broadcast_health
            broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
            # Dispatch enriched health snapshot to fanout modules
            from app.fanout.manager import fanout_manager
            await fanout_manager.broadcast_health_fanout(_build_fanout_payload(snapshot))
        except asyncio.CancelledError:
            raise
        except Exception:
            logger.exception("Radio stats sampling loop error")
        try:
            await asyncio.sleep(STATS_SAMPLE_INTERVAL_SECONDS)
        except asyncio.CancelledError:
            raise
# ── Public API ────────────────────────────────────────────────────────────
async def start_radio_stats_sampling() -> None:
    """Start the periodic radio stats background task; no-op if already running."""
    global _stats_task
    if _stats_task is not None and not _stats_task.done():
        return
    _stats_task = asyncio.create_task(_stats_sampling_loop())
async def stop_radio_stats_sampling() -> None:
    """Stop the periodic radio stats background task and wait for it to exit."""
    global _stats_task
    if _stats_task is None:
        return
    if not _stats_task.done():
        _stats_task.cancel()
    try:
        # Await so cancellation completes before we drop the reference.
        await _stats_task
    except asyncio.CancelledError:
        pass
    _stats_task = None
def get_noise_floor_history() -> dict:
    """Return the current 24-hour in-memory noise floor history snapshot."""
    now = int(time.time())
    cutoff = now - NOISE_FLOOR_WINDOW_SECONDS
    samples = []
    for timestamp, noise_floor_dbm in _noise_floor_samples:
        if timestamp >= cutoff:
            samples.append({"timestamp": timestamp, "noise_floor_dbm": noise_floor_dbm})
    if samples:
        latest = samples[-1]
        # max(0, ...) guards against clock skew making "now" precede a sample.
        coverage_seconds = max(0, now - samples[0]["timestamp"])
    else:
        latest = None
        coverage_seconds = 0
    # NOTE(review): the replaced noise-floor module also reported a
    # "supported" flag; confirm no consumer still expects that key.
    return {
        "sample_interval_seconds": STATS_SAMPLE_INTERVAL_SECONDS,
        "coverage_seconds": coverage_seconds,
        "latest_noise_floor_dbm": latest["noise_floor_dbm"] if latest else None,
        "latest_timestamp": latest["timestamp"] if latest else None,
        "samples": samples,
    }
def get_latest_radio_stats() -> dict[str, Any]:
    """Return the most recent radio stats snapshot (for the health endpoint)."""
    # Shallow copy so callers cannot mutate the module-level cache.
    return {**_latest_stats}
+1 -2
View File
@@ -13,13 +13,12 @@ import importlib.metadata
import json
import os
import subprocess
import tomllib
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path
from typing import Any
import tomllib
RELEASE_BUILD_INFO_FILENAME = "build_info.json"
PROJECT_NAME = "remoteterm-meshcore"
+3 -1
View File
@@ -59,7 +59,7 @@ class WebSocketManager:
try:
# Timeout prevents blocking on slow/unresponsive clients
await asyncio.wait_for(connection.send_text(message), timeout=SEND_TIMEOUT_SECONDS)
except asyncio.TimeoutError:
except TimeoutError:
logger.debug("Timeout sending to WebSocket client, marking disconnected")
disconnected.append(connection)
except Exception as e:
@@ -110,6 +110,8 @@ def broadcast_event(event_type: str, data: dict, *, realtime: bool = True) -> No
asyncio.create_task(fanout_manager.broadcast_message(data))
elif event_type == "raw_packet":
asyncio.create_task(fanout_manager.broadcast_raw(data))
elif event_type == "contact":
asyncio.create_task(fanout_manager.broadcast_contact(data))
def broadcast_error(message: str, details: str | None = None) -> None:
+1 -1
View File
@@ -31,7 +31,7 @@ services:
# TCP
# MESHCORE_TCP_HOST: 192.168.1.100
# MESHCORE_TCP_PORT: 4000
# MESHCORE_TCP_PORT: 5000
# BLE
# BLE in Docker usually needs additional manual compose changes such as
+2 -2
View File
@@ -348,14 +348,14 @@ LocalStorage migration helpers for favorites; canonical favorites are server-sid
`AppSettings` currently includes:
- `max_radio_contacts`
- `favorites`
- `auto_decrypt_dm_on_advert`
- `last_message_times`
- `preferences_migrated`
- `advert_interval`
- `last_advert_time`
- `flood_scope`
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
- `tracked_telemetry_repeaters`
- `auto_resend_channel`
Note: MQTT, bot, and community MQTT settings were migrated to the `fanout_configs` table (managed via `/api/fanout`). They are no longer part of `AppSettings`.
+1 -1
View File
@@ -1,7 +1,7 @@
{
"name": "remoteterm-meshcore-frontend",
"private": true,
"version": "3.9.0",
"version": "3.11.0",
"type": "module",
"scripts": {
"dev": "vite",
+4 -4
View File
@@ -7,7 +7,7 @@ import {
Radio,
Route,
Search,
Sparkles,
Star,
User,
Waypoints,
} from 'lucide-react';
@@ -296,7 +296,7 @@ export function CommandPalette({
>
<Hash className="text-muted-foreground" />
<span>{ch.name}</span>
<Sparkles className="ml-auto h-3 w-3 text-yellow-500" />
<Star className="ml-auto h-3 w-3 text-favorite" />
</CommandItem>
))}
</CommandGroup>
@@ -384,7 +384,7 @@ function ContactGroup({
>
<Icon className="text-muted-foreground" />
<span>{displayName}</span>
{showStar && <Sparkles className="ml-auto h-3 w-3 text-yellow-500" />}
{showStar && <Star className="ml-auto h-3 w-3 text-favorite" />}
</CommandItem>
))}
</CommandGroup>
@@ -419,7 +419,7 @@ function RepeaterGroup({
>
<Waypoints className="text-muted-foreground" />
<span>{displayName}</span>
{showStar && <Sparkles className="ml-auto h-3 w-3 text-yellow-500" />}
{showStar && <Star className="ml-auto h-3 w-3 text-favorite" />}
</CommandItem>,
<CommandItem
key={`${c.public_key}-acl`}
+179 -26
View File
@@ -11,11 +11,14 @@ import {
Cell,
} from 'recharts';
import { MeshCoreDecoder, Utils } from '@michaelhart/meshcore-decoder';
import { RawPacketList } from './RawPacketList';
import { RawPacketInspectorDialog } from './RawPacketDetailModal';
import { Button } from './ui/button';
import type { Channel, Contact, RawPacket } from '../types';
import {
KNOWN_PAYLOAD_TYPES,
RAW_PACKET_STATS_WINDOWS,
buildRawPacketStatsSnapshot,
type NeighborStat,
@@ -24,9 +27,26 @@ import {
type RawPacketStatsSessionState,
type RawPacketStatsWindow,
} from '../utils/rawPacketStats';
import { createDecoderOptions } from '../utils/rawPacketInspector';
import { getContactDisplayName } from '../utils/pubkey';
import { cn } from '@/lib/utils';
const KNOWN_PAYLOAD_TYPE_SET = new Set<string>(KNOWN_PAYLOAD_TYPES);
function getPacketTypeName(
packet: RawPacket,
decoderOptions?: ReturnType<typeof createDecoderOptions>
): string {
try {
const decoded = MeshCoreDecoder.decode(packet.data, decoderOptions);
if (!decoded.isValid) return 'Unknown';
const name = Utils.getPayloadTypeName(decoded.payloadType);
return KNOWN_PAYLOAD_TYPE_SET.has(name) ? name : 'Unknown';
} catch {
return 'Unknown';
}
}
interface RawPacketFeedViewProps {
packets: RawPacket[];
rawPacketStatsSession: RawPacketStatsSessionState;
@@ -428,6 +448,48 @@ export function RawPacketFeedView({
const [nowSec, setNowSec] = useState(() => Math.floor(Date.now() / 1000));
const [selectedPacket, setSelectedPacket] = useState<RawPacket | null>(null);
const [analyzeModalOpen, setAnalyzeModalOpen] = useState(false);
const [mobileFiltersOpen, setMobileFiltersOpen] = useState(false);
const [enabledTypes, setEnabledTypes] = useState<Set<string>>(() => new Set(KNOWN_PAYLOAD_TYPES));
const decoderOptions = useMemo(() => createDecoderOptions(channels), [channels]);
const packetsWithTypes = useMemo(
() =>
packets.map((packet) => ({
packet,
payloadType: getPacketTypeName(packet, decoderOptions),
})),
[packets, decoderOptions]
);
const allTypesEnabled = enabledTypes.size === KNOWN_PAYLOAD_TYPES.length;
const filteredPackets = useMemo(() => {
if (allTypesEnabled) return packets;
return packetsWithTypes
.filter(({ payloadType }) => enabledTypes.has(payloadType))
.map(({ packet }) => packet);
}, [packetsWithTypes, enabledTypes, packets, allTypesEnabled]);
const handleToggleAll = () => {
setEnabledTypes(allTypesEnabled ? new Set() : new Set(KNOWN_PAYLOAD_TYPES));
};
const handleToggleType = (type: string) => {
setEnabledTypes((prev) => {
const next = new Set(prev);
if (next.has(type)) {
next.delete(type);
} else {
next.add(type);
}
return next;
});
};
const handleOnly = (type: string) => {
setEnabledTypes(new Set([type]));
};
useEffect(() => {
const interval = window.setInterval(() => {
@@ -468,38 +530,129 @@ export function RawPacketFeedView({
);
return (
<>
<div className="flex items-center justify-between gap-3 border-b border-border px-4 py-2.5">
<div>
<h2 className="font-semibold text-base text-foreground">Raw Packet Feed</h2>
<p className="text-xs text-muted-foreground">
Collecting stats since {formatTimestamp(rawPacketStatsSession.sessionStartedAt)}
</p>
<div className="border-b border-border px-4 py-2.5">
<div className="flex items-center justify-between gap-3">
<div>
<h2 className="font-semibold text-base text-foreground">Raw Packet Feed</h2>
<p className="hidden md:block text-xs text-muted-foreground">
Collecting stats since {formatTimestamp(rawPacketStatsSession.sessionStartedAt)}
</p>
</div>
<div className="flex items-center gap-2">
<Button
type="button"
variant="outline"
size="sm"
onClick={() => setAnalyzeModalOpen(true)}
>
Analyze Packet
</Button>
<Button
type="button"
variant="outline"
size="sm"
onClick={() => setStatsOpen((current) => !current)}
aria-expanded={statsOpen}
>
{statsOpen ? (
<ChevronRight className="h-4 w-4" />
) : (
<ChevronLeft className="h-4 w-4" />
)}
{statsOpen ? 'Hide Stats' : 'Show Stats'}
</Button>
</div>
</div>
<div className="flex items-center gap-2">
<Button
type="button"
variant="outline"
size="sm"
onClick={() => setAnalyzeModalOpen(true)}
>
Analyze Packet
</Button>
<Button
type="button"
variant="outline"
size="sm"
onClick={() => setStatsOpen((current) => !current)}
aria-expanded={statsOpen}
>
{statsOpen ? <ChevronRight className="h-4 w-4" /> : <ChevronLeft className="h-4 w-4" />}
{statsOpen ? 'Hide Stats' : 'Show Stats'}
</Button>
<p className="md:hidden text-xs text-muted-foreground">
Collecting stats since {formatTimestamp(rawPacketStatsSession.sessionStartedAt)}
{!mobileFiltersOpen && (
<>
{' · '}
<button
type="button"
className="text-primary hover:text-primary/80 transition-colors"
onClick={() => setMobileFiltersOpen(true)}
>
Show Filters
</button>
</>
)}
</p>
{mobileFiltersOpen && (
<div className="mt-1.5 md:hidden flex flex-wrap items-center gap-x-3 gap-y-1">
<label className="flex items-center gap-1 text-xs text-muted-foreground cursor-pointer">
<input
type="checkbox"
checked={allTypesEnabled}
onChange={handleToggleAll}
className="rounded"
/>
All
</label>
{KNOWN_PAYLOAD_TYPES.map((type) => (
<span key={type} className="inline-flex items-center gap-1 text-xs">
<label className="flex items-center gap-1 text-foreground cursor-pointer">
<input
type="checkbox"
checked={enabledTypes.has(type)}
onChange={() => handleToggleType(type)}
className="rounded"
/>
{type}
</label>
<button
type="button"
className="text-[0.625rem] text-muted-foreground hover:text-primary transition-colors"
onClick={() => handleOnly(type)}
>
(only)
</button>
</span>
))}
</div>
)}
<div className="mt-1.5 hidden md:flex flex-wrap items-center gap-x-3 gap-y-1">
<label className="flex items-center gap-1 text-xs text-muted-foreground cursor-pointer">
<input
type="checkbox"
checked={allTypesEnabled}
onChange={handleToggleAll}
className="rounded"
/>
All
</label>
{KNOWN_PAYLOAD_TYPES.map((type) => (
<span key={type} className="inline-flex items-center gap-1 text-xs">
<label className="flex items-center gap-1 text-foreground cursor-pointer">
<input
type="checkbox"
checked={enabledTypes.has(type)}
onChange={() => handleToggleType(type)}
className="rounded"
/>
{type}
</label>
<button
type="button"
className="text-[0.625rem] text-muted-foreground hover:text-primary transition-colors"
onClick={() => handleOnly(type)}
>
(only)
</button>
</span>
))}
</div>
</div>
<div className="flex min-h-0 flex-1 flex-col md:flex-row">
<div className={cn('min-h-0 min-w-0 flex-1', statsOpen && 'md:border-r md:border-border')}>
<RawPacketList packets={packets} channels={channels} onPacketClick={setSelectedPacket} />
<RawPacketList
packets={filteredPackets}
channels={channels}
onPacketClick={setSelectedPacket}
/>
</div>
<aside
+57 -2
View File
@@ -1,10 +1,24 @@
import { useEffect, useState } from 'react';
import { Menu, Moon, Sun } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import {
BatteryFull,
BatteryLow,
BatteryMedium,
BatteryWarning,
Menu,
Moon,
Sun,
} from 'lucide-react';
import type { HealthStatus, RadioConfig } from '../types';
import { api } from '../api';
import { toast } from './ui/sonner';
import { handleKeyboardActivate } from '../utils/a11y';
import { applyTheme, getSavedTheme, THEME_CHANGE_EVENT } from '../utils/theme';
import {
BATTERY_DISPLAY_CHANGE_EVENT,
getShowBatteryPercent,
getShowBatteryVoltage,
mvToPercent,
} from '../utils/batteryDisplay';
import { cn } from '@/lib/utils';
interface StatusBarProps {
@@ -22,6 +36,35 @@ export function StatusBar({
onSettingsClick,
onMenuClick,
}: StatusBarProps) {
const [showBatteryPercent, setShowBatteryPercent] = useState(getShowBatteryPercent);
const [showBatteryVoltage, setShowBatteryVoltage] = useState(getShowBatteryVoltage);
useEffect(() => {
const handler = () => {
setShowBatteryPercent(getShowBatteryPercent());
setShowBatteryVoltage(getShowBatteryVoltage());
};
window.addEventListener(BATTERY_DISPLAY_CHANGE_EVENT, handler);
return () => window.removeEventListener(BATTERY_DISPLAY_CHANGE_EVENT, handler);
}, []);
const batteryMv = health?.radio_stats?.battery_mv;
const batteryInfo = useMemo(() => {
if ((!showBatteryPercent && !showBatteryVoltage) || !batteryMv || batteryMv <= 0) return null;
const pct = mvToPercent(batteryMv);
const Icon =
pct >= 80 ? BatteryFull : pct >= 40 ? BatteryMedium : pct >= 15 ? BatteryLow : BatteryWarning;
const color =
pct >= 40 ? 'text-status-connected' : pct >= 15 ? 'text-warning' : 'text-destructive';
const label =
showBatteryPercent && showBatteryVoltage
? `${pct}% (${batteryMv}mV)`
: showBatteryPercent
? `${pct}%`
: `${batteryMv}mV`;
return { pct, Icon, color, label, mv: batteryMv };
}, [batteryMv, showBatteryPercent, showBatteryVoltage]);
const radioState =
health?.radio_state ??
(health?.radio_initializing
@@ -119,6 +162,18 @@ export function StatusBar({
<span className="hidden lg:inline text-muted-foreground">{statusLabel}</span>
</div>
{connected && batteryInfo && (
<div
className={cn('flex items-center gap-1', batteryInfo.color)}
title={`Battery: ${batteryInfo.pct}% (${(batteryInfo.mv / 1000).toFixed(2)}V)`}
role="status"
aria-label={`Battery ${batteryInfo.pct} percent`}
>
<batteryInfo.Icon className="h-4 w-4" aria-hidden="true" />
<span className="hidden sm:inline text-[0.6875rem]">{batteryInfo.label}</span>
</div>
)}
{config && (
<div className="hidden lg:flex items-center gap-2 text-muted-foreground">
<span className="text-foreground font-medium">{config.name || 'Unnamed'}</span>
+4 -2
View File
@@ -9,7 +9,8 @@ import type {
RadioTraceResponse,
} from '../types';
import { CONTACT_TYPE_REPEATER } from '../types';
import { calculateDistance, isValidLocation } from '../utils/pathUtils';
import { calculateDistance, formatDistance, isValidLocation } from '../utils/pathUtils';
import { useDistanceUnit } from '../contexts/DistanceUnitContext';
import { getContactDisplayName } from '../utils/pubkey';
import { handleKeyboardActivate } from '../utils/a11y';
import { ContactAvatar } from './ContactAvatar';
@@ -186,6 +187,7 @@ function TraceNodeRow({
}
export function TracePane({ contacts, config, onRunTracePath }: TracePaneProps) {
const { distanceUnit } = useDistanceUnit();
const [searchQuery, setSearchQuery] = useState('');
const [sortMode, setSortMode] = useState<TraceSortMode>('alpha');
const [draftHops, setDraftHops] = useState<TraceDraftHop[]>([]);
@@ -536,7 +538,7 @@ export function TracePane({ contacts, config, onRunTracePath }: TracePaneProps)
</div>
{sortMode === 'distance' && distanceKm !== null ? (
<div className="mt-1 text-[0.6875rem] text-muted-foreground">
{distanceKm.toFixed(1)} km away
{formatDistance(distanceKm, distanceUnit)} away
</div>
) : null}
{selectedCount > 0 ? (
@@ -1666,7 +1666,8 @@ function AppriseConfigEditor({
rows={4}
/>
<p className="text-xs text-muted-foreground">
One URL per line. All URLs receive every matched notification.
One URL per line. All URLs receive every matched notification. For Matrix room version 12
(servername-less room IDs), append <code>?hsreq=no</code> to the URL.
</p>
</div>
@@ -28,6 +28,13 @@ import {
setSavedFontScale,
} from '../../utils/fontScale';
import { getAutoFocusInputEnabled, setAutoFocusInputEnabled } from '../../utils/autoFocusInput';
import {
BATTERY_DISPLAY_CHANGE_EVENT,
getShowBatteryPercent,
setShowBatteryPercent as saveBatteryPercent,
getShowBatteryVoltage,
setShowBatteryVoltage as saveBatteryVoltage,
} from '../../utils/batteryDisplay';
export function SettingsLocalSection({
onLocalLabelChange,
@@ -50,6 +57,8 @@ export function SettingsLocalSection({
const [localLabelText, setLocalLabelText] = useState(() => getLocalLabel().text);
const [localLabelColor, setLocalLabelColor] = useState(() => getLocalLabel().color);
const [autoFocusInput, setAutoFocusInput] = useState(getAutoFocusInputEnabled);
const [batteryPercent, setBatteryPercent] = useState(getShowBatteryPercent);
const [batteryVoltage, setBatteryVoltage] = useState(getShowBatteryVoltage);
const [fontScale, setFontScale] = useState(getSavedFontScale);
const [fontScaleSlider, setFontScaleSlider] = useState(getSavedFontScale);
const [fontScaleInput, setFontScaleInput] = useState(() => String(getSavedFontScale()));
@@ -201,6 +210,43 @@ export function SettingsLocalSection({
<span className="text-sm">Auto-focus input on conversation load (desktop only)</span>
</label>
<label className="flex items-center gap-3 cursor-pointer">
<input
type="checkbox"
checked={batteryPercent}
onChange={(e) => {
const v = e.target.checked;
setBatteryPercent(v);
saveBatteryPercent(v);
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
}}
className="w-4 h-4 rounded border-input accent-primary"
/>
<span className="text-sm">Show battery percentage in status bar</span>
</label>
<label className="flex items-center gap-3 cursor-pointer">
<input
type="checkbox"
checked={batteryVoltage}
onChange={(e) => {
const v = e.target.checked;
setBatteryVoltage(v);
saveBatteryVoltage(v);
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
}}
className="w-4 h-4 rounded border-input accent-primary"
/>
<span className="text-sm">Show battery voltage in status bar</span>
</label>
{(batteryPercent || batteryVoltage) && (
<p className="text-xs text-muted-foreground ml-7">
Battery data updates every 60 seconds and may take up to a minute to appear after
connecting.
</p>
)}
<div className="space-y-3">
<Label htmlFor="font-scale-input">Relative Font Size</Label>
<div className="flex flex-col gap-3 sm:flex-row sm:items-center">
@@ -447,7 +447,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
)}
{/* Noise Floor */}
{stats.noise_floor_24h.supported !== false && (
{stats.noise_floor_24h && (
<>
<Separator />
<div>
@@ -468,14 +468,14 @@ export function SettingsStatisticsSection({ className }: { className?: string })
<NoiseFloorChart samples={stats.noise_floor_24h.samples} />
) : stats.noise_floor_24h.samples.length === 0 ? (
<p className="text-sm text-muted-foreground">
No noise floor samples collected yet. Samples are collected every five minutes,
and retained until server restart.
No noise floor samples collected yet. Samples are collected every minute and
retained until server restart.
</p>
) : (
<p className="text-sm text-muted-foreground">
Only one sample so far ({stats.noise_floor_24h.samples[0].noise_floor_dbm} dBm).
More data needed for a chart. Samples are collected every five minutes, and
retained until server restart.
More data needed for a chart. Samples are collected every minute and retained
until server restart.
</p>
)}
</div>
@@ -372,6 +372,8 @@ export function useConversationMessages(
const olderAbortControllerRef = useRef<AbortController | null>(null);
const newerAbortControllerRef = useRef<AbortController | null>(null);
const fetchingConversationIdRef = useRef<string | null>(null);
const activeConversationRef = useRef(activeConversation);
activeConversationRef.current = activeConversation;
const latestReconcileRequestIdRef = useRef(0);
const pendingReconnectReconcileRef = useRef(false);
const messagesRef = useRef<Message[]>([]);
@@ -664,9 +666,11 @@ export function useConversationMessages(
}, [activeConversation]);
const reconcileOnReconnect = useCallback(() => {
if (!isMessageConversation(activeConversation)) {
return;
}
// Read the current conversation from the ref rather than closing over
// activeConversation, so that a conversation switch during WS reconnect
// targets the right conversation instead of a stale capture.
const current = activeConversationRef.current;
if (!isMessageConversation(current)) return;
if (hasNewerMessagesRef.current) {
pendingReconnectReconcileRef.current = true;
@@ -677,8 +681,8 @@ export function useConversationMessages(
const controller = new AbortController();
const requestId = latestReconcileRequestIdRef.current + 1;
latestReconcileRequestIdRef.current = requestId;
reconcileFromBackend(activeConversation, controller.signal, requestId);
}, [activeConversation, reconcileFromBackend]);
reconcileFromBackend(current, controller.signal, requestId);
}, [reconcileFromBackend]);
useEffect(() => {
if (abortControllerRef.current) {
+44
View File
@@ -0,0 +1,44 @@
import { describe, expect, it } from 'vitest';
import { mvToPercent, formatBatteryLabel } from '../utils/batteryDisplay';
// mvToPercent maps a single-cell LiPo millivolt reading onto 0-100% using
// the OCV lookup table: clamped at the table's voltage extremes, linearly
// interpolated between adjacent table entries otherwise.
describe('mvToPercent', () => {
it('clamps to 100 above table ceiling', () => {
expect(mvToPercent(4500)).toBe(100);
expect(mvToPercent(4190)).toBe(100);
});
it('clamps to 0 below table floor', () => {
expect(mvToPercent(3100)).toBe(0);
expect(mvToPercent(2800)).toBe(0);
});
it('returns exact table values at boundaries', () => {
// 4050 and 3630 are table entries, so no interpolation should occur.
expect(mvToPercent(4050)).toBe(90);
expect(mvToPercent(3630)).toBe(40);
});
it('interpolates between table entries', () => {
// Midpoint between 3630 (40%) and 3720 (50%) = 3675 → ~45%
const mid = mvToPercent(3675);
expect(mid).toBeGreaterThan(40);
expect(mid).toBeLessThan(50);
});
});
// formatBatteryLabel combines the percent/voltage toggles into the status-bar
// label string; null signals "nothing to display".
describe('formatBatteryLabel', () => {
it('returns null when both toggles are off', () => {
expect(formatBatteryLabel(4050, false, false)).toBeNull();
});
it('returns percentage only', () => {
expect(formatBatteryLabel(4050, true, false)).toBe('90%');
});
it('returns voltage only', () => {
expect(formatBatteryLabel(4050, false, true)).toBe('4050mV');
});
it('returns combined when both enabled', () => {
// Percent leads, voltage is parenthesized.
expect(formatBatteryLabel(4050, true, true)).toBe('90% (4050mV)');
});
});
+2 -4
View File
@@ -657,11 +657,10 @@ describe('SettingsModal', () => {
{ timestamp: 1711796400, count: 8 },
],
noise_floor_24h: {
sample_interval_seconds: 300,
sample_interval_seconds: 60,
coverage_seconds: 3600,
latest_noise_floor_dbm: -105,
latest_timestamp: 1711800000,
supported: true,
samples: [],
},
};
@@ -728,11 +727,10 @@ describe('SettingsModal', () => {
},
packets_per_hour_72h: [],
noise_floor_24h: {
sample_interval_seconds: 300,
sample_interval_seconds: 60,
coverage_seconds: 0,
latest_noise_floor_dbm: null,
latest_timestamp: null,
supported: null,
samples: [],
},
};
@@ -161,6 +161,80 @@ describe('getMessageContentKey', () => {
expect(getMessageContentKey(msg1)).toBe(getMessageContentKey(msg2));
});
it('PRIV messages with different sender_key produce different keys (room dedup)', () => {
const msg1 = createMessage({
type: 'PRIV',
conversation_key: 'room_pubkey',
text: 'ok',
sender_timestamp: 1700000000,
sender_key: 'alice_key',
});
const msg2 = createMessage({
type: 'PRIV',
conversation_key: 'room_pubkey',
text: 'ok',
sender_timestamp: 1700000000,
sender_key: 'bob_key',
});
expect(getMessageContentKey(msg1)).not.toBe(getMessageContentKey(msg2));
});
it('PRIV messages with same sender_key still dedup (true room echo)', () => {
const msg1 = createMessage({
type: 'PRIV',
conversation_key: 'room_pubkey',
text: 'ok',
sender_timestamp: 1700000000,
sender_key: 'alice_key',
});
const msg2 = createMessage({
type: 'PRIV',
conversation_key: 'room_pubkey',
text: 'ok',
sender_timestamp: 1700000000,
sender_key: 'alice_key',
});
expect(getMessageContentKey(msg1)).toBe(getMessageContentKey(msg2));
});
it('CHAN messages ignore sender_key (channel dedup unchanged)', () => {
const msg1 = createMessage({
type: 'CHAN',
text: 'hello',
sender_timestamp: 1700000000,
sender_key: 'alice_key',
});
const msg2 = createMessage({
type: 'CHAN',
text: 'hello',
sender_timestamp: 1700000000,
sender_key: 'bob_key',
});
expect(getMessageContentKey(msg1)).toBe(getMessageContentKey(msg2));
});
it('PRIV messages with null sender_key still dedup normally', () => {
const msg1 = createMessage({
type: 'PRIV',
conversation_key: 'contact_key',
text: 'hi',
sender_timestamp: 1700000000,
sender_key: null,
});
const msg2 = createMessage({
type: 'PRIV',
conversation_key: 'contact_key',
text: 'hi',
sender_timestamp: 1700000000,
sender_key: null,
});
expect(getMessageContentKey(msg1)).toBe(getMessageContentKey(msg2));
});
});
describe('mergePendingAck', () => {
+18 -1
View File
@@ -62,6 +62,23 @@ export interface AppInfo {
commit_hash: string | null;
}
export interface RadioStatsSnapshot {
timestamp: number | null;
battery_mv: number | null;
uptime_secs: number | null;
noise_floor: number | null;
last_rssi: number | null;
last_snr: number | null;
tx_air_secs: number | null;
rx_air_secs: number | null;
packets_recv: number | null;
packets_sent: number | null;
flood_tx: number | null;
direct_tx: number | null;
flood_rx: number | null;
direct_rx: number | null;
}
export interface HealthStatus {
status: string;
radio_connected: boolean;
@@ -76,6 +93,7 @@ export interface HealthStatus {
max_contacts: number | null;
max_channels: number | null;
} | null;
radio_stats?: RadioStatsSnapshot | null;
database_size_mb: number;
oldest_undecrypted_timestamp: number | null;
fanout_statuses: Record<string, FanoutStatusEntry>;
@@ -540,7 +558,6 @@ export interface NoiseFloorHistoryStats {
coverage_seconds: number;
latest_noise_floor_dbm: number | null;
latest_timestamp: number | null;
supported: boolean | null;
samples: NoiseFloorSample[];
}
+83
View File
@@ -0,0 +1,83 @@
export const BATTERY_DISPLAY_CHANGE_EVENT = 'remoteterm-battery-display-change';
// Open-circuit-voltage curve for a single LiPo cell, taken from the
// Meshtastic firmware defaults (meshtastic/firmware src/power.h).
// Entries are [millivolts, percent], ordered from highest voltage down.
const OCV_TABLE: [number, number][] = [
  [4190, 100],
  [4050, 90],
  [3990, 80],
  [3890, 70],
  [3800, 60],
  [3720, 50],
  [3630, 40],
  [3530, 30],
  [3420, 20],
  [3300, 10],
  [3100, 0],
];

/**
 * Convert a battery reading in millivolts to an approximate charge percent.
 * Readings at or above the table ceiling clamp to 100, at or below the floor
 * to 0; anything in between is linearly interpolated within its table segment.
 */
export function mvToPercent(mv: number): number {
  if (mv >= OCV_TABLE[0][0]) return 100;
  if (mv <= OCV_TABLE[OCV_TABLE.length - 1][0]) return 0;
  // The clamps above guarantee some segment contains mv: pick the first
  // entry whose successor's voltage is at or below the reading.
  const seg = OCV_TABLE.findIndex(
    (_, i) => i + 1 < OCV_TABLE.length && mv >= OCV_TABLE[i + 1][0]
  );
  if (seg === -1) return 0; // unreachable given the clamps; defensive only
  const [highMv, highPct] = OCV_TABLE[seg];
  const [lowMv, lowPct] = OCV_TABLE[seg + 1];
  const fraction = (mv - lowMv) / (highMv - lowMv);
  return Math.round(lowPct + fraction * (highPct - lowPct));
}

/**
 * Build the status-bar battery label for the enabled display toggles.
 * Returns null when neither percent nor voltage display is enabled.
 */
export function formatBatteryLabel(
  mv: number,
  showPercent: boolean,
  showVoltage: boolean
): string | null {
  const parts: string[] = [];
  if (showPercent) parts.push(`${mvToPercent(mv)}%`);
  if (showVoltage) parts.push(showPercent ? `(${mv}mV)` : `${mv}mV`);
  return parts.length > 0 ? parts.join(' ') : null;
}
const PERCENT_KEY = 'remoteterm-show-battery-percent';
const VOLTAGE_KEY = 'remoteterm-show-battery-voltage';

// Read a boolean display flag from localStorage. An absent key and a storage
// failure (private browsing, storage disabled) both read as false, which is
// the default for these toggles.
function readFlag(key: string): boolean {
  try {
    return localStorage.getItem(key) === 'true';
  } catch {
    return false;
  }
}

// Persist a boolean display flag: store 'true' when enabled, remove the key
// when disabled so the default (false) needs no stored state. Storage
// failures are ignored — the toggle simply won't survive a reload.
function writeFlag(key: string, enabled: boolean): void {
  try {
    if (enabled) {
      localStorage.setItem(key, 'true');
    } else {
      localStorage.removeItem(key);
    }
  } catch {
    // localStorage may be unavailable
  }
}

/** Whether the status bar should show battery charge as a percentage. */
export function getShowBatteryPercent(): boolean {
  return readFlag(PERCENT_KEY);
}

/** Persist the battery-percentage toggle. */
export function setShowBatteryPercent(enabled: boolean): void {
  writeFlag(PERCENT_KEY, enabled);
}

/** Whether the status bar should show the raw battery voltage in mV. */
export function getShowBatteryVoltage(): boolean {
  return readFlag(VOLTAGE_KEY);
}

/** Persist the battery-voltage toggle. */
export function setShowBatteryVoltage(enabled: boolean): void {
  writeFlag(VOLTAGE_KEY, enabled);
}
+6 -2
View File
@@ -1,10 +1,14 @@
import type { Message } from '../types';
// Content identity matches the frontend's message-level dedup contract.
// Content identity matches the backend's message-level dedup indexes.
export function getMessageContentKey(msg: Message): string {
// When sender_timestamp exists, dedup by content (catches radio-path duplicates with different IDs).
// When null, include msg.id so each message gets a unique key — avoids silently dropping
// different messages that share the same text and received_at second.
const ts = msg.sender_timestamp ?? `r${msg.received_at}-${msg.id}`;
return `${msg.type}-${msg.conversation_key}-${msg.text}-${ts}`;
// For incoming PRIV messages (room-server posts), include sender_key so that
// two different room participants sending identical text in the same second
// are not collapsed. Mirrors idx_messages_incoming_priv_dedup.
const senderSuffix = msg.type === 'PRIV' && msg.sender_key ? `-${msg.sender_key}` : '';
return `${msg.type}-${msg.conversation_key}-${msg.text}-${ts}${senderSuffix}`;
}
+1 -1
View File
@@ -15,7 +15,7 @@ const RAW_PACKET_STATS_WINDOW_SECONDS: Record<Exclude<RawPacketStatsWindow, 'ses
export const MAX_RAW_PACKET_STATS_OBSERVATIONS = 20000;
const KNOWN_PAYLOAD_TYPES = [
export const KNOWN_PAYLOAD_TYPES = [
'Advert',
'GroupText',
'TextMessage',
+127
View File
@@ -0,0 +1,127 @@
# Maintainer: Jack Kingsman <jack@jackkingsman.me>
pkgname=remoteterm-meshcore
# pkgver is rewritten by .github/workflows/publish-aur.yml on each release.
pkgver=3.9.0
pkgrel=1
pkgdesc='Web interface for MeshCore mesh radio networks'
arch=(x86_64 aarch64)
url='https://github.com/jkingsman/Remote-Terminal-for-MeshCore'
license=('MIT')
# No system python dependency — we bundle a standalone interpreter via
# python-build-standalone so the package is immune to Arch python ABI bumps.
depends=(glibc)
makedepends=(uv nodejs npm)
optdepends=('bluez: BLE transport support')
backup=(etc/remoteterm-meshcore/remoteterm.env)
# The bundled python-build-standalone binary ships pre-stripped. makepkg's
# default strip pass corrupts its unusual ELF layout (.dynstr not in segment),
# so we disable stripping for the whole package.
options=(!strip)
install=remoteterm-meshcore.install
source=(
"$pkgname-$pkgver.tar.gz::https://github.com/jkingsman/Remote-Terminal-for-MeshCore/archive/refs/tags/$pkgver.tar.gz"
"remoteterm-meshcore.service"
"remoteterm.env"
"remoteterm-meshcore.sysusers"
"remoteterm-meshcore.tmpfiles"
)
# sha256sums are recomputed by `updpkgsums` in the publish workflow before
# the PKGBUILD is pushed to AUR. The committed values are intentionally SKIP
# so the file is honest about not tracking real hashes in this repo.
sha256sums=('SKIP'
'SKIP'
'SKIP'
'SKIP'
'SKIP')
# python-build-standalone: stripped install_only builds (~30 MB each).
# Bump _pyver and _pybuilddate when updating the bundled interpreter.
_pyver=3.13.13
_pybuilddate=20260408
source_x86_64=("python-${_pyver}-x86_64.tar.gz::https://github.com/astral-sh/python-build-standalone/releases/download/${_pybuilddate}/cpython-${_pyver}+${_pybuilddate}-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz")
sha256sums_x86_64=('SKIP')
source_aarch64=("python-${_pyver}-aarch64.tar.gz::https://github.com/astral-sh/python-build-standalone/releases/download/${_pybuilddate}/cpython-${_pyver}+${_pybuilddate}-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz")
sha256sums_aarch64=('SKIP')
_srcname="Remote-Terminal-for-MeshCore-$pkgver"
# Build the frontend bundle and a self-contained Python venv inside $srcdir.
build() {
cd "$_srcname"
# Build frontend
cd frontend
# npm ci installs exactly what package-lock.json pins (reproducible build).
npm ci
npm run build
cd ..
# Create venv using the bundled standalone Python interpreter, then install
# Python dependencies into it. This produces a fully self-contained venv
# that does not reference the system Python at all.
uv venv --python "$srcdir/python/bin/python3" .venv
# NOTE(review): assumes uv sync targets the ./.venv created above — confirm
# uv's project-venv discovery picks it up rather than creating a new one.
uv sync --no-dev --frozen
}
# Assemble the package tree under $pkgdir: app source, built frontend,
# the bundled standalone Python, and the venv relocated from $srcdir paths
# to its final /opt install prefix.
package() {
cd "$_srcname"
local _optdir=/opt/remoteterm-meshcore
local _instdir="$pkgdir$_optdir"
# App source
install -d "$_instdir"
cp -r app "$_instdir/"
cp pyproject.toml uv.lock "$_instdir/"
# Frontend build
install -d "$_instdir/frontend"
cp -r frontend/dist "$_instdir/frontend/"
# Bundled Python interpreter
cp -a "$srcdir/python" "$_instdir/python"
# Python venv
cp -a .venv "$_instdir/"
# Fix shebangs and venv config: replace build-time paths with final
# install paths so the venv works from /opt after installation.
# sed only operates on regular file contents, so symlinks need separate
# fixup below.
# NOTE(review): $srcdir is interpolated unescaped into the sed pattern; a
# build path containing '|' or regex metacharacters would break these
# substitutions. Acceptable for makepkg-controlled paths, but worth knowing.
find "$_instdir/.venv/bin" -type f -exec \
sed -i "s|$srcdir/$_srcname/.venv|$_optdir/.venv|g" {} +
find "$_instdir/.venv/bin" -type f -exec \
sed -i "s|$srcdir/python|$_optdir/python|g" {} +
# pyvenv.cfg may be absent in some layouts; the || true keeps that non-fatal.
sed -i \
-e "s|$srcdir/$_srcname/.venv|$_optdir/.venv|g" \
-e "s|$srcdir/python|$_optdir/python|g" \
"$_instdir/.venv/pyvenv.cfg" 2>/dev/null || true
# Recreate the venv interpreter symlinks — these are symlinks (not files),
# so sed cannot fix them. Point them at the bundled Python.
ln -sf "$_optdir/python/bin/python3" "$_instdir/.venv/bin/python"
ln -sf python "$_instdir/.venv/bin/python3"
ln -sf python "$_instdir/.venv/bin/python3.13"
# Data directory symlink
ln -s /var/lib/remoteterm-meshcore "$_instdir/data"
# Systemd service
install -Dm644 "$srcdir/remoteterm-meshcore.service" \
"$pkgdir/usr/lib/systemd/system/remoteterm-meshcore.service"
# Environment file (0640: may hold basic-auth credentials)
install -Dm640 "$srcdir/remoteterm.env" \
"$pkgdir/etc/remoteterm-meshcore/remoteterm.env"
# System user and data directory
install -Dm644 "$srcdir/remoteterm-meshcore.sysusers" \
"$pkgdir/usr/lib/sysusers.d/remoteterm-meshcore.conf"
install -Dm644 "$srcdir/remoteterm-meshcore.tmpfiles" \
"$pkgdir/usr/lib/tmpfiles.d/remoteterm-meshcore.conf"
# License
install -Dm644 LICENSE.md \
"$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
+35
View File
@@ -0,0 +1,35 @@
# First-install hint: point the admin at the config file and service unit.
post_install() {
echo "==> Set your radio connection (serial, TCP, or BLE) in"
echo "==> /etc/remoteterm-meshcore/remoteterm.env"
echo "==> Start the service with: systemctl enable --now remoteterm-meshcore"
echo "==> The web UI will be at http://localhost:8000"
}
# After an upgrade: drop stale bytecode and restart the service if running.
post_upgrade() {
# Clean orphaned __pycache__ dirs left by the previous Python version
# (stderr suppressed: find may race with rm on already-deleted dirs).
find /opt/remoteterm-meshcore -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
# Skip systemd operations in chroots/containers where the binary may exist
# but PID 1 is not systemd.
if [ -d /run/systemd/system ] && command -v systemctl &>/dev/null; then
systemctl daemon-reload || true
# Only restart when already active, so an upgrade never starts a
# service the admin has left disabled/stopped.
if systemctl is-active --quiet remoteterm-meshcore; then
systemctl restart remoteterm-meshcore || true
fi
fi
}
# Stop and disable the service before package files disappear. Guard-clause
# style: bail out early in chroots/containers where PID 1 is not systemd.
pre_remove() {
[ -d /run/systemd/system ] || return 0
command -v systemctl &>/dev/null || return 0
systemctl disable --now remoteterm-meshcore 2>/dev/null || true
}
# After removal: refresh systemd's unit list and tell the admin what was
# deliberately left behind (data dir and system user).
post_remove() {
if [ -d /run/systemd/system ] && command -v systemctl &>/dev/null; then
systemctl daemon-reload
fi
echo "==> Database and config remain in /var/lib/remoteterm-meshcore/, remoteterm user retained."
echo "==> To fully clean up: sudo rm -rf /var/lib/remoteterm-meshcore"
}
+29
View File
@@ -0,0 +1,29 @@
[Unit]
Description=RemoteTerm for MeshCore
Documentation=https://github.com/jkingsman/Remote-Terminal-for-MeshCore
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=remoteterm
Group=remoteterm
WorkingDirectory=/opt/remoteterm-meshcore
EnvironmentFile=/etc/remoteterm-meshcore/remoteterm.env
ExecStart=/opt/remoteterm-meshcore/.venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000
Restart=on-failure
RestartSec=5s
StateDirectory=remoteterm-meshcore
# Hardening
ProtectSystem=strict
ProtectHome=yes
PrivateTmp=yes
NoNewPrivileges=yes
# Serial port access (uucp group on Arch)
SupplementaryGroups=uucp
[Install]
WantedBy=multi-user.target
+1
View File
@@ -0,0 +1 @@
u remoteterm - "RemoteTerm for MeshCore" /var/lib/remoteterm-meshcore
+1
View File
@@ -0,0 +1 @@
d /var/lib/remoteterm-meshcore 0750 remoteterm remoteterm
+31
View File
@@ -0,0 +1,31 @@
# RemoteTerm for MeshCore configuration
# https://github.com/jkingsman/Remote-Terminal-for-MeshCore
# Transport: uncomment ONE section below
# Serial auto-detect (default — no config needed)
# Serial manual port
#MESHCORE_SERIAL_PORT=/dev/ttyUSB0
# TCP
#MESHCORE_TCP_HOST=192.168.1.100
#MESHCORE_TCP_PORT=5000
# BLE (also requires the optional `bluez` package)
# NOTE: The systemd service sets ProtectHome=yes, which may block the D-Bus
# session bus at /run/user/. If BLE fails to connect, try overriding with
# ProtectHome=no in a systemd drop-in.
#MESHCORE_BLE_ADDRESS=AA:BB:CC:DD:EE:FF
#MESHCORE_BLE_PIN=123456
# Database
MESHCORE_DATABASE_PATH=/var/lib/remoteterm-meshcore/meshcore.db
# Bots can run arbitrary Python on the server. Leave this set to 'true' unless
# you trust everyone on your network.
MESHCORE_DISABLE_BOTS=true
# HTTP Basic Auth (recommended when bots are enabled)
#MESHCORE_BASIC_AUTH_USERNAME=
#MESHCORE_BASIC_AUTH_PASSWORD=
+3 -6
View File
@@ -1,6 +1,6 @@
[project]
name = "remoteterm-meshcore"
version = "3.9.0"
version = "3.11.0"
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
readme = "README.md"
requires-python = ">=3.11"
@@ -14,7 +14,7 @@ dependencies = [
"pynacl>=1.5.0",
"meshcore==2.3.2",
"aiomqtt>=2.0",
"apprise>=1.9.7",
"apprise>=1.9.8",
"boto3>=1.38.0",
]
@@ -32,7 +32,7 @@ testpaths = ["tests"]
addopts = "-n auto --dist worksteal"
[tool.ruff]
target-version = "py310"
target-version = "py311"
line-length = 100
[tool.ruff.lint]
@@ -53,9 +53,6 @@ ignore = [
"SIM117", # nested with statements - can be clearer in tests
]
[tool.ruff.lint.per-file-ignores]
"app/main.py" = ["E402"] # imports after Windows event-loop re-exec block
[tool.ruff.lint.isort]
known-first-party = ["app"]
+88
View File
@@ -0,0 +1,88 @@
#!/usr/bin/env bash
set -euo pipefail
# run_aur_with_radio.sh — Install the published AUR package via yay in an Arch
# container with a real radio attached over serial.
#
# Usage:
# ./scripts/quality/run_aur_with_radio.sh [--device PATH] [--port PORT]
#
# Defaults:
# --device /dev/serial/by-id/usb-Heltec_HT-n5262_F423934AA2AB2A5E-if00
# --port 8000
DEVICE="/dev/serial/by-id/usb-Heltec_HT-n5262_F423934AA2AB2A5E-if00"
PORT=8000
while [ "${1:-}" ]; do
case "$1" in
--device) DEVICE="$2"; shift 2 ;;
--port) PORT="$2"; shift 2 ;;
*) echo "Unknown arg: $1" >&2; exit 1 ;;
esac
done
if [ ! -e "$DEVICE" ]; then
echo "Error: device $DEVICE not found" >&2
exit 1
fi
CONTAINER="remoteterm-aur-radio-$$"
cleanup() {
echo
echo "Cleaning up..."
docker rm -f "$CONTAINER" 2>/dev/null || true
echo "Done."
}
trap cleanup EXIT
echo "=== Installing AUR package with radio ==="
echo " Device: $DEVICE"
echo " Port: http://localhost:$PORT"
echo
docker run -it --rm \
--name "$CONTAINER" \
--device "$DEVICE:/dev/meshcore-radio" \
-p "$PORT:8000" \
archlinux:latest bash -c '
set -euo pipefail
echo "[1/3] Setting up yay..."
pacman -Syu --noconfirm base-devel git curl nodejs npm >/dev/null 2>&1
curl -LsSf https://astral.sh/uv/install.sh | sh >/dev/null 2>&1
# yay needs a non-root user
useradd -m builder
echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
# Install yay
su builder -c "
export PATH=\"\$HOME/.local/bin:\$PATH\"
cd /tmp
git clone https://aur.archlinux.org/yay-bin.git 2>&1
cd yay-bin
makepkg -si --noconfirm 2>&1
"
echo "[2/3] Installing remoteterm-meshcore from AUR..."
su builder -c "
export PATH=\"\$HOME/.local/bin:\$PATH\"
yay -S --noconfirm remoteterm-meshcore 2>&1
"
# Create user and data dir (no systemd PID 1 in container)
systemd-sysusers
systemd-tmpfiles --create
# Give the service user access to the serial device
chmod 666 /dev/meshcore-radio
echo "[3/3] Starting RemoteTerm..."
cd /opt/remoteterm-meshcore
exec su -s /bin/bash remoteterm -c "\
MESHCORE_SERIAL_PORT=/dev/meshcore-radio \
MESHCORE_DATABASE_PATH=/var/lib/remoteterm-meshcore/meshcore.db \
exec .venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000"
'
+112
View File
@@ -0,0 +1,112 @@
#!/usr/bin/env bash
set -euo pipefail
# test_aur_package.sh — Build the AUR package in one Arch container, then
# install and run it in a clean Arch container with port 8000 exposed.
#
# Usage:
# ./scripts/quality/test_aur_package.sh [--port PORT]
#
# The script streams application logs until you Ctrl-C.
REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
PORT=8000
if [ "${1:-}" = "--port" ]; then PORT="${2:-8000}"; fi
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m'
ARTIFACT_DIR="$(mktemp -d)"
INSTALL_CONTAINER="remoteterm-aur-test-$$"
cleanup() {
echo
echo -e "${YELLOW}Cleaning up...${NC}"
docker rm -f "$INSTALL_CONTAINER" 2>/dev/null || true
rm -rf "$ARTIFACT_DIR"
echo -e "${GREEN}Done.${NC}"
}
trap cleanup EXIT
# ── Phase 1: Build ────────────────────────────────────────────────────────────
echo -e "${BOLD}=== Phase 1: Build AUR package ===${NC}"
docker run --rm \
-v "$REPO_ROOT/pkg/aur:/pkg:ro" \
-v "$ARTIFACT_DIR:/out" \
archlinux:latest bash -c '
set -euo pipefail
pacman -Syu --noconfirm base-devel git curl >/dev/null 2>&1
curl -LsSf https://astral.sh/uv/install.sh | sh >/dev/null 2>&1
export PATH="$HOME/.local/bin:$PATH"
pacman -S --noconfirm nodejs npm >/dev/null 2>&1
useradd -m builder
echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
BUILD_DIR=/home/builder/build
mkdir -p "$BUILD_DIR"
cp /pkg/PKGBUILD /pkg/remoteterm-meshcore.install \
/pkg/remoteterm-meshcore.service /pkg/remoteterm-meshcore.sysusers \
/pkg/remoteterm-meshcore.tmpfiles /pkg/remoteterm.env "$BUILD_DIR/"
chown -R builder:builder "$BUILD_DIR"
echo "Building package..."
su builder -c "export PATH=\"$HOME/.local/bin:\$PATH\" && cd $BUILD_DIR && makepkg -sf --noconfirm" 2>&1
cp "$BUILD_DIR"/remoteterm-meshcore-*.pkg.tar.zst /out/
echo "Package artifact copied to /out/"
ls -lh /out/*.pkg.tar.zst
'
PKG_FILE="$(ls "$ARTIFACT_DIR"/*.pkg.tar.zst 2>/dev/null | head -1)"
if [ -z "$PKG_FILE" ]; then
echo -e "${RED}Build failed — no .pkg.tar.zst produced${NC}"
exit 1
fi
echo -e "${GREEN}Built: $(basename "$PKG_FILE") ($(du -h "$PKG_FILE" | cut -f1))${NC}"
echo
# ── Phase 2: Install and run ─────────────────────────────────────────────────
echo -e "${BOLD}=== Phase 2: Install and run ===${NC}"
docker run -d \
--name "$INSTALL_CONTAINER" \
-p "$PORT:8000" \
-v "$ARTIFACT_DIR:/pkg:ro" \
archlinux:latest bash -c '
set -euo pipefail
# Install the package (sysusers.d creates the remoteterm user, tmpfiles.d creates the data dir)
pacman -Syu --noconfirm >/dev/null 2>&1
pacman -U --noconfirm /pkg/*.pkg.tar.zst
# In a container there is no systemd to trigger sysusers/tmpfiles automatically,
# so run them manually.
systemd-sysusers
systemd-tmpfiles --create
echo "============================================"
echo " RemoteTerm installed — starting server"
echo "============================================"
# Run as the remoteterm service user, matching the systemd unit
exec su -s /bin/bash remoteterm -c "cd /opt/remoteterm-meshcore && exec .venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000"
' >/dev/null
echo -e "${CYAN}Container:${NC} $INSTALL_CONTAINER"
echo -e "${CYAN}Listening:${NC} http://localhost:$PORT"
echo -e "${CYAN}Health: ${NC} http://localhost:$PORT/api/health"
echo
echo -e "${YELLOW}Streaming logs (Ctrl-C to stop and clean up)...${NC}"
echo
docker logs -f "$INSTALL_CONTAINER"
+3 -3
View File
@@ -35,7 +35,7 @@ SERIAL_HOST_PATH="/dev/ttyACM0"
SERIAL_COMPOSE_HOST_PATH="/dev/ttyACM0"
SERIAL_CONTAINER_PATH="/dev/meshcore-radio"
TCP_HOST=""
TCP_PORT="4000"
TCP_PORT="5000"
BLE_ADDRESS=""
BLE_PIN=""
ENABLE_BOTS="N"
@@ -311,8 +311,8 @@ case "$TRANSPORT_CHOICE" in
echo -e "${RED}TCP host is required.${NC}"
read -r -p "TCP host: " TCP_HOST
done
read -r -p "TCP port (default: 4000): " TCP_PORT
TCP_PORT="${TCP_PORT:-4000}"
read -r -p "TCP port (default: 5000): " TCP_PORT
TCP_PORT="${TCP_PORT:-5000}"
echo -e "${GREEN}TCP: ${TCP_HOST}:${TCP_PORT}${NC}"
;;
3)
+3 -3
View File
@@ -114,8 +114,8 @@ case "$TRANSPORT_CHOICE" in
echo -e "${RED}TCP host is required.${NC}"
read -rp "TCP host: " TCP_HOST
done
read -rp "TCP port (default: 4000): " TCP_PORT
TCP_PORT="${TCP_PORT:-4000}"
read -rp "TCP port (default: 5000): " TCP_PORT
TCP_PORT="${TCP_PORT:-5000}"
echo -e "${GREEN}TCP: ${TCP_HOST}:${TCP_PORT}${NC}"
;;
4)
@@ -241,7 +241,7 @@ if [ "$FRONTEND_MODE" = "build" ]; then
NODE_VERSION="$(node -v)"
NPM_VERSION="$(npm -v)"
require_minimum_version "Node.js" "$NODE_VERSION" 18
require_minimum_version "Node.js" "$NODE_VERSION" 20
require_minimum_version "npm" "$NPM_VERSION" 9
echo -e "${YELLOW}Building frontend locally with Node ${NODE_VERSION} and npm ${NPM_VERSION}...${NC}"
+9 -4
View File
@@ -282,14 +282,19 @@ export function deleteFanoutConfig(id: string): Promise<{ deleted: boolean }> {
// --- Helpers ---
/**
* Ensure #flightless channel exists, creating it if needed.
* Ensure a channel exists by name, creating it if needed.
* Returns the channel object.
*/
export async function ensureFlightlessChannel(): Promise<Channel> {
export async function ensureChannel(name: string): Promise<Channel> {
const channels = await getChannels();
const existing = channels.find((c) => c.name === '#flightless');
const existing = channels.find((c) => c.name === name);
if (existing) return existing;
return createChannel('#flightless');
return createChannel(name);
}
/** Convenience alias — ensures #flightless exists. */
export async function ensureFlightlessChannel(): Promise<Channel> {
return ensureChannel('#flightless');
}
/**
+46
View File
@@ -0,0 +1,46 @@
/**
* Centralized E2E environment configuration.
*
* All environment-dependent values live here with sensible defaults that
* match the maintainer's test rig. Contributors can override any of these
* via environment variables to match their own hardware setup.
*
* See CONTRIBUTING.md § "E2E Testing" for what each variable means and
* how to set up a test environment from scratch.
*/
/**
* Channel used to trigger echo-bot traffic generation.
*
* The echo bot (running on a second "partner" radio) should monitor this
* channel and reply to any message, generating incoming RF traffic that
* mesh-traffic tests can observe. The channel is created automatically if
* it doesn't exist in the test database.
*/
export const E2E_ECHO_CHANNEL =
process.env.E2E_ECHO_CHANNEL ?? '#flightless';
/**
* Message sent to the echo channel to nudge the bot into replying.
* The bot just needs to see *any* message and respond; the exact text
* doesn't matter as long as the bot doesn't filter it out.
*/
export const E2E_ECHO_TRIGGER_MESSAGE =
process.env.E2E_ECHO_TRIGGER_MESSAGE ?? '!echo please give incoming message';
/**
* Public key (64-char hex) of a nearby node that will ACK direct messages
* sent by the test radio. This node must have the test radio's public key
* in its contact list. Used only by the partner-radio DM ACK test.
*/
export const E2E_PARTNER_RADIO_PUBKEY =
process.env.E2E_PARTNER_RADIO_PUBKEY ??
'ae92577bae6c269a1da3c87b5333e1bdb007e372b66e94204b9f92a6b52a62b1';
/**
* Display name for the partner radio node above. Used in UI assertions
* (searching the sidebar, verifying the conversation header, etc.).
*/
export const E2E_PARTNER_RADIO_NAME =
process.env.E2E_PARTNER_RADIO_NAME ?? 'FlightlessDt\u{1F95D}';
+11 -7
View File
@@ -17,7 +17,8 @@
* long polling timeout for environments without the bot.
*/
import { test as base, expect } from '@playwright/test';
import { ensureFlightlessChannel, sendChannelMessage } from './api';
import { ensureChannel, sendChannelMessage } from './api';
import { E2E_ECHO_CHANNEL, E2E_ECHO_TRIGGER_MESSAGE } from './env';
export { expect };
@@ -26,15 +27,18 @@ const TRAFFIC_ADVISORY =
'network. Failure may indicate insufficient mesh traffic rather than a bug.';
/**
* Best-effort: send a message to #flightless that triggers a remote echo
* bot. If the bot is within radio range it will reply, generating the
* incoming traffic the test needs. Failures are silently ignored the
* test will fall back to waiting for organic mesh traffic.
* Best-effort: send a message to the echo channel that triggers a remote
* echo bot on a partner radio. If the bot is within radio range it will
* reply, generating the incoming traffic the test needs. Failures are
* silently ignored the test will fall back to waiting for organic mesh
* traffic.
*
* Configure the channel via E2E_ECHO_CHANNEL (default: #flightless).
*/
export async function nudgeEchoBot(): Promise<void> {
try {
const channel = await ensureFlightlessChannel();
await sendChannelMessage(channel.key, '!echo please give incoming message');
const channel = await ensureChannel(E2E_ECHO_CHANNEL);
await sendChannelMessage(channel.key, E2E_ECHO_TRIGGER_MESSAGE);
} catch {
// Best-effort — bot may not be reachable
}
@@ -6,50 +6,46 @@ import {
getMessages,
setContactRoutingOverride,
} from '../helpers/api';
import {
E2E_PARTNER_RADIO_PUBKEY,
E2E_PARTNER_RADIO_NAME,
} from '../helpers/env';
const DEV_ONLY_ENV = 'MESHCORE_ENABLE_DEV_FLIGHTLESS_ROUTE_E2E';
const FLIGHTLESS_NAME = 'FlightlessDt🥝';
const FLIGHTLESS_PUBLIC_KEY =
'ae92577bae6c269a1da3c87b5333e1bdb007e372b66e94204b9f92a6b52a62b1';
const DEVELOPER_ONLY_NOTICE =
`Developer-only hardware test. This scenario assumes ${FLIGHTLESS_NAME} ` +
`(${FLIGHTLESS_PUBLIC_KEY.slice(0, 12)}...) is a nearby reachable node for the author's test radio. ` +
`Set ${DEV_ONLY_ENV}=1 to run it intentionally.`;
const PARTNER_RADIO_NOTICE =
`Partner-radio hardware test. Requires a nearby node "${E2E_PARTNER_RADIO_NAME}" ` +
`(${E2E_PARTNER_RADIO_PUBKEY.slice(0, 12)}...) that will ACK DMs from this radio. ` +
`Set E2E_USE_PARTNER_RADIO_FOR_DM_ACK_TEST=1 to run, and override ` +
`E2E_PARTNER_RADIO_PUBKEY / E2E_PARTNER_RADIO_NAME to match your hardware.`;
function escapeRegex(value: string): string {
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
test.describe('Developer-only direct-route learning for FlightlessDt🥝', () => {
test('zero-hop adverts then DM ACK learns a direct route', { tag: '@developer-only' }, async ({
test.describe('Partner-radio direct-route learning via DM ACK', () => {
test('zero-hop adverts then DM ACK learns a direct route', { tag: '@partner-radio' }, async ({
page,
}, testInfo) => {
testInfo.annotations.push({ type: 'notice', description: DEVELOPER_ONLY_NOTICE });
if (process.env[DEV_ONLY_ENV] !== '1') {
test.skip(true, DEVELOPER_ONLY_NOTICE);
}
testInfo.annotations.push({ type: 'notice', description: PARTNER_RADIO_NOTICE });
test.setTimeout(180_000);
console.warn(`[developer-only e2e] ${DEVELOPER_ONLY_NOTICE}`);
try {
await deleteContact(FLIGHTLESS_PUBLIC_KEY);
await deleteContact(E2E_PARTNER_RADIO_PUBKEY);
} catch {
// Best-effort reset; the contact may not exist yet in the temp E2E DB.
}
await createContact(FLIGHTLESS_PUBLIC_KEY, FLIGHTLESS_NAME);
await setContactRoutingOverride(FLIGHTLESS_PUBLIC_KEY, '');
await createContact(E2E_PARTNER_RADIO_PUBKEY, E2E_PARTNER_RADIO_NAME);
await setContactRoutingOverride(E2E_PARTNER_RADIO_PUBKEY, '');
await expect
.poll(
async () => {
const contact = await getContactByKey(FLIGHTLESS_PUBLIC_KEY);
const contact = await getContactByKey(E2E_PARTNER_RADIO_PUBKEY);
return contact?.direct_path_len ?? null;
},
{
timeout: 10_000,
message: 'Waiting for recreated FlightlessDt contact to start in flood mode',
message: 'Waiting for recreated partner contact to start in flood mode',
}
)
.toBe(-1);
@@ -74,22 +70,22 @@ test.describe('Developer-only direct-route learning for FlightlessDt🥝', () =>
});
const searchInput = page.getByLabel('Search conversations');
await searchInput.fill(FLIGHTLESS_PUBLIC_KEY.slice(0, 12));
await expect(page.getByText(FLIGHTLESS_NAME, { exact: true })).toBeVisible({
await searchInput.fill(E2E_PARTNER_RADIO_PUBKEY.slice(0, 12));
await expect(page.getByText(E2E_PARTNER_RADIO_NAME, { exact: true })).toBeVisible({
timeout: 15_000,
});
await page.getByText(FLIGHTLESS_NAME, { exact: true }).click();
await page.getByText(E2E_PARTNER_RADIO_NAME, { exact: true }).click();
await expect
.poll(() => page.url(), {
timeout: 15_000,
message: 'Waiting for FlightlessDt conversation route to load',
message: 'Waiting for partner contact conversation route to load',
})
.toContain(`#contact/${encodeURIComponent(FLIGHTLESS_PUBLIC_KEY)}`);
.toContain(`#contact/${encodeURIComponent(E2E_PARTNER_RADIO_PUBKEY)}`);
await expect(
page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(FLIGHTLESS_NAME)}`, 'i'))
page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(E2E_PARTNER_RADIO_NAME)}`, 'i'))
).toBeVisible({ timeout: 15_000 });
const text = `dev-flightless-direct-${Date.now()}`;
const text = `dm-ack-route-test-${Date.now()}`;
const input = page.getByPlaceholder(/message/i);
await input.fill(text);
await page.getByRole('button', { name: 'Send', exact: true }).click();
@@ -100,7 +96,7 @@ test.describe('Developer-only direct-route learning for FlightlessDt🥝', () =>
async () => {
const messages = await getMessages({
type: 'PRIV',
conversation_key: FLIGHTLESS_PUBLIC_KEY,
conversation_key: E2E_PARTNER_RADIO_PUBKEY,
limit: 25,
});
const match = messages.find((message) => message.outgoing && message.text === text);
@@ -108,7 +104,7 @@ test.describe('Developer-only direct-route learning for FlightlessDt🥝', () =>
},
{
timeout: 90_000,
message: 'Waiting for FlightlessDt DM ACK',
message: 'Waiting for partner radio DM ACK',
}
)
.toBeGreaterThan(0);
@@ -116,17 +112,17 @@ test.describe('Developer-only direct-route learning for FlightlessDt🥝', () =>
await expect
.poll(
async () => {
const contact = await getContactByKey(FLIGHTLESS_PUBLIC_KEY);
const contact = await getContactByKey(E2E_PARTNER_RADIO_PUBKEY);
return contact?.direct_path_len ?? null;
},
{
timeout: 90_000,
message: 'Waiting for FlightlessDt route to update from flood to direct',
message: 'Waiting for partner radio route to update from flood to direct',
}
)
.toBe(0);
const learnedContact = await getContactByKey(FLIGHTLESS_PUBLIC_KEY);
const learnedContact = await getContactByKey(E2E_PARTNER_RADIO_PUBKEY);
expect(learnedContact?.direct_path ?? '').toBe('');
await page.locator('[title="View contact info"]').click();
+1 -1
View File
@@ -15,6 +15,6 @@ test.describe('Statistics page', () => {
await expect(page.locator('h4').getByText('Network')).toBeVisible({ timeout: 10_000 });
await expect(page.getByText('Contacts', { exact: true }).first()).toBeVisible();
await expect(page.getByText('Channels', { exact: true }).first()).toBeVisible();
await expect(page.locator('h4').getByText('Packets')).toBeVisible();
await expect(page.locator('h4').getByText('Packets', { exact: true })).toBeVisible();
});
});
+72
View File
@@ -127,6 +127,78 @@ class TestHealthEndpoint:
assert data["radio_connected"] is False
assert data["connection_info"] is None
def test_health_includes_radio_stats_when_available(self):
"""Health endpoint includes cached radio stats snapshot."""
from fastapi.testclient import TestClient
fake_stats = {
"timestamp": 1700000000,
"battery_mv": 4150,
"uptime_secs": 3600,
"noise_floor": -120,
"last_rssi": -85,
"last_snr": 9.5,
"tx_air_secs": 100,
"rx_air_secs": 200,
"packets": {
"recv": 500,
"sent": 250,
"flood_tx": 100,
"direct_tx": 150,
"flood_rx": 300,
"direct_rx": 200,
},
}
with (
patch("app.routers.health.radio_manager") as mock_rm,
patch("app.routers.health.get_latest_radio_stats", return_value=fake_stats),
):
mock_rm.is_connected = True
mock_rm.connection_info = "Serial: /dev/ttyUSB0"
mock_rm.is_setup_in_progress = False
mock_rm.is_setup_complete = True
mock_rm.connection_desired = True
mock_rm.is_reconnecting = False
mock_rm.device_info_loaded = False
from app.main import app
client = TestClient(app)
response = client.get("/api/health")
assert response.status_code == 200
stats = response.json()["radio_stats"]
assert stats["battery_mv"] == 4150
assert stats["uptime_secs"] == 3600
assert stats["noise_floor"] == -120
assert stats["packets_recv"] == 500
assert stats["packets_sent"] == 250
def test_health_radio_stats_null_when_no_data(self):
"""Health endpoint returns null radio_stats when cache is empty."""
from fastapi.testclient import TestClient
with (
patch("app.routers.health.radio_manager") as mock_rm,
patch("app.routers.health.get_latest_radio_stats", return_value={}),
):
mock_rm.is_connected = False
mock_rm.connection_info = None
mock_rm.is_setup_in_progress = False
mock_rm.is_setup_complete = False
mock_rm.connection_desired = True
mock_rm.is_reconnecting = False
mock_rm.device_info_loaded = False
from app.main import app
client = TestClient(app)
response = client.get("/api/health")
assert response.status_code == 200
assert response.json()["radio_stats"] is None
class TestDebugEndpoint:
"""Test the debug support snapshot endpoint."""
+230
View File
@@ -987,6 +987,130 @@ class TestDirectMessageDirectionDetection:
assert len(messages) == 1
assert messages[0].outgoing is False # Defaults to incoming
@pytest.mark.asyncio
async def test_ambiguous_direction_resolves_outgoing_echo(self, test_db, captured_broadcasts):
"""Ambiguous direction resolves to outgoing when a matching sent message exists.
Uses real colliding keys where both public keys start with 0xAA.
Without the fix, the echo would be stored as a second (incoming) row.
"""
from app.packet_processor import _process_direct_message
our_pub = "AAAA09479CF6FD6733CF052769E7C229CB86CA7F81E82439F9E4EB832CA7F8DC"
contact_pub = "AAAA2A563964F9B66E25E81FE6931B0E72AF585AEF79F43C1364DB4F6F882F07"
our_pub_bytes = bytes.fromhex(our_pub)
first_byte = "aa"
await ContactRepository.upsert(
{"public_key": contact_pub, "name": "CollidingContact", "type": 1}
)
# The send endpoint already stored the outgoing message
outgoing_id = await MessageRepository.create(
msg_type="PRIV",
text="Echo collision test",
conversation_key=contact_pub.lower(),
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP,
outgoing=True,
)
assert outgoing_id is not None
packet_info = MagicMock()
packet_info.payload = bytes([0xAA, 0xAA, 0x00, 0x00]) + b"\x00" * 20
packet_info.path = b"\xbb"
packet_info.path_length = 1
decrypted = DecryptedDirectMessage(
timestamp=SENDER_TIMESTAMP,
flags=0,
message="Echo collision test",
dest_hash=first_byte,
src_hash=first_byte,
)
pkt_id, _ = await RawPacketRepository.create(b"ambig_echo", SENDER_TIMESTAMP + 1)
broadcasts, mock_broadcast = captured_broadcasts
with (
patch("app.packet_processor.has_private_key", return_value=True),
patch("app.packet_processor.get_private_key", return_value=b"\x00" * 32),
patch("app.packet_processor.get_public_key", return_value=our_pub_bytes),
patch("app.packet_processor.try_decrypt_dm", return_value=decrypted),
patch("app.packet_processor.broadcast_event", mock_broadcast),
):
result = await _process_direct_message(
b"\x00" * 40, pkt_id, SENDER_TIMESTAMP + 1, packet_info
)
assert result is not None
# Should have exactly one message — the original outgoing, not a ghost incoming
messages = await MessageRepository.get_all(
msg_type="PRIV", conversation_key=contact_pub.lower(), limit=10
)
assert len(messages) == 1
assert messages[0].outgoing is True
assert messages[0].id == outgoing_id
# Path from the echo should have been added to the outgoing message
ack_broadcasts = [b for b in broadcasts if b["type"] == "message_acked"]
assert len(ack_broadcasts) == 1
assert ack_broadcasts[0]["data"]["message_id"] == outgoing_id
assert any(p["path"] == "bb" for p in ack_broadcasts[0]["data"]["paths"])
@pytest.mark.asyncio
async def test_ambiguous_direction_genuine_incoming_still_stored(
self, test_db, captured_broadcasts
):
"""Ambiguous direction with no matching outgoing message stores as incoming."""
from app.packet_processor import _process_direct_message
our_pub = "AAAA09479CF6FD6733CF052769E7C229CB86CA7F81E82439F9E4EB832CA7F8DC"
contact_pub = "AAAA2A563964F9B66E25E81FE6931B0E72AF585AEF79F43C1364DB4F6F882F07"
our_pub_bytes = bytes.fromhex(our_pub)
first_byte = "aa"
await ContactRepository.upsert(
{"public_key": contact_pub, "name": "CollidingContact", "type": 1}
)
# No outgoing message exists — this is a genuine incoming DM
packet_info = MagicMock()
packet_info.payload = bytes([0xAA, 0xAA, 0x00, 0x00]) + b"\x00" * 20
packet_info.path = b""
packet_info.path_length = 0
decrypted = DecryptedDirectMessage(
timestamp=SENDER_TIMESTAMP,
flags=0,
message="Genuine incoming",
dest_hash=first_byte,
src_hash=first_byte,
)
pkt_id, _ = await RawPacketRepository.create(b"ambig_genuine", SENDER_TIMESTAMP)
broadcasts, mock_broadcast = captured_broadcasts
with (
patch("app.packet_processor.has_private_key", return_value=True),
patch("app.packet_processor.get_private_key", return_value=b"\x00" * 32),
patch("app.packet_processor.get_public_key", return_value=our_pub_bytes),
patch("app.packet_processor.try_decrypt_dm", return_value=decrypted),
patch("app.packet_processor.broadcast_event", mock_broadcast),
):
result = await _process_direct_message(
b"\x00" * 40, pkt_id, SENDER_TIMESTAMP, packet_info
)
assert result is not None
messages = await MessageRepository.get_all(
msg_type="PRIV", conversation_key=contact_pub.lower(), limit=10
)
assert len(messages) == 1
assert messages[0].outgoing is False # Still incoming when no outgoing match
@pytest.mark.asyncio
async def test_neither_hash_matches_returns_none(self, test_db, captured_broadcasts):
"""Neither hash byte matches us → not our message → returns None."""
@@ -1286,3 +1410,109 @@ class TestMessageAckedBroadcastShape:
assert isinstance(payload["ack_count"], int)
assert payload["ack_count"] == 0 # Outgoing DM duplicates no longer count as delivery
assert payload["packet_id"] == pkt1
class TestRoomServerMessageDedup:
"""Test that room-server posts from different authors are not collapsed.
Room messages are PRIV type sharing one conversation_key (the room contact's
pubkey). The dedup index includes sender_key so that two different room
participants sending identical text in the same clock second are stored as
separate messages.
"""
ROOM_PUB = "bb" * 32 # Room contact public key
SENDER_A_KEY = "aa" * 32
SENDER_B_KEY = "cc" * 32
@pytest.mark.asyncio
async def test_distinct_room_authors_same_text_same_second_stored_separately(self, test_db):
"""Two room users sending identical text in the same second produce two rows."""
msg_id_a = await MessageRepository.create(
msg_type="PRIV",
text="ok",
conversation_key=self.ROOM_PUB,
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP,
outgoing=False,
sender_key=self.SENDER_A_KEY,
sender_name="Alice",
)
assert msg_id_a is not None
msg_id_b = await MessageRepository.create(
msg_type="PRIV",
text="ok",
conversation_key=self.ROOM_PUB,
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP + 1,
outgoing=False,
sender_key=self.SENDER_B_KEY,
sender_name="Bob",
)
assert msg_id_b is not None, (
"Second room post with different sender_key should not be deduped"
)
assert msg_id_a != msg_id_b
messages = await MessageRepository.get_all(
msg_type="PRIV", conversation_key=self.ROOM_PUB, limit=10
)
assert len(messages) == 2
@pytest.mark.asyncio
async def test_same_room_author_same_text_same_second_still_deduped(self, test_db):
"""True echo from the same room author is still collapsed (same sender_key)."""
msg_id_1 = await MessageRepository.create(
msg_type="PRIV",
text="ok",
conversation_key=self.ROOM_PUB,
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP,
outgoing=False,
sender_key=self.SENDER_A_KEY,
sender_name="Alice",
)
assert msg_id_1 is not None
msg_id_2 = await MessageRepository.create(
msg_type="PRIV",
text="ok",
conversation_key=self.ROOM_PUB,
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP + 1,
outgoing=False,
sender_key=self.SENDER_A_KEY,
sender_name="Alice",
)
assert msg_id_2 is None, "Same sender_key should still be deduped"
messages = await MessageRepository.get_all(
msg_type="PRIV", conversation_key=self.ROOM_PUB, limit=10
)
assert len(messages) == 1
@pytest.mark.asyncio
async def test_null_sender_key_still_dedupes_normally(self, test_db):
"""Non-room incoming DMs (sender_key=None) still dedupe on content."""
msg_id_1 = await MessageRepository.create(
msg_type="PRIV",
text="hello",
conversation_key=CONTACT_PUB.lower(),
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP,
outgoing=False,
sender_key=None,
)
assert msg_id_1 is not None
msg_id_2 = await MessageRepository.create(
msg_type="PRIV",
text="hello",
conversation_key=CONTACT_PUB.lower(),
sender_timestamp=SENDER_TIMESTAMP,
received_at=SENDER_TIMESTAMP + 1,
outgoing=False,
sender_key=None,
)
assert msg_id_2 is None, "Both NULL sender_key should still collide"
+160
View File
@@ -101,6 +101,9 @@ class StubModule(FanoutModule):
super().__init__("stub", {})
self.message_calls: list[dict] = []
self.raw_calls: list[dict] = []
self.contact_calls: list[dict] = []
self.telemetry_calls: list[dict] = []
self.health_calls: list[dict] = []
self._status = "connected"
async def start(self) -> None:
@@ -115,6 +118,15 @@ class StubModule(FanoutModule):
async def on_raw(self, data: dict) -> None:
self.raw_calls.append(data)
async def on_contact(self, data: dict) -> None:
self.contact_calls.append(data)
async def on_telemetry(self, data: dict) -> None:
self.telemetry_calls.append(data)
async def on_health(self, data: dict) -> None:
self.health_calls.append(data)
@property
def status(self) -> str:
return self._status
@@ -301,6 +313,113 @@ class TestFanoutManagerDispatch:
assert statuses["test-id"]["last_error"] == "ConnectionError: broker down"
# ---------------------------------------------------------------------------
# New event dispatch (contact, telemetry, health)
# ---------------------------------------------------------------------------
class TestFanoutManagerNewEventDispatch:
@pytest.mark.asyncio
async def test_broadcast_contact_dispatches_to_all_modules(self):
manager = FanoutManager()
mod = StubModule()
manager._modules["test-id"] = (mod, {})
await manager.broadcast_contact({"public_key": "aabb", "name": "Alice"})
assert len(mod.contact_calls) == 1
assert mod.contact_calls[0]["public_key"] == "aabb"
@pytest.mark.asyncio
async def test_broadcast_contact_ignores_scope(self):
"""Contact dispatch is unconditional — scope doesn't affect it."""
manager = FanoutManager()
mod = StubModule()
manager._modules["test-id"] = (mod, {"messages": "none", "raw_packets": "none"})
await manager.broadcast_contact({"public_key": "aabb"})
assert len(mod.contact_calls) == 1
@pytest.mark.asyncio
async def test_broadcast_telemetry_dispatches_to_all_modules(self):
manager = FanoutManager()
mod = StubModule()
manager._modules["test-id"] = (mod, {})
await manager.broadcast_telemetry(
{"public_key": "ccdd", "battery_volts": 4.1, "timestamp": 1000}
)
assert len(mod.telemetry_calls) == 1
assert mod.telemetry_calls[0]["battery_volts"] == 4.1
@pytest.mark.asyncio
async def test_broadcast_health_fanout_dispatches_to_all_modules(self):
manager = FanoutManager()
mod = StubModule()
manager._modules["test-id"] = (mod, {})
await manager.broadcast_health_fanout({"connected": True, "noise_floor_dbm": -112})
assert len(mod.health_calls) == 1
assert mod.health_calls[0]["connected"] is True
@pytest.mark.asyncio
async def test_new_events_do_not_affect_message_or_raw(self):
"""Verify new dispatch paths are independent of message/raw."""
manager = FanoutManager()
mod = StubModule()
manager._modules["test-id"] = (mod, {"messages": "all", "raw_packets": "all"})
await manager.broadcast_contact({"public_key": "aabb"})
await manager.broadcast_telemetry({"public_key": "ccdd", "battery_volts": 3.8})
await manager.broadcast_health_fanout({"connected": False})
assert len(mod.message_calls) == 0
assert len(mod.raw_calls) == 0
assert len(mod.contact_calls) == 1
assert len(mod.telemetry_calls) == 1
assert len(mod.health_calls) == 1
@pytest.mark.asyncio
async def test_base_module_no_ops_do_not_raise(self):
"""Default FanoutModule no-ops accept data without error."""
manager = FanoutManager()
class MinimalModule(FanoutModule):
@property
def status(self) -> str:
return "connected"
mod = MinimalModule("test", {})
manager._modules["test-id"] = (mod, {})
# Should not raise — base class no-ops silently accept
await manager.broadcast_contact({"public_key": "aabb"})
await manager.broadcast_telemetry({"public_key": "ccdd"})
await manager.broadcast_health_fanout({"connected": True})
@pytest.mark.asyncio
async def test_error_in_one_module_does_not_block_others(self):
manager = FanoutManager()
bad_mod = StubModule()
async def fail(data):
raise RuntimeError("boom")
bad_mod.on_contact = fail
good_mod = StubModule()
manager._modules["bad"] = (bad_mod, {})
manager._modules["good"] = (good_mod, {})
await manager.broadcast_contact({"public_key": "aabb"})
assert len(good_mod.contact_calls) == 1
# ---------------------------------------------------------------------------
# Repository tests
# ---------------------------------------------------------------------------
@@ -476,6 +595,47 @@ class TestBroadcastEventRealtime:
mock_ws.broadcast.assert_called_once()
mock_fm.broadcast_message.assert_called_once()
@pytest.mark.asyncio
async def test_contact_event_dispatches_to_fanout(self):
"""broadcast_event for 'contact' should trigger fanout contact dispatch."""
from app.websocket import broadcast_event
with (
patch("app.websocket.ws_manager") as mock_ws,
patch("app.fanout.manager.fanout_manager") as mock_fm,
):
mock_ws.broadcast = AsyncMock()
mock_fm.broadcast_contact = AsyncMock()
broadcast_event("contact", {"public_key": "aabb"}, realtime=True)
import asyncio
await asyncio.sleep(0)
mock_ws.broadcast.assert_called_once()
mock_fm.broadcast_contact.assert_called_once()
@pytest.mark.asyncio
async def test_contact_event_skipped_when_not_realtime(self):
    """broadcast_event('contact', ..., realtime=False) should skip fanout."""
    import asyncio

    from app.websocket import broadcast_event

    with (
        patch("app.websocket.ws_manager") as ws_mock,
        patch("app.fanout.manager.fanout_manager") as fanout_mock,
    ):
        ws_mock.broadcast = AsyncMock()
        broadcast_event("contact", {"public_key": "aabb"}, realtime=False)
        # Yield one loop tick so any scheduled background work would have run.
        await asyncio.sleep(0)
        ws_mock.broadcast.assert_called_once()
        fanout_mock.broadcast_contact.assert_not_called()
# ---------------------------------------------------------------------------
# Webhook module unit tests
+26 -19
View File
@@ -7,6 +7,12 @@ import pytest
from app.migrations import get_version, run_migrations, set_version
# Updated automatically when a new migration is added. Migration tests that
# run ``run_migrations`` to completion assert ``get_version == LATEST`` and
# ``applied == LATEST - starting_version`` so only this constant needs to
# change, not every individual assertion.
LATEST_SCHEMA_VERSION = 56
class TestMigration001:
"""Test migration 001: add last_read_at columns."""
@@ -833,9 +839,9 @@ class TestMigration044:
assert [row["message_id"] for row in await cursor.fetchall()] == [1, 1]
cursor = await conn.execute(
"INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp, received_at, outgoing) "
"VALUES (?, ?, ?, ?, ?, ?)",
("PRIV", "abc123", "hello", 0, 9999, 0),
"INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp, received_at, outgoing, sender_key) "
"VALUES (?, ?, ?, ?, ?, ?, ?)",
("PRIV", "abc123", "hello", 0, 9999, 0, "abc123"),
)
assert cursor.rowcount == 0
@@ -844,6 +850,7 @@ class TestMigration044:
)
index_sql = (await cursor.fetchone())["sql"]
assert "WHERE type = 'PRIV' AND outgoing = 0" in index_sql
assert "sender_key" in index_sql
finally:
await conn.close()
@@ -1224,8 +1231,8 @@ class TestMigration039:
applied = await run_migrations(conn)
assert applied == 17
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 38
assert await get_version(conn) == LATEST_SCHEMA_VERSION
cursor = await conn.execute(
"""
@@ -1296,8 +1303,8 @@ class TestMigration039:
applied = await run_migrations(conn)
assert applied == 17
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 38
assert await get_version(conn) == LATEST_SCHEMA_VERSION
cursor = await conn.execute(
"""
@@ -1363,8 +1370,8 @@ class TestMigration039:
applied = await run_migrations(conn)
assert applied == 11
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 44
assert await get_version(conn) == LATEST_SCHEMA_VERSION
cursor = await conn.execute(
"""
@@ -1416,8 +1423,8 @@ class TestMigration040:
applied = await run_migrations(conn)
assert applied == 16
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 39
assert await get_version(conn) == LATEST_SCHEMA_VERSION
await conn.execute(
"""
@@ -1478,8 +1485,8 @@ class TestMigration041:
applied = await run_migrations(conn)
assert applied == 15
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 40
assert await get_version(conn) == LATEST_SCHEMA_VERSION
await conn.execute(
"""
@@ -1531,8 +1538,8 @@ class TestMigration042:
applied = await run_migrations(conn)
assert applied == 14
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 41
assert await get_version(conn) == LATEST_SCHEMA_VERSION
await conn.execute(
"""
@@ -1671,8 +1678,8 @@ class TestMigration046:
applied = await run_migrations(conn)
assert applied == 10
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 45
assert await get_version(conn) == LATEST_SCHEMA_VERSION
cursor = await conn.execute(
"""
@@ -1765,8 +1772,8 @@ class TestMigration047:
applied = await run_migrations(conn)
assert applied == 9
assert await get_version(conn) == 55
assert applied == LATEST_SCHEMA_VERSION - 46
assert await get_version(conn) == LATEST_SCHEMA_VERSION
cursor = await conn.execute(
"""
+2 -4
View File
@@ -1073,9 +1073,7 @@ class TestPostConnectSetupOrdering:
rm = RadioManager()
rm._connection_info = "Serial: /dev/ttyUSB0"
rm.post_connect_setup = AsyncMock(
side_effect=[asyncio.TimeoutError(), asyncio.TimeoutError()]
)
rm.post_connect_setup = AsyncMock(side_effect=[TimeoutError(), TimeoutError()])
with (
patch("app.websocket.broadcast_error") as mock_broadcast_error,
@@ -1099,7 +1097,7 @@ class TestPostConnectSetupOrdering:
rm = RadioManager()
rm._connection_info = "Serial: /dev/ttyUSB0"
rm.post_connect_setup = AsyncMock(side_effect=[asyncio.TimeoutError(), None])
rm.post_connect_setup = AsyncMock(side_effect=[TimeoutError(), None])
with (
patch("app.websocket.broadcast_error") as mock_broadcast_error,
-37
View File
@@ -1,37 +0,0 @@
import asyncio
from unittest.mock import patch
import pytest
from app.services import radio_noise_floor
class TestNoiseFloorSamplingLoop:
    """Unit tests for the noise-floor sampling loop in app.services.radio_noise_floor."""

    @pytest.mark.asyncio
    async def test_logs_and_continues_after_unexpected_sample_exception(self):
        """An unexpected exception from one sample is logged and the loop keeps going."""
        sample_calls = 0
        sleep_calls = 0

        async def fake_sample() -> None:
            # First sample blows up; subsequent samples succeed.
            nonlocal sample_calls
            sample_calls += 1
            if sample_calls == 1:
                raise RuntimeError("boom")

        async def fake_sleep(_seconds: int) -> None:
            # Terminate the otherwise-infinite loop after the second iteration.
            nonlocal sleep_calls
            sleep_calls += 1
            if sleep_calls >= 2:
                raise asyncio.CancelledError()

        with (
            patch.object(radio_noise_floor, "sample_noise_floor_once", side_effect=fake_sample),
            patch.object(radio_noise_floor.asyncio, "sleep", side_effect=fake_sleep),
            patch.object(radio_noise_floor.logger, "exception") as mock_exception,
        ):
            with pytest.raises(asyncio.CancelledError):
                await radio_noise_floor._noise_floor_sampling_loop()
        # Sampling was attempted again after the failure, and the failure was logged once.
        assert sample_calls == 2
        assert sleep_calls == 2
        mock_exception.assert_called_once()
+15
View File
@@ -4,6 +4,7 @@ from unittest.mock import AsyncMock
import pytest
from fastapi import HTTPException
from app.radio_runtime_state import RadioRuntimeState
from app.services.radio_runtime import RadioRuntime
@@ -114,3 +115,17 @@ async def test_lifecycle_passthrough_methods_delegate_to_current_manager():
manager.start_connection_monitor.assert_awaited_once()
manager.stop_connection_monitor.assert_awaited_once()
manager.disconnect.assert_awaited_once()
def test_explicit_runtime_state_api_replaces_attribute_forwarding():
    """RadioRuntime reads/writes path_hash_mode via manager.state, not attr forwarding."""
    mgr = _Manager(meshcore="meshcore", is_connected=True)
    mgr.state = RadioRuntimeState()
    mgr.state.path_hash_mode = 2

    rt = RadioRuntime(mgr)
    assert rt.path_hash_mode == 2

    # Writes propagate back into the underlying state object.
    rt.path_hash_mode = 1
    assert mgr.state.path_hash_mode == 1

    # Unknown attributes are no longer silently forwarded to the manager.
    with pytest.raises(AttributeError, match="does not expose attribute"):
        _ = rt.some_random_attr
+264
View File
@@ -0,0 +1,264 @@
import asyncio
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from app.services import radio_stats
def _make_event(event_type, payload=None):
return SimpleNamespace(type=event_type, payload=payload or {})
class TestRadioStatsSamplingLoop:
    """Unit tests for the periodic stats sampling loop in app.services.radio_stats."""

    @pytest.mark.asyncio
    async def test_logs_and_continues_after_unexpected_sample_exception(self):
        """An unexpected exception from one sample is logged and the loop keeps running."""
        sample_calls = 0
        sleep_calls = 0

        async def fake_sample():
            # First call blows up; second call returns an empty snapshot.
            nonlocal sample_calls
            sample_calls += 1
            if sample_calls == 1:
                raise RuntimeError("boom")
            return {}

        async def fake_sleep(_seconds: int) -> None:
            # Terminate the otherwise-infinite loop after two iterations.
            nonlocal sleep_calls
            sleep_calls += 1
            if sleep_calls >= 2:
                raise asyncio.CancelledError()

        mock_fanout = MagicMock()
        mock_fanout.broadcast_health_fanout = AsyncMock()
        with (
            patch.object(radio_stats, "_sample_all_stats", side_effect=fake_sample),
            patch.object(radio_stats.asyncio, "sleep", side_effect=fake_sleep),
            patch.object(radio_stats.logger, "exception") as mock_exception,
            patch("app.fanout.manager.fanout_manager", mock_fanout),
        ):
            with pytest.raises(asyncio.CancelledError):
                await radio_stats._stats_sampling_loop()
        # Sampling was retried after the failure, and the failure was logged once.
        assert sample_calls == 2
        assert sleep_calls == 2
        mock_exception.assert_called_once()

    @pytest.mark.asyncio
    async def test_broadcasts_health_every_cycle(self):
        """The loop should push a WS health broadcast and fanout after every iteration."""
        sleep_calls = 0

        async def fake_sample():
            return {}

        async def fake_sleep(_seconds: int) -> None:
            # Two iterations, then cancel out of the loop.
            nonlocal sleep_calls
            sleep_calls += 1
            if sleep_calls >= 2:
                raise asyncio.CancelledError()

        mock_fanout = MagicMock()
        mock_fanout.broadcast_health_fanout = AsyncMock()
        with (
            patch.object(radio_stats, "_sample_all_stats", side_effect=fake_sample),
            patch.object(radio_stats.asyncio, "sleep", side_effect=fake_sleep),
            patch("app.websocket.broadcast_health") as mock_broadcast,
            patch("app.fanout.manager.fanout_manager", mock_fanout),
        ):
            with pytest.raises(asyncio.CancelledError):
                await radio_stats._stats_sampling_loop()
        # Two loop iterations -> two WS broadcasts and two fanout pushes.
        assert mock_broadcast.call_count == 2
        assert mock_fanout.broadcast_health_fanout.call_count == 2

    @pytest.mark.asyncio
    async def test_fanout_receives_enriched_payload(self):
        """The health fanout payload should include radio identity + stats."""
        sleep_calls = 0
        fake_snapshot = {
            "timestamp": 1700000000,
            "battery_mv": 4100,
            "uptime_secs": 3600,
            "noise_floor": -118,
            "last_rssi": -85,
            "last_snr": 9.5,
            "tx_air_secs": 100,
            "rx_air_secs": 200,
            "packets": {"recv": 500, "sent": 250},
        }

        async def fake_sample():
            # Return a copy so the loop cannot mutate the fixture snapshot.
            return dict(fake_snapshot)

        async def fake_sleep(_seconds: int) -> None:
            # Cancel immediately after the first iteration.
            nonlocal sleep_calls
            sleep_calls += 1
            raise asyncio.CancelledError()

        mock_fanout = MagicMock()
        mock_fanout.broadcast_health_fanout = AsyncMock()
        with (
            patch.object(radio_stats, "_sample_all_stats", side_effect=fake_sample),
            patch.object(radio_stats.asyncio, "sleep", side_effect=fake_sleep),
            patch("app.websocket.broadcast_health"),
            patch("app.fanout.manager.fanout_manager", mock_fanout),
            patch.object(radio_stats, "radio_manager") as mock_rm,
        ):
            mock_rm.is_connected = True
            mock_rm.connection_info = "Serial: /dev/ttyUSB0"
            mock_rm.meshcore = MagicMock()
            mock_rm.meshcore.self_info = {"public_key": "aabbccddeeff", "name": "MyRadio"}
            with pytest.raises(asyncio.CancelledError):
                await radio_stats._stats_sampling_loop()
        # Payload combines radio identity with flattened stats fields
        # (e.g. noise_floor -> noise_floor_dbm, packets.recv -> packets_recv).
        payload = mock_fanout.broadcast_health_fanout.call_args[0][0]
        assert payload["connected"] is True
        assert payload["public_key"] == "aabbccddeeff"
        assert payload["name"] == "MyRadio"
        assert payload["battery_mv"] == 4100
        assert payload["noise_floor_dbm"] == -118
        assert payload["packets_recv"] == 500
class TestSampleAllStats:
    """Unit tests for radio_stats._sample_all_stats snapshot collection."""

    @pytest.mark.asyncio
    async def test_returns_empty_when_disconnected(self):
        """Should return empty dict when radio is disconnected."""
        with patch.object(radio_stats, "radio_manager") as mock_rm:
            mock_rm.is_connected = False
            result = await radio_stats._sample_all_stats()
            assert result == {}

    @pytest.mark.asyncio
    async def test_partial_stats_still_records_available_data(self):
        """If core stats return ERROR but radio/packet stats succeed, noise floor
        is still sampled and available fields are returned."""
        from meshcore import EventType

        # Reset module-level noise floor history so the append assertion is exact.
        radio_stats._noise_floor_samples.clear()
        core_event = _make_event(EventType.ERROR, {"reason": "unsupported"})
        radio_event = _make_event(
            EventType.STATS_RADIO,
            {
                "noise_floor": -118,
                "last_rssi": -90,
                "last_snr": 8.0,
                "tx_air_secs": 10,
                "rx_air_secs": 20,
            },
        )
        packet_event = _make_event(
            EventType.STATS_PACKETS,
            {
                "recv": 100,
                "sent": 50,
                "flood_tx": 20,
                "direct_tx": 30,
                "flood_rx": 60,
                "direct_rx": 40,
            },
        )
        mock_mc = AsyncMock()
        mock_mc.commands.get_stats_core = AsyncMock(return_value=core_event)
        mock_mc.commands.get_stats_radio = AsyncMock(return_value=radio_event)
        mock_mc.commands.get_stats_packets = AsyncMock(return_value=packet_event)
        # radio_operation() is used as an async context manager yielding the meshcore client.
        mock_ctx = AsyncMock()
        mock_ctx.__aenter__ = AsyncMock(return_value=mock_mc)
        mock_ctx.__aexit__ = AsyncMock(return_value=False)
        with patch.object(radio_stats, "radio_manager") as mock_rm:
            mock_rm.is_connected = True
            mock_rm.radio_operation = MagicMock(return_value=mock_ctx)
            snapshot = await radio_stats._sample_all_stats()
            # Core fields missing (ERROR), but radio + packet fields present
            assert "battery_mv" not in snapshot
            assert snapshot["noise_floor"] == -118
            assert snapshot["packets"]["recv"] == 100
            # Noise floor history was still appended
            assert len(radio_stats._noise_floor_samples) == 1

    @pytest.mark.asyncio
    async def test_all_stats_succeed(self):
        """All three stats commands succeed — full snapshot returned."""
        from meshcore import EventType

        radio_stats._noise_floor_samples.clear()
        core_event = _make_event(
            EventType.STATS_CORE,
            {"battery_mv": 4100, "uptime_secs": 7200, "errors": 0, "queue_len": 2},
        )
        radio_event = _make_event(
            EventType.STATS_RADIO,
            {
                "noise_floor": -120,
                "last_rssi": -85,
                "last_snr": 9.5,
                "tx_air_secs": 100,
                "rx_air_secs": 200,
            },
        )
        packet_event = _make_event(
            EventType.STATS_PACKETS,
            {
                "recv": 500,
                "sent": 250,
                "flood_tx": 100,
                "direct_tx": 150,
                "flood_rx": 300,
                "direct_rx": 200,
            },
        )
        mock_mc = AsyncMock()
        mock_mc.commands.get_stats_core = AsyncMock(return_value=core_event)
        mock_mc.commands.get_stats_radio = AsyncMock(return_value=radio_event)
        mock_mc.commands.get_stats_packets = AsyncMock(return_value=packet_event)
        mock_ctx = AsyncMock()
        mock_ctx.__aenter__ = AsyncMock(return_value=mock_mc)
        mock_ctx.__aexit__ = AsyncMock(return_value=False)
        with patch.object(radio_stats, "radio_manager") as mock_rm:
            mock_rm.is_connected = True
            mock_rm.radio_operation = MagicMock(return_value=mock_ctx)
            snapshot = await radio_stats._sample_all_stats()
            # One representative field from each of the three stats groups.
            assert snapshot["battery_mv"] == 4100
            assert snapshot["noise_floor"] == -120
            assert snapshot["packets"]["sent"] == 250
            assert len(radio_stats._noise_floor_samples) == 1

    @pytest.mark.asyncio
    async def test_all_errors_returns_empty(self):
        """If every stats command returns ERROR, result is empty."""
        from meshcore import EventType

        error = _make_event(EventType.ERROR, {"reason": "unsupported"})
        mock_mc = AsyncMock()
        mock_mc.commands.get_stats_core = AsyncMock(return_value=error)
        mock_mc.commands.get_stats_radio = AsyncMock(return_value=error)
        mock_mc.commands.get_stats_packets = AsyncMock(return_value=error)
        mock_ctx = AsyncMock()
        mock_ctx.__aenter__ = AsyncMock(return_value=mock_mc)
        mock_ctx.__aexit__ = AsyncMock(return_value=False)
        with patch.object(radio_stats, "radio_manager") as mock_rm:
            mock_rm.is_connected = True
            mock_rm.radio_operation = MagicMock(return_value=mock_ctx)
            snapshot = await radio_stats._sample_all_stats()
            assert snapshot == {}
+318
View File
@@ -1220,6 +1220,324 @@ class TestResendChannelMessage:
assert "expired" in exc_info.value.detail.lower()
class TestPathHashModeOverride:
    """Test per-channel path_hash_mode_override apply/restore behavior."""

    @pytest.mark.asyncio
    async def test_send_channel_msg_uses_phm_override(self, test_db):
        """Override is applied before send and baseline restored after."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock(return_value=_make_radio_result())
        chan_key = "f1" * 16
        await ChannelRepository.upsert(key=chan_key, name="#phm")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 2)
        # Baseline mode 0 on a radio that supports the command.
        radio_manager.path_hash_mode = 0
        radio_manager.path_hash_mode_supported = True
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
        ):
            await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        # Override (2) applied first, then baseline (0) restored — in that order.
        assert mc.commands.set_path_hash_mode.await_args_list == [call(2), call(0)]
        assert radio_manager.path_hash_mode == 0

    @pytest.mark.asyncio
    async def test_send_channel_msg_skips_phm_when_matching_baseline(self, test_db):
        """No set_path_hash_mode calls when override matches baseline."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock()
        chan_key = "f2" * 16
        await ChannelRepository.upsert(key=chan_key, name="#same")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 1)
        # Override equals the current baseline, so no radio round-trip is needed.
        radio_manager.path_hash_mode = 1
        radio_manager.path_hash_mode_supported = True
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
        ):
            await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        mc.commands.set_path_hash_mode.assert_not_awaited()

    @pytest.mark.asyncio
    async def test_send_channel_msg_skips_phm_when_unsupported(self, test_db):
        """No set_path_hash_mode calls when radio doesn't support it."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock()
        chan_key = "f3" * 16
        await ChannelRepository.upsert(key=chan_key, name="#nosupport")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 2)
        radio_manager.path_hash_mode = 0
        radio_manager.path_hash_mode_supported = False
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
        ):
            await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        mc.commands.set_path_hash_mode.assert_not_awaited()

    @pytest.mark.asyncio
    async def test_send_channel_msg_aborts_on_phm_apply_error(self, test_db):
        """ERROR on apply aborts the send entirely."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock(
            return_value=MagicMock(type=EventType.ERROR, payload="unsupported mode")
        )
        chan_key = "f4" * 16
        await ChannelRepository.upsert(key=chan_key, name="#fail")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 2)
        radio_manager.path_hash_mode = 0
        radio_manager.path_hash_mode_supported = True
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
            pytest.raises(HTTPException) as exc_info,
        ):
            await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        assert exc_info.value.status_code == 500
        assert "path hash mode" in exc_info.value.detail.lower()
        # The message must never reach the radio when apply fails.
        mc.commands.send_chan_msg.assert_not_awaited()

    @pytest.mark.asyncio
    async def test_send_channel_msg_phm_restore_failure_broadcasts_error(self, test_db):
        """Message sends OK but restore failure after 3 attempts broadcasts an error."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock(
            side_effect=[
                _make_radio_result(),  # apply succeeds
                MagicMock(type=EventType.ERROR, payload="fail 1"),
                MagicMock(type=EventType.ERROR, payload="fail 2"),
                MagicMock(type=EventType.ERROR, payload="fail 3"),
            ]
        )
        chan_key = "f5" * 16
        await ChannelRepository.upsert(key=chan_key, name="#restorefail")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 2)
        radio_manager.path_hash_mode = 0
        radio_manager.path_hash_mode_supported = True
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
            patch("app.routers.messages.broadcast_error") as mock_err,
        ):
            result = await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        assert result is not None  # message sent OK
        mock_err.assert_called_once()
        assert "path hash mode" in mock_err.call_args.args[0].lower()
        # 1 apply + 3 restore attempts = 4 calls total
        assert mc.commands.set_path_hash_mode.await_count == 4

    @pytest.mark.asyncio
    async def test_send_channel_msg_phm_restore_succeeds_on_second_attempt(self, test_db):
        """Restore retries and succeeds on the second attempt — no error broadcast."""
        mc = _make_mc(name="MyNode")
        mc.commands.set_path_hash_mode = AsyncMock(
            side_effect=[
                _make_radio_result(),  # apply succeeds
                MagicMock(type=EventType.ERROR, payload="transient"),  # restore attempt 1
                _make_radio_result(),  # restore attempt 2 succeeds
            ]
        )
        chan_key = "f6" * 16
        await ChannelRepository.upsert(key=chan_key, name="#retry")
        await ChannelRepository.update_path_hash_mode_override(chan_key, 2)
        radio_manager.path_hash_mode = 0
        radio_manager.path_hash_mode_supported = True
        with (
            patch("app.routers.messages.radio_manager.require_connected", return_value=mc),
            patch.object(radio_manager, "_meshcore", mc),
            patch("app.routers.messages.broadcast_event"),
            patch("app.routers.messages.broadcast_error") as mock_err,
        ):
            await send_channel_message(
                SendChannelMessageRequest(channel_key=chan_key, text="hello")
            )
        mock_err.assert_not_called()
        assert radio_manager.path_hash_mode == 0  # restored to baseline
        # 1 apply + 2 restore attempts = 3 calls
        assert mc.commands.set_path_hash_mode.await_count == 3
class TestChannelEchoWatchdog:
    """Test the auto-resend echo watchdog for channel messages."""

    @pytest.fixture(autouse=True)
    def _skip_watchdog_delay(self, monkeypatch):
        # Zero out the watchdog's sleep so each test completes immediately.
        monkeypatch.setattr(message_send_service, "ECHO_WATCHDOG_DELAY_SECONDS", 0)

    @pytest.mark.asyncio
    async def test_watchdog_skips_when_echo_already_received(self, test_db):
        """Watchdog sees acked > 0 and returns without resending."""
        chan_key = "e1" * 16
        await ChannelRepository.upsert(key=chan_key, name="#echo")
        msg_id = await MessageRepository.create(
            msg_type="CHAN",
            text="MyNode: hello",
            conversation_key=chan_key.upper(),
            sender_timestamp=int(time.time()),
            received_at=int(time.time()),
            outgoing=True,
        )
        # Simulate the echo having already arrived before the watchdog wakes.
        await MessageRepository.increment_ack_count(msg_id)
        mc = _make_mc(name="MyNode")
        with patch.object(radio_manager, "_meshcore", mc):
            await message_send_service._channel_echo_watchdog(
                message_id=msg_id,
                radio_manager=radio_manager,
                broadcast_fn=MagicMock(),
                error_broadcast_fn=MagicMock(),
            )
        # No radio operation attempted
        mc.commands.send_chan_msg.assert_not_called()

    @pytest.mark.asyncio
    async def test_watchdog_skips_when_outside_resend_window(self, test_db):
        """Watchdog skips resend when message is older than 30 seconds."""
        chan_key = "e2" * 16
        await ChannelRepository.upsert(key=chan_key, name="#stale")
        # 60s old — well outside the resend window.
        old_ts = int(time.time()) - 60
        msg_id = await MessageRepository.create(
            msg_type="CHAN",
            text="MyNode: old",
            conversation_key=chan_key.upper(),
            sender_timestamp=old_ts,
            received_at=old_ts,
            outgoing=True,
        )
        mc = _make_mc(name="MyNode")
        with patch.object(radio_manager, "_meshcore", mc):
            await message_send_service._channel_echo_watchdog(
                message_id=msg_id,
                radio_manager=radio_manager,
                broadcast_fn=MagicMock(),
                error_broadcast_fn=MagicMock(),
            )
        mc.commands.send_chan_msg.assert_not_called()

    @pytest.mark.asyncio
    async def test_watchdog_resends_when_no_echo(self, test_db):
        """Watchdog resends byte-perfect when no echo has arrived."""
        chan_key = "e3" * 16
        await ChannelRepository.upsert(key=chan_key, name="#resend")
        now = int(time.time())
        msg_id = await MessageRepository.create(
            msg_type="CHAN",
            text="MyNode: payload",
            conversation_key=chan_key.upper(),
            sender_timestamp=now,
            received_at=now,
            outgoing=True,
        )
        mc = _make_mc(name="MyNode")
        with patch.object(radio_manager, "_meshcore", mc):
            await message_send_service._channel_echo_watchdog(
                message_id=msg_id,
                radio_manager=radio_manager,
                broadcast_fn=MagicMock(),
                error_broadcast_fn=MagicMock(),
            )
        mc.commands.send_chan_msg.assert_awaited_once()
        call_kwargs = mc.commands.send_chan_msg.await_args.kwargs
        assert call_kwargs["msg"] == "payload"  # sender prefix stripped
        # Original timestamp is resent so the packet is byte-identical on air.
        assert call_kwargs["timestamp"] == now.to_bytes(4, "little")

    @pytest.mark.asyncio
    async def test_watchdog_handles_radio_busy_gracefully(self, test_db):
        """RadioOperationBusyError is caught — no exception propagates."""
        chan_key = "e4" * 16
        await ChannelRepository.upsert(key=chan_key, name="#busy")
        now = int(time.time())
        msg_id = await MessageRepository.create(
            msg_type="CHAN",
            text="MyNode: busy test",
            conversation_key=chan_key.upper(),
            sender_timestamp=now,
            received_at=now,
            outgoing=True,
        )
        mc = _make_mc(name="MyNode")
        # Fixed: use patch.object like the sibling tests instead of assigning
        # radio_manager._meshcore directly, which leaked the mock into later tests.
        with patch.object(radio_manager, "_meshcore", mc):
            # Lock the radio so the non-blocking acquire raises RadioOperationBusyError
            if radio_manager._operation_lock is None:
                radio_manager._operation_lock = asyncio.Lock()
            await radio_manager._operation_lock.acquire()
            try:
                # Should not raise — RadioOperationBusyError is caught internally
                await message_send_service._channel_echo_watchdog(
                    message_id=msg_id,
                    radio_manager=radio_manager,
                    broadcast_fn=MagicMock(),
                    error_broadcast_fn=MagicMock(),
                )
            finally:
                radio_manager._operation_lock.release()
        mc.commands.send_chan_msg.assert_not_called()

    @pytest.mark.asyncio
    async def test_watchdog_skips_deleted_message(self, test_db):
        """Watchdog exits cleanly if the message was deleted before it wakes."""
        # Use a message_id that doesn't exist
        mc = _make_mc(name="MyNode")
        with patch.object(radio_manager, "_meshcore", mc):
            await message_send_service._channel_echo_watchdog(
                message_id=999999,
                radio_manager=radio_manager,
                broadcast_fn=MagicMock(),
                error_broadcast_fn=MagicMock(),
            )
        mc.commands.send_chan_msg.assert_not_called()
class TestRadioExceptionMidSend:
"""Test that radio exceptions during send don't leave orphaned DB state."""
+127
View File
@@ -0,0 +1,127 @@
"""Tests for the SQS fanout module helper functions.
Covers region inference from queue URLs, FIFO deduplication ID fallback chains,
and message group ID construction — the non-trivial logic in app/fanout/sqs.py.
"""
import hashlib
from app.fanout.sqs import (
_build_message_deduplication_id,
_build_message_group_id,
_infer_region_from_queue_url,
_is_fifo_queue,
)
class TestInferRegionFromQueueUrl:
    """URL parsing for AWS region extraction."""

    def test_standard_us_east_1(self):
        queue = "https://sqs.us-east-1.amazonaws.com/123456789012/my-queue"
        assert _infer_region_from_queue_url(queue) == "us-east-1"

    def test_standard_eu_west_2(self):
        queue = "https://sqs.eu-west-2.amazonaws.com/123456789012/my-queue"
        assert _infer_region_from_queue_url(queue) == "eu-west-2"

    def test_china_region(self):
        # China partition uses the .amazonaws.com.cn suffix.
        queue = "https://sqs.cn-north-1.amazonaws.com.cn/123456789012/my-queue"
        assert _infer_region_from_queue_url(queue) == "cn-north-1"

    def test_non_sqs_hostname_returns_none(self):
        bucket = "https://s3.us-east-1.amazonaws.com/bucket/key"
        assert _infer_region_from_queue_url(bucket) is None

    def test_localstack_endpoint_returns_none(self):
        local = "http://localhost:4566/000000000000/my-queue"
        assert _infer_region_from_queue_url(local) is None

    def test_empty_url_returns_none(self):
        assert _infer_region_from_queue_url("") is None

    def test_non_amazonaws_domain_returns_none(self):
        lookalike = "https://sqs.us-east-1.example.com/123/queue"
        assert _infer_region_from_queue_url(lookalike) is None

    def test_fifo_queue_url_still_parses_region(self):
        fifo = "https://sqs.ap-southeast-1.amazonaws.com/123456789012/my-queue.fifo"
        assert _infer_region_from_queue_url(fifo) == "ap-southeast-1"
class TestIsFifoQueue:
    """FIFO-vs-standard queue detection from the URL suffix."""

    def test_fifo_suffix(self):
        fifo = "https://sqs.us-east-1.amazonaws.com/123/queue.fifo"
        assert _is_fifo_queue(fifo) is True

    def test_standard_queue(self):
        plain = "https://sqs.us-east-1.amazonaws.com/123/queue"
        assert _is_fifo_queue(plain) is False

    def test_trailing_slash_stripped(self):
        trailing = "https://sqs.us-east-1.amazonaws.com/123/queue.fifo/"
        assert _is_fifo_queue(trailing) is True
class TestBuildMessageGroupId:
    """FIFO message group ID selection."""

    def test_message_event_with_conversation_key(self):
        payload = {"conversation_key": "abc123", "text": "hello"}
        assert _build_message_group_id(payload, event_type="message") == "message-abc123"

    def test_message_event_without_conversation_key_falls_back(self):
        payload = {"text": "hello"}
        assert _build_message_group_id(payload, event_type="message") == "message-default"

    def test_raw_packet_event_always_returns_raw_packets(self):
        payload = {"id": 1, "payload": "deadbeef"}
        assert _build_message_group_id(payload, event_type="raw_packet") == "raw-packets"

    def test_message_event_with_empty_conversation_key_falls_back(self):
        # Whitespace-only conversation keys are treated as missing.
        payload = {"conversation_key": " ", "text": "hello"}
        assert _build_message_group_id(payload, event_type="message") == "message-default"
class TestBuildMessageDeduplicationId:
    """FIFO deduplication ID fallback chain."""

    def test_message_with_int_id(self):
        payload = {"id": 42}
        dedup = _build_message_deduplication_id(payload, event_type="message", body="{}")
        assert dedup == "message-42"

    def test_message_with_string_id_falls_back_to_hash(self):
        body = '{"event_type":"message","data":{"id":"not-an-int"}}'
        payload = {"id": "not-an-int"}
        dedup = _build_message_deduplication_id(payload, event_type="message", body=body)
        assert dedup == hashlib.sha256(body.encode()).hexdigest()

    def test_message_without_id_falls_back_to_hash(self):
        body = '{"event_type":"message","data":{}}'
        dedup = _build_message_deduplication_id({}, event_type="message", body=body)
        assert dedup == hashlib.sha256(body.encode()).hexdigest()

    def test_raw_with_observation_id(self):
        payload = {"observation_id": "obs-123", "id": 7}
        dedup = _build_message_deduplication_id(payload, event_type="raw_packet", body="{}")
        assert dedup == "raw-obs-123"

    def test_raw_with_empty_observation_id_falls_to_packet_id(self):
        # Whitespace-only observation IDs are treated as absent.
        payload = {"observation_id": " ", "id": 7}
        dedup = _build_message_deduplication_id(payload, event_type="raw_packet", body="{}")
        assert dedup == "raw-7"

    def test_raw_with_no_observation_id_uses_packet_id(self):
        payload = {"id": 99}
        dedup = _build_message_deduplication_id(payload, event_type="raw_packet", body="{}")
        assert dedup == "raw-99"

    def test_raw_with_no_ids_falls_back_to_hash(self):
        body = '{"event_type":"raw_packet","data":{}}'
        dedup = _build_message_deduplication_id({}, event_type="raw_packet", body=body)
        assert dedup == hashlib.sha256(body.encode()).hexdigest()

    def test_raw_with_non_string_observation_id_falls_to_packet_id(self):
        payload = {"observation_id": 123, "id": 5}
        dedup = _build_message_deduplication_id(payload, event_type="raw_packet", body="{}")
        assert dedup == "raw-5"
+2 -3
View File
@@ -450,11 +450,10 @@ class TestStatisticsEndpoint:
@pytest.mark.asyncio
async def test_statistics_endpoint_includes_noise_floor_history(self, test_db, client):
noise_floor_history = {
"sample_interval_seconds": 300,
"sample_interval_seconds": 60,
"coverage_seconds": 1800,
"latest_noise_floor_dbm": -119,
"latest_timestamp": 1_700_000_000,
"supported": True,
"samples": [
{"timestamp": 1_699_998_200, "noise_floor_dbm": -121},
{"timestamp": 1_700_000_000, "noise_floor_dbm": -119},
@@ -463,7 +462,7 @@ class TestStatisticsEndpoint:
with patch(
"app.routers.statistics.get_noise_floor_history",
new=AsyncMock(return_value=noise_floor_history),
return_value=noise_floor_history,
):
response = await client.get("/api/statistics")
Generated
+5 -5
View File
@@ -56,7 +56,7 @@ wheels = [
[[package]]
name = "apprise"
version = "1.9.7"
version = "1.9.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -67,9 +67,9 @@ dependencies = [
{ name = "requests-oauthlib" },
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bc/f5/97dc06b3401bb67abcef6e8bef7155f192b75795c2a2aa4d59eb5aa7fa66/apprise-1.9.7.tar.gz", hash = "sha256:2f73cc1e0264fb119fdb9b7cde82e8fde40a0f531ac885d8c6f0cf0f6e13aec2", size = 1937173 }
sdist = { url = "https://files.pythonhosted.org/packages/20/f4/be5c7e39b83a2285ab62ae7c19bb10704836f59c0a5b4c471730f54c9f98/apprise-1.9.9.tar.gz", hash = "sha256:fd622c0df16bdc79ed385539735573488cafe2405d25747e87eebd6b09b26012", size = 2032822 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/6b/cfa80a13437896eb8f4504ddac6dfa4ef7f1d2b2261057aa4a30003b8de6/apprise-1.9.7-py3-none-any.whl", hash = "sha256:c7640a81a1097685de66e0508e3da89f49235d566cb44bbead1dd98419bf5ee3", size = 1459879 },
{ url = "https://files.pythonhosted.org/packages/e6/2f/54d068d7e011a8b4e0aae3e93b09a30b33bcf780829fe70c6e8876aeb0e0/apprise-1.9.9-py3-none-any.whl", hash = "sha256:55ceb8827a1c783d683881c9f77fa42eb43b3fc91b854419c452d557101c7068", size = 1519940 },
]
[[package]]
@@ -983,7 +983,7 @@ wheels = [
[[package]]
name = "remoteterm-meshcore"
version = "3.9.0"
version = "3.11.0"
source = { virtual = "." }
dependencies = [
{ name = "aiomqtt" },
@@ -1022,7 +1022,7 @@ dev = [
requires-dist = [
{ name = "aiomqtt", specifier = ">=2.0" },
{ name = "aiosqlite", specifier = ">=0.19.0" },
{ name = "apprise", specifier = ">=1.9.7" },
{ name = "apprise", specifier = ">=1.9.8" },
{ name = "boto3", specifier = ">=1.38.0" },
{ name = "fastapi", specifier = ">=0.115.0" },
{ name = "httpx", specifier = ">=0.28.1" },