diff --git a/AGENTS.md b/AGENTS.md
index f8aa473..f6debc9 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -296,9 +296,9 @@ cd frontend
npm run test:run
```
-### Before Completing Changes
+### Before Completing Major Changes
-**Always run `./scripts/all_quality.sh` before finishing any changes that have modified code or tests.** It is the standard repo gate: autofix first, then type checks, tests, and the standard frontend build. This is not necessary for docs-only changes.
+**Run `./scripts/all_quality.sh` before finishing major changes that have modified code or tests.** It is the standard repo gate: autofix first, then type checks, tests, and the standard frontend build. This is not necessary for docs-only changes. For minor changes (like wording, color, spacing, etc.), wait until prompted to run the quality gate.
## API Summary
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3cc0059..9e75979 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,36 @@
+## [3.6.2] - 2026-03-29
+
+Feature: Be more flexible about timing and volume of full contact offload
+Feature: Improve room server and repeater ops to be much clearer about auth status
+Feature: Show last error status on integrations
+Feature: Push multi-platform docker builds
+Bugfix: Fix advert interval time unit display
+Bugfix: Don't cast RSSI/SNR to string for community MQTT
+Bugfix: Map uploader follows redirect
+Misc: Thin out unnecessary cruft in unreads endpoint
+Misc: Fall back gracefully if linked to an unknown contact
+
+## [3.6.1] - 2026-03-26
+
+Feature: MeshCore Map integration
+Feature: Add warning screen about bots
+Feature: Favicon reflects unread message state
+Feature: Show hop map in larger modal
+Feature: Add prebuilt frontend install script
+Feature: Add clean service installer script
+Feature: Swipe in to show menu
+Bugfix: Invalid backend API path serves error, not fallback index
+Bugfix: Fix some spacing/page height issues
+Misc: Misc. bugfixes and performance and test improvements
+
+## [3.6.0] - 2026-03-22
+
+Feature: Add incoming-packet analytics
+Feature: BYOPacket for analysis
+Feature: Add room activity to stats view
+Bugfix: Handle Heltec v3 serial noise
+Misc: Swap repeaters and room servers for better ordering
+
## [3.5.0] - 2026-03-19
Feature: Add room server alpha support
diff --git a/LICENSES.md b/LICENSES.md
index db8924e..7fac22f 100644
--- a/LICENSES.md
+++ b/LICENSES.md
@@ -330,7 +330,7 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-### meshcore (2.3.1) — MIT
+### meshcore (2.3.2) — MIT
Full license text
@@ -1592,6 +1592,39 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
+### react-swipeable (7.0.2) — MIT
+
+
+Full license text
+
+```
+The MIT License (MIT)
+
+Copyright (C) 2014-2022 Josh Perez
+Copyright (C) 2014-2022 Brian Emil Hartz
+Copyright (C) 2022 Formidable Labs, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+```
+
+
+
### sonner (2.0.7) — MIT
diff --git a/README.md b/README.md
index 455fd67..a20e334 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@ Backend server + browser interface for MeshCore mesh radio networks. Connect you
* Run multiple Python bots that can analyze messages and respond to DMs and channels
* Monitor unlimited contacts and channels (radio limits don't apply -- packets are decrypted server-side)
* Access your radio remotely over your network or VPN
-* Search for hashtag room names for channels you don't have keys for yet
+* Search for hashtag channel names for channels you don't have keys for yet
* Forward packets to MQTT, LetsMesh, MeshRank, SQS, Apprise, etc.
* Use the more recent 1.14 firmwares which support multibyte pathing
* Visualize the mesh as a map or node set, view repeater stats, and more!
@@ -41,8 +41,6 @@ If you plan to contribute, read [CONTRIBUTING.md](CONTRIBUTING.md).
- [UV](https://astral.sh/uv) package manager: `curl -LsSf https://astral.sh/uv/install.sh | sh`
- MeshCore radio connected via USB serial, TCP, or BLE
-If you are on a low-resource system and do not want to build the frontend locally, download the release zip named `remoteterm-prebuilt-frontend-vX.X.X-.zip`. That bundle includes `frontend/prebuilt`, so you can run the app without doing a frontend build from source.
-
Finding your serial port
@@ -97,6 +95,8 @@ Access the app at http://localhost:8000.
Source checkouts expect a normal frontend build in `frontend/dist`.
+On Linux, if you want this installed as a persistent `systemd` service that starts on boot and restarts automatically on failure, run `bash scripts/install_service.sh` from the repo root.
+
## Path 1.5: Use The Prebuilt Release Zip
Release zips can be found as an asset within the [releases listed here](https://github.com/jkingsman/Remote-Terminal-for-MeshCore/releases). This can be beneficial on resource constrained systems that cannot cope with the RAM-hungry frontend build process.
@@ -111,10 +111,14 @@ uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
The release bundle includes `frontend/prebuilt`, so it does not require a local frontend build.
+Alternatively, if you have already cloned the repo, you can fetch just the prebuilt frontend into your working tree without downloading the full release zip via `python3 scripts/fetch_prebuilt_frontend.py`.
+
## Path 2: Docker
> **Warning:** Docker has had reports intermittent issues with serial event subscriptions. The native method above is more reliable.
+Local Docker builds are architecture-native by default. On Apple Silicon Macs and ARM64 Linux hosts such as Raspberry Pi, `docker compose build` / `docker compose up --build` will produce an ARM64 image unless you override the platform.
+
Edit `docker-compose.yaml` to set a serial device for passthrough, or uncomment your transport (serial or TCP). Then:
```bash
@@ -146,6 +150,15 @@ docker compose pull
docker compose up -d
```
+Published Docker tags are intended to be multi-arch (`linux/amd64` and `linux/arm64`). If you are building and publishing manually, use Docker Buildx:
+
+```bash
+docker buildx build \
+ --platform linux/amd64,linux/arm64 \
+ -t jkingsman/remoteterm-meshcore:latest \
+ --push .
+```
+
The container runs as root by default for maximum serial passthrough compatibility across host setups. On Linux, if you switch between native and Docker runs, `./data` can end up root-owned. If you do not need that serial compatibility behavior, you can enable the optional `user: "${UID:-1000}:${GID:-1000}"` line in `docker-compose.yaml` to keep ownership aligned with your host user.
To stop:
@@ -192,7 +205,7 @@ $env:MESHCORE_SERIAL_PORT="COM8" # or your COM port
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
```
-If you enable Basic Auth, protect the app with HTTPS. HTTP Basic credentials are not safe on plain HTTP.
+If you enable Basic Auth, protect the app with HTTPS. HTTP Basic credentials are not safe on plain HTTP. Also note that the app's permissive CORS policy is a deliberate trusted-network tradeoff, so cross-origin browser JavaScript is not a reliable way to use that Basic Auth gate.
## Where To Go Next
diff --git a/README_ADVANCED.md b/README_ADVANCED.md
index f061324..2b0684c 100644
--- a/README_ADVANCED.md
+++ b/README_ADVANCED.md
@@ -21,7 +21,7 @@ If the audit finds a mismatch, you'll see an error in the application UI and you
## HTTPS
-WebGPU room-finding requires a secure context when you are not on `localhost`.
+WebGPU channel-finding requires a secure context when you are not on `localhost`.
Generate a local cert and start the backend with TLS:
@@ -46,59 +46,37 @@ Accept the browser warning, or use [mkcert](https://github.com/FiloSottile/mkcer
## Systemd Service
-Assumes you are running from `/opt/remoteterm`; adjust paths if you deploy elsewhere.
+Two paths are available depending on your comfort level with Linux system administration.
+
+### Simple install (recommended for most users)
+
+On Linux systems, this is the recommended installation method if you want RemoteTerm set up as a persistent systemd service that starts automatically on boot and restarts automatically if it crashes. Run the installer script from the repo root. It runs as your current user, installs from wherever you cloned the repo, and prints a quick-reference cheatsheet when done — no separate service account or path juggling required.
```bash
-# Create service user
-sudo useradd -r -m -s /bin/false remoteterm
-
-# Install to /opt/remoteterm
-sudo mkdir -p /opt/remoteterm
-sudo cp -r . /opt/remoteterm/
-sudo chown -R remoteterm:remoteterm /opt/remoteterm
-
-# Install dependencies
-cd /opt/remoteterm
-sudo -u remoteterm uv venv
-sudo -u remoteterm uv sync
-
-# If deploying from a source checkout, build the frontend first
-sudo -u remoteterm bash -lc 'cd /opt/remoteterm/frontend && npm install && npm run build'
-
-# If deploying from the release zip artifact, frontend/prebuilt is already present
+bash scripts/install_service.sh
```
-Create `/etc/systemd/system/remoteterm.service` with:
+The script interactively asks which transport to use (serial auto-detect, serial with explicit port, TCP, or BLE), whether to build the frontend locally or download a prebuilt copy, whether to enable the bot system, and whether to set up HTTP Basic Auth. It handles dependency installation (`uv sync`), validates `node`/`npm` for local builds, adds your user to the `dialout` group if needed, writes the systemd unit file, and enables the service. After installation, normal operations work without any `sudo -u` gymnastics:
-```ini
-[Unit]
-Description=RemoteTerm for MeshCore
-After=network.target
-
-[Service]
-Type=simple
-User=remoteterm
-Group=remoteterm
-WorkingDirectory=/opt/remoteterm
-ExecStart=/opt/remoteterm/.venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000
-Restart=always
-RestartSec=5
-Environment=MESHCORE_DATABASE_PATH=/opt/remoteterm/data/meshcore.db
-# Uncomment and set if auto-detection doesn't work:
-# Environment=MESHCORE_SERIAL_PORT=/dev/ttyUSB0
-SupplementaryGroups=dialout
-
-[Install]
-WantedBy=multi-user.target
-```
-
-Then install and start it:
+You can also rerun the script later to change transport, bot, or auth settings. If the service is already running, the installer stops it, rewrites the unit file, reloads systemd, and starts it again with the new configuration.
```bash
-sudo systemctl daemon-reload
-sudo systemctl enable --now remoteterm
-sudo systemctl status remoteterm
+# Update to latest and restart
+cd /path/to/repo
+git pull
+uv sync
+cd frontend && npm install && npm run build && cd ..
+sudo systemctl restart remoteterm
+
+# Refresh prebuilt frontend only (skips local build)
+python3 scripts/fetch_prebuilt_frontend.py
+sudo systemctl restart remoteterm
+
+# View live logs
sudo journalctl -u remoteterm -f
+
+# Service control
+sudo systemctl start|stop|restart|status remoteterm
```
## Debug Logging And Bug Reports
diff --git a/app/AGENTS.md b/app/AGENTS.md
index f87c8a8..105fe7f 100644
--- a/app/AGENTS.md
+++ b/app/AGENTS.md
@@ -101,7 +101,7 @@ app/
- Packet `path_len` values are hop counts, not byte counts.
- Hop width comes from the packet or radio `path_hash_mode`: `0` = 1-byte, `1` = 2-byte, `2` = 3-byte.
- Channel slot count comes from firmware-reported `DEVICE_INFO.max_channels`; do not hardcode `40` when scanning/offloading channel slots.
-- Channel sends use a session-local LRU slot cache after startup channel offload clears the radio. Repeated sends to the same room reuse the loaded slot; new rooms fill free slots up to the discovered channel capacity, then evict the least recently used cached room.
+- Channel sends use a session-local LRU slot cache after startup channel offload clears the radio. Repeated sends to the same channel reuse the loaded slot; new channels fill free slots up to the discovered channel capacity, then evict the least recently used cached channel.
- TCP radios do not reuse cached slot contents. For TCP, channel sends still force `set_channel(...)` before every send because this backend does not have exclusive device access.
- `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true` disables slot reuse on all transports and forces the old always-`set_channel(...)` behavior before every channel send.
- Contacts persist canonical direct-route fields (`direct_path`, `direct_path_len`, `direct_path_hash_mode`) so contact sync and outbound DM routing reuse the exact stored hop width instead of inferring from path bytes.
diff --git a/app/events.py b/app/events.py
index 51c6ecb..35a9b87 100644
--- a/app/events.py
+++ b/app/events.py
@@ -44,6 +44,7 @@ class MessageAckedPayload(TypedDict):
message_id: int
ack_count: int
paths: NotRequired[list[MessagePath]]
+ packet_id: NotRequired[int | None]
class ToastPayload(TypedDict):
diff --git a/app/fanout/AGENTS_fanout.md b/app/fanout/AGENTS_fanout.md
index 30a4069..56280ac 100644
--- a/app/fanout/AGENTS_fanout.md
+++ b/app/fanout/AGENTS_fanout.md
@@ -89,6 +89,19 @@ Amazon SQS delivery. Config blob:
- Publishes a JSON envelope of the form `{"event_type":"message"|"raw_packet","data":...}`
- Supports both decoded messages and raw packets via normal scope selection
+### map_upload (map_upload.py)
+Uploads heard repeater and room-server advertisements to map.meshcore.dev. Config blob:
+- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.dev API
+- `dry_run` (bool, default `true`) — when true, logs the payload at INFO level without sending
+- `geofence_enabled` (bool, default `false`) — when true, only uploads nodes within `geofence_radius_km` of the radio's own configured lat/lon
+- `geofence_radius_km` (float, default `0`) — filter radius in kilometres
+
+Geofence notes:
+- The reference center is always the radio's own `adv_lat`/`adv_lon` from `radio_runtime.meshcore.self_info`, read **live at upload time** — no lat/lon is stored in the fanout config itself.
+- If the radio's lat/lon is `(0, 0)` or the radio is not connected, the geofence check is silently skipped so uploads continue normally until coordinates are configured.
+- Requires the radio to have `ENABLE_PRIVATE_KEY_EXPORT=1` firmware to sign uploads.
+- Scope is always `{"messages": "none", "raw_packets": "all"}` — only raw RF packets are processed.
+
## Adding a New Integration Type
### Step-by-step checklist
@@ -291,6 +304,7 @@ Migrations:
- `app/fanout/webhook.py` — Webhook fanout module
- `app/fanout/apprise_mod.py` — Apprise fanout module
- `app/fanout/sqs.py` — Amazon SQS fanout module
+- `app/fanout/map_upload.py` — Map Upload fanout module
- `app/repository/fanout.py` — Database CRUD
- `app/routers/fanout.py` — REST API
- `app/websocket.py` — `broadcast_event()` dispatches to fanout
diff --git a/app/fanout/apprise_mod.py b/app/fanout/apprise_mod.py
index c463aee..9b71081 100644
--- a/app/fanout/apprise_mod.py
+++ b/app/fanout/apprise_mod.py
@@ -95,7 +95,6 @@ class AppriseModule(FanoutModule):
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
super().__init__(config_id, config, name=name)
- self._last_error: str | None = None
async def on_message(self, data: dict) -> None:
# Skip outgoing messages — only notify on incoming
@@ -114,17 +113,17 @@ class AppriseModule(FanoutModule):
success = await asyncio.to_thread(
_send_sync, urls, body, preserve_identity=preserve_identity
)
- self._last_error = None if success else "Apprise notify returned failure"
+ self._set_last_error(None if success else "Apprise notify returned failure")
if not success:
logger.warning("Apprise notification failed for module %s", self.config_id)
except Exception as exc:
- self._last_error = str(exc)
+ self._set_last_error(str(exc))
logger.exception("Apprise send error for module %s", self.config_id)
@property
def status(self) -> str:
if not self.config.get("urls", "").strip():
return "disconnected"
- if self._last_error:
+ if self.last_error:
return "error"
return "connected"
diff --git a/app/fanout/base.py b/app/fanout/base.py
index 3ad269f..efe2e49 100644
--- a/app/fanout/base.py
+++ b/app/fanout/base.py
@@ -3,6 +3,14 @@
from __future__ import annotations
+def _broadcast_fanout_health() -> None:
+ """Push updated fanout status to connected frontend clients."""
+ from app.services.radio_runtime import radio_runtime as radio_manager
+ from app.websocket import broadcast_health
+
+ broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
+
+
class FanoutModule:
"""Base class for all fanout integrations.
@@ -16,6 +24,7 @@ class FanoutModule:
self.config_id = config_id
self.config = config
self.name = name
+ self._last_error: str | None = None
async def start(self) -> None:
"""Start the module (e.g. connect to broker). Override for persistent connections."""
@@ -34,6 +43,18 @@ class FanoutModule:
"""Return 'connected', 'disconnected', or 'error'."""
raise NotImplementedError
+ @property
+ def last_error(self) -> str | None:
+ """Return the most recent retained operator-facing error, if any."""
+ return self._last_error
+
+ def _set_last_error(self, value: str | None) -> None:
+ """Update the retained error and broadcast health when it changes."""
+ if self._last_error == value:
+ return
+ self._last_error = value
+ _broadcast_fanout_health()
+
def get_fanout_message_text(data: dict) -> str:
"""Return the best human-readable message body for fanout consumers.
diff --git a/app/fanout/community_mqtt.py b/app/fanout/community_mqtt.py
index b3af953..cd9eef1 100644
--- a/app/fanout/community_mqtt.py
+++ b/app/fanout/community_mqtt.py
@@ -20,9 +20,9 @@ from datetime import datetime
from typing import Any, Protocol
import aiomqtt
-import nacl.bindings
from app.fanout.mqtt_base import BaseMqttPublisher
+from app.keystore import ed25519_sign_expanded
from app.path_utils import parse_packet_envelope, split_path_hex
from app.version_info import get_app_build_info
@@ -40,9 +40,6 @@ _TOKEN_RENEWAL_THRESHOLD = _TOKEN_LIFETIME - 3600 # 23 hours
_STATS_REFRESH_INTERVAL = 300 # 5 minutes
_STATS_MIN_CACHE_SECS = 60 # Don't re-fetch stats within 60s
-# Ed25519 group order
-_L = 2**252 + 27742317777372353535851937790883648493
-
# Route type mapping: bottom 2 bits of first byte
_ROUTE_MAP = {0: "F", 1: "F", 2: "D", 3: "T"}
@@ -69,28 +66,6 @@ def _base64url_encode(data: bytes) -> str:
return base64.urlsafe_b64encode(data).rstrip(b"=").decode("ascii")
-def _ed25519_sign_expanded(
- message: bytes, scalar: bytes, prefix: bytes, public_key: bytes
-) -> bytes:
- """Sign a message using MeshCore's expanded Ed25519 key format.
-
- MeshCore stores 64-byte "orlp" format keys: scalar(32) || prefix(32).
- Standard Ed25519 libraries expect seed format and would re-SHA-512 the key.
- This performs the signing manually using the already-expanded key material.
-
- Port of meshcore-packet-capture's ed25519_sign_with_expanded_key().
- """
- # r = SHA-512(prefix || message) mod L
- r = int.from_bytes(hashlib.sha512(prefix + message).digest(), "little") % _L
- # R = r * B (base point multiplication)
- R = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(r.to_bytes(32, "little"))
- # k = SHA-512(R || public_key || message) mod L
- k = int.from_bytes(hashlib.sha512(R + public_key + message).digest(), "little") % _L
- # s = (r + k * scalar) mod L
- s = (r + k * int.from_bytes(scalar, "little")) % _L
- return R + s.to_bytes(32, "little")
-
-
def _generate_jwt_token(
private_key: bytes,
public_key: bytes,
@@ -127,7 +102,7 @@ def _generate_jwt_token(
scalar = private_key[:32]
prefix = private_key[32:]
- signature = _ed25519_sign_expanded(signing_input, scalar, prefix, public_key)
+ signature = ed25519_sign_expanded(signing_input, scalar, prefix, public_key)
return f"{header_b64}.{payload_b64}.{signature.hex()}"
@@ -200,11 +175,12 @@ def _format_raw_packet(data: dict[str, Any], device_name: str, public_key_hex: s
current_time = datetime.now()
ts_str = current_time.isoformat()
- # SNR/RSSI are always strings in reference output.
+ # Keep numeric telemetry numeric so downstream analyzers can ingest it.
+ # Preserve the existing "Unknown" fallback for missing values.
snr_val = data.get("snr")
rssi_val = data.get("rssi")
- snr = str(snr_val) if snr_val is not None else "Unknown"
- rssi = str(rssi_val) if rssi_val is not None else "Unknown"
+ snr: float | str = float(snr_val) if snr_val is not None else "Unknown"
+ rssi: int | str = int(rssi_val) if rssi_val is not None else "Unknown"
packet_hash = _calculate_packet_hash(raw_bytes)
diff --git a/app/fanout/manager.py b/app/fanout/manager.py
index b393a89..dcb7353 100644
--- a/app/fanout/manager.py
+++ b/app/fanout/manager.py
@@ -15,12 +15,21 @@ _DISPATCH_TIMEOUT_SECONDS = 30.0
_MODULE_TYPES: dict[str, type] = {}
+def _format_error_detail(exc: Exception) -> str:
+ """Return a short operator-facing error string."""
+ message = str(exc).strip()
+ if message:
+ return f"{type(exc).__name__}: {message}"
+ return type(exc).__name__
+
+
def _register_module_types() -> None:
"""Lazily populate the type registry to avoid circular imports."""
if _MODULE_TYPES:
return
from app.fanout.apprise_mod import AppriseModule
from app.fanout.bot import BotModule
+ from app.fanout.map_upload import MapUploadModule
from app.fanout.mqtt_community import MqttCommunityModule
from app.fanout.mqtt_private import MqttPrivateModule
from app.fanout.sqs import SqsModule
@@ -32,6 +41,7 @@ def _register_module_types() -> None:
_MODULE_TYPES["webhook"] = WebhookModule
_MODULE_TYPES["apprise"] = AppriseModule
_MODULE_TYPES["sqs"] = SqsModule
+ _MODULE_TYPES["map_upload"] = MapUploadModule
def _matches_filter(filter_value: Any, key: str) -> bool:
@@ -82,6 +92,38 @@ class FanoutManager:
def __init__(self) -> None:
self._modules: dict[str, tuple[FanoutModule, dict]] = {} # id -> (module, scope)
self._restart_locks: dict[str, asyncio.Lock] = {}
+ self._bots_disabled_until_restart = False
+ self._module_errors: dict[str, str] = {}
+
+ def _broadcast_health_update(self) -> None:
+ from app.services.radio_runtime import radio_runtime as radio_manager
+ from app.websocket import broadcast_health
+
+ broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
+
+ def _set_module_error(self, config_id: str, error: str) -> None:
+ if self._module_errors.get(config_id) == error:
+ return
+ self._module_errors[config_id] = error
+ self._broadcast_health_update()
+
+ def _clear_module_error(self, config_id: str) -> None:
+ if self._module_errors.pop(config_id, None) is not None:
+ self._broadcast_health_update()
+
+ def get_bots_disabled_source(self) -> str | None:
+ """Return why bot modules are unavailable, if at all."""
+ from app.config import settings as server_settings
+
+ if server_settings.disable_bots:
+ return "env"
+ if self._bots_disabled_until_restart:
+ return "until_restart"
+ return None
+
+ def bots_disabled_effective(self) -> bool:
+ """Return True when bot modules should be treated as unavailable."""
+ return self.get_bots_disabled_source() is not None
async def load_from_db(self) -> None:
"""Read enabled fanout_configs and instantiate modules."""
@@ -99,13 +141,14 @@ class FanoutManager:
config_blob = cfg["config"]
scope = cfg["scope"]
- # Skip bot modules when bots are disabled server-wide
- if config_type == "bot":
- from app.config import settings as server_settings
-
- if server_settings.disable_bots:
- logger.info("Skipping bot module %s (bots disabled by server config)", config_id)
- return
+ # Skip bot modules when bots are disabled server-wide or until restart.
+ if config_type == "bot" and self.bots_disabled_effective():
+ logger.info(
+ "Skipping bot module %s (bots disabled: %s)",
+ config_id,
+ self.get_bots_disabled_source(),
+ )
+ return
cls = _MODULE_TYPES.get(config_type)
if cls is None:
@@ -116,11 +159,13 @@ class FanoutManager:
module = cls(config_id, config_blob, name=cfg.get("name", ""))
await module.start()
self._modules[config_id] = (module, scope)
+ self._clear_module_error(config_id)
logger.info(
"Started fanout module %s (type=%s)", cfg.get("name", config_id), config_type
)
- except Exception:
+ except Exception as exc:
logger.exception("Failed to start fanout module %s", config_id)
+ self._set_module_error(config_id, _format_error_detail(exc))
async def reload_config(self, config_id: str) -> None:
"""Stop old module (if any) and start updated config."""
@@ -144,6 +189,7 @@ class FanoutManager:
await module.stop()
except Exception:
logger.exception("Error stopping fanout module %s", config_id)
+ self._clear_module_error(config_id)
async def _dispatch_matching(
self,
@@ -173,7 +219,10 @@ class FanoutManager:
try:
handler = getattr(module, handler_name)
await asyncio.wait_for(handler(data), timeout=_DISPATCH_TIMEOUT_SECONDS)
+ self._clear_module_error(config_id)
except asyncio.TimeoutError:
+ timeout_error = f"{handler_name} timed out after {_DISPATCH_TIMEOUT_SECONDS:.1f}s"
+ self._set_module_error(config_id, timeout_error)
logger.error(
"Fanout %s %s timed out after %.1fs; restarting module",
config_id,
@@ -181,7 +230,8 @@ class FanoutManager:
_DISPATCH_TIMEOUT_SECONDS,
)
await self._restart_module(config_id, module)
- except Exception:
+ except Exception as exc:
+ self._set_module_error(config_id, _format_error_detail(exc))
logger.exception("Fanout %s %s error", config_id, log_label)
async def _restart_module(self, config_id: str, module: FanoutModule) -> None:
@@ -197,6 +247,10 @@ class FanoutManager:
except Exception:
logger.exception("Failed to restart timed-out fanout module %s", config_id)
self._modules.pop(config_id, None)
+ self._set_module_error(
+ config_id,
+ "Module restart failed after timeout",
+ )
async def broadcast_message(self, data: dict) -> None:
"""Dispatch a decoded message to modules whose scope matches."""
@@ -225,21 +279,62 @@ class FanoutManager:
logger.exception("Error stopping fanout module %s", config_id)
self._modules.clear()
self._restart_locks.clear()
+ self._module_errors.clear()
- def get_statuses(self) -> dict[str, dict[str, str]]:
+ def get_statuses(self) -> dict[str, dict[str, str | None]]:
"""Return status info for each active module."""
from app.repository.fanout import _configs_cache
- result: dict[str, dict[str, str]] = {}
- for config_id, (module, _) in self._modules.items():
+ result: dict[str, dict[str, str | None]] = {}
+ all_ids = set(_configs_cache) | set(self._modules) | set(self._module_errors)
+ for config_id in all_ids:
info = _configs_cache.get(config_id, {})
+ if info.get("enabled") is False:
+ continue
+
+ module_entry = self._modules.get(config_id)
+ module = module_entry[0] if module_entry is not None else None
+ last_error = module.last_error if module is not None else None
+ status = module.status if module is not None else "error"
+
+ manager_error = self._module_errors.get(config_id)
+ if manager_error is not None:
+ status = "error"
+ last_error = manager_error
+ elif last_error is not None and status != "error":
+ status = "error"
+
+ if module is None and last_error is None:
+ continue
+
result[config_id] = {
"name": info.get("name", config_id),
"type": info.get("type", "unknown"),
- "status": module.status,
+ "status": status,
+ "last_error": last_error,
}
return result
+ async def disable_bots_until_restart(self) -> str:
+ """Stop active bot modules and prevent them from starting again until restart."""
+ source = self.get_bots_disabled_source()
+ if source == "env":
+ return source
+
+ self._bots_disabled_until_restart = True
+
+ from app.repository.fanout import _configs_cache
+
+ bot_ids = [
+ config_id
+ for config_id in list(self._modules)
+ if _configs_cache.get(config_id, {}).get("type") == "bot"
+ ]
+ for config_id in bot_ids:
+ await self.remove_config(config_id)
+
+ return "until_restart"
+
# Module-level singleton
fanout_manager = FanoutManager()
diff --git a/app/fanout/map_upload.py b/app/fanout/map_upload.py
new file mode 100644
index 0000000..6876d17
--- /dev/null
+++ b/app/fanout/map_upload.py
@@ -0,0 +1,322 @@
+"""Fanout module for uploading heard advert packets to map.meshcore.dev.
+
+Mirrors the logic of the standalone map.meshcore.dev-uploader project:
+- Listens on raw RF packets via on_raw
+- Filters for ADVERT packets, only processes repeaters (role 2) and rooms (role 3)
+- Skips nodes with no valid location (lat/lon None)
+- Applies per-pubkey rate-limiting (1-hour window, matching the uploader)
+- Signs the upload request with the radio's own Ed25519 private key
+- POSTs to the map API (or logs in dry-run mode)
+
+Dry-run mode (default: True) logs the full would-be payload at INFO level
+without making any HTTP requests. Disable it only after verifying the log
+output looks correct — in particular the radio params (freq/bw/sf/cr) and
+the raw hex link.
+
+Config keys
+-----------
+api_url : str, default ""
+ Upload endpoint. Empty string falls back to the public map.meshcore.dev API.
+dry_run : bool, default True
+ When True, log the payload at INFO level instead of sending it.
+geofence_enabled : bool, default False
+ When True, only upload nodes whose location falls within geofence_radius_km of
+ the radio's own configured latitude/longitude (read live from the radio at upload
+ time — no lat/lon is stored in this config). When the radio's lat/lon is not set
+ (0, 0) or unavailable, the geofence check is silently skipped so uploads continue
+ normally until coordinates are configured.
+geofence_radius_km : float, default 0.0
+ Radius of the geofence in kilometres. Nodes further than this distance
+ from the radio's own position are skipped.
+"""
+
+from __future__ import annotations
+
+import hashlib
+import json
+import logging
+import math
+
+import httpx
+
+from app.decoder import parse_advertisement, parse_packet
+from app.fanout.base import FanoutModule
+from app.keystore import ed25519_sign_expanded, get_private_key, get_public_key
+from app.services.radio_runtime import radio_runtime
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_API_URL = "https://map.meshcore.dev/api/v1/uploader/node"
+
+# Re-upload guard: skip re-uploading a pubkey seen within this window (AU parity)
+_REUPLOAD_SECONDS = 3600
+
+# Only upload repeaters (2) and rooms (3). Any other role — including future
+# roles not yet defined — is rejected. An allowlist is used rather than a
+# blocklist so that new roles cannot accidentally start populating the map.
+_ALLOWED_DEVICE_ROLES = {2, 3}
+
+
+def _get_radio_params() -> dict:
+    """Read radio frequency parameters from the connected radio's self_info.
+
+    The Python meshcore library returns radio_freq in MHz (e.g. 910.525) and
+    radio_bw in kHz (e.g. 62.5). These are exactly the units the map API
+    expects, matching what the JS reference uploader produces after its own
+    /1000 division on raw integer values. No further scaling is applied here.
+
+    Returns a dict with keys freq/cr/sf/bw; all zeros when no radio is
+    connected or self_info is unavailable/malformed.
+    """
+    try:
+        mc = radio_runtime.meshcore
+        if not mc:
+            return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
+        info = mc.self_info
+        if not isinstance(info, dict):
+            return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
+        # `or 0` also coerces an explicit None (field present but unset) to 0.
+        freq = info.get("radio_freq", 0) or 0
+        bw = info.get("radio_bw", 0) or 0
+        sf = info.get("radio_sf", 0) or 0
+        cr = info.get("radio_cr", 0) or 0
+        return {
+            "freq": freq,
+            "cr": cr,
+            "sf": sf,
+            "bw": bw,
+        }
+    except Exception as exc:
+        # Best-effort: radio params are informational — never let a read
+        # failure here break the upload path.
+        logger.debug("MapUpload: could not read radio params: %s", exc)
+        return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
+
+
+_ROLE_NAMES: dict[int, str] = {2: "repeater", 3: "room"}
+
+
+def _haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
+    """Return the great-circle distance in kilometres between two lat/lon points.
+
+    Standard haversine formula; inputs are in degrees.
+    """
+    r = 6371.0  # mean Earth radius, km
+    phi1, phi2 = math.radians(lat1), math.radians(lat2)
+    dphi = math.radians(lat2 - lat1)
+    dlam = math.radians(lon2 - lon1)
+    # a = sin^2(dphi/2) + cos(phi1)*cos(phi2)*sin^2(dlam/2)
+    a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2) ** 2
+    return 2 * r * math.asin(math.sqrt(a))
+
+
+class MapUploadModule(FanoutModule):
+    """Uploads heard ADVERT packets to the MeshCore community map."""
+
+    def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
+        super().__init__(config_id, config, name=name)
+        self._client: httpx.AsyncClient | None = None
+        # Per-pubkey rate limiting: pubkey_hex -> last_uploaded_advert_timestamp
+        # NOTE(review): grows unbounded (one entry per unique pubkey seen) until
+        # the module restarts — presumably fine for mesh-sized populations, but
+        # worth confirming for long-running deployments.
+        self._seen: dict[str, int] = {}
+
+    async def start(self) -> None:
+        # follow_redirects so uploads still land if the API endpoint redirects
+        # (see changelog: "Map uploader follows redirect").
+        self._client = httpx.AsyncClient(
+            timeout=httpx.Timeout(15.0),
+            follow_redirects=True,
+        )
+        self._last_error = None
+        self._seen.clear()
+
+    async def stop(self) -> None:
+        if self._client:
+            await self._client.aclose()
+        self._client = None
+        self._last_error = None
+
+    async def on_raw(self, data: dict) -> None:
+        # Fast-path filter: only ADVERT packets are relevant to the map.
+        if data.get("payload_type") != "ADVERT":
+            return
+
+        raw_hex = data.get("data", "")
+        if not raw_hex:
+            return
+
+        try:
+            raw_bytes = bytes.fromhex(raw_hex)
+        except ValueError:
+            # Malformed hex from upstream — silently drop.
+            return
+
+        packet_info = parse_packet(raw_bytes)
+        if packet_info is None:
+            return
+
+        advert = parse_advertisement(packet_info.payload, raw_packet=raw_bytes)
+        if advert is None:
+            return
+
+        # TODO: advert Ed25519 signature verification is skipped here.
+        # The radio has already validated the packet before passing it to RT,
+        # so re-verification is redundant in practice. If added, verify that
+        # nacl.bindings.crypto_sign_open(sig + (pubkey_bytes || timestamp_bytes),
+        # advert.public_key_bytes) succeeds before proceeding.
+
+        # Only process repeaters (2) and rooms (3) — any other role is rejected
+        if advert.device_role not in _ALLOWED_DEVICE_ROLES:
+            return
+
+        # Skip nodes with no valid location — the decoder already nulls out
+        # impossible values, so None means either no location flag or bad coords.
+        if advert.lat is None or advert.lon is None:
+            logger.debug(
+                "MapUpload: skipping %s — no valid location",
+                advert.public_key[:12],
+            )
+            return
+
+        # Normalize for use as the _seen key — keys must compare consistently.
+        pubkey = advert.public_key.lower()
+
+        # Rate-limit: skip if this pubkey's timestamp hasn't advanced enough
+        last_seen = self._seen.get(pubkey)
+        if last_seen is not None:
+            # Equal-or-older advert timestamp than last upload: replay or dupe.
+            if last_seen >= advert.timestamp:
+                logger.debug(
+                    "MapUpload: skipping %s — possible replay (last=%d, advert=%d)",
+                    pubkey[:12],
+                    last_seen,
+                    advert.timestamp,
+                )
+                return
+            if advert.timestamp < last_seen + _REUPLOAD_SECONDS:
+                logger.debug(
+                    "MapUpload: skipping %s — within 1-hr rate-limit window (delta=%ds)",
+                    pubkey[:12],
+                    advert.timestamp - last_seen,
+                )
+                return
+
+        await self._upload(
+            pubkey, advert.timestamp, advert.device_role, raw_hex, advert.lat, advert.lon
+        )
+
+    async def _upload(
+        self,
+        pubkey: str,
+        advert_timestamp: int,
+        device_role: int,
+        raw_hex: str,
+        lat: float,
+        lon: float,
+    ) -> None:
+        # Geofence check: if enabled, skip nodes outside the configured radius.
+        # The reference center is the radio's own lat/lon read live from self_info —
+        # no coordinates are stored in the fanout config. If the radio lat/lon is
+        # (0, 0) or unavailable the check is skipped transparently so uploads
+        # continue normally until the operator sets coordinates in radio settings.
+        geofence_dist_km: float | None = None
+        if self.config.get("geofence_enabled"):
+            try:
+                mc = radio_runtime.meshcore
+                sinfo = mc.self_info if mc else None
+                fence_lat = float((sinfo or {}).get("adv_lat", 0) or 0)
+                fence_lon = float((sinfo or {}).get("adv_lon", 0) or 0)
+            except Exception as exc:
+                logger.debug("MapUpload: could not read radio lat/lon for geofence: %s", exc)
+                fence_lat = 0.0
+                fence_lon = 0.0
+
+            if fence_lat == 0.0 and fence_lon == 0.0:
+                logger.debug(
+                    "MapUpload: geofence skipped for %s — radio lat/lon not configured",
+                    pubkey[:12],
+                )
+            else:
+                fence_radius_km = float(self.config.get("geofence_radius_km", 0) or 0)
+                # NOTE(review): with the documented default radius of 0.0, any
+                # nonzero distance fails the strict `>` check below — enabling
+                # the geofence without setting a radius skips every node.
+                # Confirm the UI requires a radius when geofence_enabled is set.
+                geofence_dist_km = _haversine_km(fence_lat, fence_lon, lat, lon)
+                if geofence_dist_km > fence_radius_km:
+                    logger.debug(
+                        "MapUpload: skipping %s — outside geofence (%.2f km > %.2f km)",
+                        pubkey[:12],
+                        geofence_dist_km,
+                        fence_radius_km,
+                    )
+                    return
+
+        private_key = get_private_key()
+        public_key = get_public_key()
+
+        if private_key is None or public_key is None:
+            logger.warning(
+                "MapUpload: private key not available — cannot sign upload for %s. "
+                "Ensure radio firmware has ENABLE_PRIVATE_KEY_EXPORT=1.",
+                pubkey[:12],
+            )
+            return
+
+        # Empty/blank api_url falls back to the public map endpoint.
+        api_url = str(self.config.get("api_url", "") or _DEFAULT_API_URL).strip()
+        dry_run = bool(self.config.get("dry_run", True))
+        role_name = _ROLE_NAMES.get(device_role, f"role={device_role}")
+
+        params = _get_radio_params()
+        upload_data = {
+            "params": params,
+            "links": [f"meshcore://{raw_hex}"],
+        }
+
+        # Sign: SHA-256 the compact JSON, then Ed25519-sign the hash
+        json_str = json.dumps(upload_data, separators=(",", ":"))
+        data_hash = hashlib.sha256(json_str.encode()).digest()
+        # MeshCore expanded key layout: scalar(32) || prefix(32).
+        scalar = private_key[:32]
+        prefix_bytes = private_key[32:]
+        signature = ed25519_sign_expanded(data_hash, scalar, prefix_bytes, public_key)
+
+        request_payload = {
+            "data": json_str,
+            "signature": signature.hex(),
+            "publicKey": public_key.hex(),
+        }
+
+        if dry_run:
+            geofence_note = (
+                f" | geofence: {geofence_dist_km:.2f} km from observer"
+                if geofence_dist_km is not None
+                else ""
+            )
+            logger.info(
+                "MapUpload [DRY RUN] %s (%s)%s → would POST to %s\n  payload: %s",
+                pubkey[:12],
+                role_name,
+                geofence_note,
+                api_url,
+                json.dumps(request_payload, separators=(",", ":")),
+            )
+            # Still update _seen so rate-limiting works during dry-run testing
+            self._seen[pubkey] = advert_timestamp
+            return
+
+        if not self._client:
+            return
+
+        try:
+            # Send the exact signed bytes — re-serializing via json= could
+            # change separators and invalidate the signature.
+            resp = await self._client.post(
+                api_url,
+                content=json.dumps(request_payload, separators=(",", ":")),
+                headers={"Content-Type": "application/json"},
+            )
+            resp.raise_for_status()
+            self._seen[pubkey] = advert_timestamp
+            self._set_last_error(None)
+            logger.info(
+                "MapUpload: uploaded %s (%s) → HTTP %d",
+                pubkey[:12],
+                role_name,
+                resp.status_code,
+            )
+        except httpx.HTTPStatusError as exc:
+            self._set_last_error(f"HTTP {exc.response.status_code}")
+            logger.warning(
+                "MapUpload: server returned %d for %s: %s",
+                exc.response.status_code,
+                pubkey[:12],
+                exc.response.text[:200],
+            )
+        except httpx.RequestError as exc:
+            self._set_last_error(str(exc))
+            logger.warning("MapUpload: request error for %s: %s", pubkey[:12], exc)
+
+    @property
+    def status(self) -> str:
+        # disconnected until start() creates the HTTP client; "error" sticks
+        # until a later upload succeeds and clears last_error.
+        if self._client is None:
+            return "disconnected"
+        if self.last_error:
+            return "error"
+        return "connected"
diff --git a/app/fanout/mqtt_base.py b/app/fanout/mqtt_base.py
index f9c725d..467b3b2 100644
--- a/app/fanout/mqtt_base.py
+++ b/app/fanout/mqtt_base.py
@@ -23,6 +23,14 @@ logger = logging.getLogger(__name__)
_BACKOFF_MIN = 5
+def _format_error_detail(exc: Exception) -> str:
+    """Return a short operator-facing error string.
+
+    Falls back to the exception class name when str(exc) is empty or
+    whitespace-only (e.g. exceptions raised with no message).
+    """
+    message = str(exc).strip()
+    if message:
+        return message
+    return type(exc).__name__
+
+
def _broadcast_health() -> None:
"""Push updated health (including MQTT status) to all WS clients."""
from app.services.radio_runtime import radio_runtime as radio_manager
@@ -55,6 +63,7 @@ class BaseMqttPublisher(ABC):
self._version_event: asyncio.Event = asyncio.Event()
self.connected: bool = False
self.integration_name: str = ""
+ self._last_error: str | None = None
def set_integration_name(self, name: str) -> None:
"""Attach the configured fanout-module name for operator-facing logs."""
@@ -66,11 +75,17 @@ class BaseMqttPublisher(ABC):
return f"{self._log_prefix} [{self.integration_name}]"
return self._log_prefix
+ @property
+ def last_error(self) -> str | None:
+ """Return the most recent retained connection/publish error."""
+ return self._last_error
+
# ── Lifecycle ──────────────────────────────────────────────────────
async def start(self, settings: object) -> None:
"""Start the background connection loop."""
self._settings = settings
+ self._last_error = None
self._settings_version += 1
self._version_event.set()
if self._task is None or self._task.done():
@@ -87,6 +102,7 @@ class BaseMqttPublisher(ABC):
self._task = None
self._client = None
self.connected = False
+ self._last_error = None
async def restart(self, settings: object) -> None:
"""Called when settings change — stop + start."""
@@ -102,13 +118,14 @@ class BaseMqttPublisher(ABC):
except Exception as e:
logger.warning(
"%s publish failed on %s. This is usually transient network noise; "
- "if it self-resolves and reconnects, it is generally not a concern: %s",
+ "if it self-resolves and reconnects, it is generally not a concern. Persistent errors may indicate a problem with your network connection or MQTT broker. Original error: %s",
self._integration_label(),
topic,
e,
exc_info=True,
)
self.connected = False
+ self._last_error = _format_error_detail(e)
# Wake the connection loop so it exits the wait and reconnects
self._settings_version += 1
self._version_event.set()
@@ -198,6 +215,7 @@ class BaseMqttPublisher(ABC):
async with aiomqtt.Client(**client_kwargs) as client:
self._client = client
self.connected = True
+ self._last_error = None
backoff = _BACKOFF_MIN
title, detail = self._on_connected(settings)
@@ -232,6 +250,7 @@ class BaseMqttPublisher(ABC):
except Exception as e:
self.connected = False
self._client = None
+ self._last_error = _format_error_detail(e)
title, detail = self._on_error()
broadcast_error(title, detail)
@@ -239,7 +258,7 @@ class BaseMqttPublisher(ABC):
logger.warning(
"%s connection error. This is usually transient network noise; "
"if it self-resolves, it is generally not a concern: %s "
- "(reconnecting in %ds)",
+ "(reconnecting in %ds). If this error persists, check your network connection and MQTT broker status.",
self._integration_label(),
e,
backoff,
diff --git a/app/fanout/mqtt_community.py b/app/fanout/mqtt_community.py
index d17971b..9c4dc13 100644
--- a/app/fanout/mqtt_community.py
+++ b/app/fanout/mqtt_community.py
@@ -98,9 +98,15 @@ class MqttCommunityModule(FanoutModule):
     @property
     def status(self) -> str:
         if self._publisher._is_configured():
+            # A retained publisher error wins over the raw connected flag so
+            # the UI surfaces "error" rather than a bare "disconnected".
+            if self._publisher.last_error:
+                return "error"
             return "connected" if self._publisher.connected else "disconnected"
         return "disconnected"
 
+    @property
+    def last_error(self) -> str | None:
+        """Most recent retained connection/publish error from the publisher."""
+        return self._publisher.last_error
+
async def _publish_community_packet(
publisher: CommunityMqttPublisher,
diff --git a/app/fanout/mqtt_private.py b/app/fanout/mqtt_private.py
index 19e49ae..a679f01 100644
--- a/app/fanout/mqtt_private.py
+++ b/app/fanout/mqtt_private.py
@@ -59,4 +59,10 @@ class MqttPrivateModule(FanoutModule):
def status(self) -> str:
if not self.config.get("broker_host"):
return "disconnected"
+ if self._publisher.last_error:
+ return "error"
return "connected" if self._publisher.connected else "disconnected"
+
+ @property
+ def last_error(self) -> str | None:
+ return self._publisher.last_error
diff --git a/app/fanout/sqs.py b/app/fanout/sqs.py
index 79f3822..3f06e3f 100644
--- a/app/fanout/sqs.py
+++ b/app/fanout/sqs.py
@@ -84,7 +84,6 @@ class SqsModule(FanoutModule):
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
super().__init__(config_id, config, name=name)
self._client = None
- self._last_error: str | None = None
async def start(self) -> None:
kwargs: dict[str, str] = {}
@@ -147,18 +146,18 @@ class SqsModule(FanoutModule):
try:
await asyncio.to_thread(partial(self._client.send_message, **request_kwargs))
- self._last_error = None
+ self._set_last_error(None)
except (ClientError, BotoCoreError) as exc:
- self._last_error = str(exc)
+ self._set_last_error(str(exc))
logger.warning("SQS %s send error: %s", self.config_id, exc)
except Exception as exc:
- self._last_error = str(exc)
+ self._set_last_error(str(exc))
logger.exception("Unexpected SQS send error for %s", self.config_id)
@property
def status(self) -> str:
if not str(self.config.get("queue_url", "")).strip():
return "disconnected"
- if self._last_error:
+ if self.last_error:
return "error"
return "connected"
diff --git a/app/fanout/webhook.py b/app/fanout/webhook.py
index 0ec9c28..84b7846 100644
--- a/app/fanout/webhook.py
+++ b/app/fanout/webhook.py
@@ -20,7 +20,6 @@ class WebhookModule(FanoutModule):
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
super().__init__(config_id, config, name=name)
self._client: httpx.AsyncClient | None = None
- self._last_error: str | None = None
async def start(self) -> None:
self._client = httpx.AsyncClient(timeout=httpx.Timeout(10.0))
@@ -62,9 +61,9 @@ class WebhookModule(FanoutModule):
try:
resp = await self._client.request(method, url, content=body_bytes, headers=headers)
resp.raise_for_status()
- self._last_error = None
+ self._set_last_error(None)
except httpx.HTTPStatusError as exc:
- self._last_error = f"HTTP {exc.response.status_code}"
+ self._set_last_error(f"HTTP {exc.response.status_code}")
logger.warning(
"Webhook %s returned %s for %s",
self.config_id,
@@ -72,13 +71,13 @@ class WebhookModule(FanoutModule):
url,
)
except httpx.RequestError as exc:
- self._last_error = str(exc)
+ self._set_last_error(str(exc))
logger.warning("Webhook %s request error: %s", self.config_id, exc)
@property
def status(self) -> str:
if not self.config.get("url"):
return "disconnected"
- if self._last_error:
+ if self.last_error:
return "error"
return "connected"
diff --git a/app/frontend_static.py b/app/frontend_static.py
index 7505faa..49e8551 100644
--- a/app/frontend_static.py
+++ b/app/frontend_static.py
@@ -139,6 +139,18 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
@app.get("/{path:path}")
async def serve_frontend(path: str):
"""Serve frontend files, falling back to index.html for SPA routing."""
+ if path == "api" or path.startswith("api/"):
+ return JSONResponse(
+ status_code=404,
+ content={
+ "detail": (
+ "API endpoint not found. If you are seeing this in response to a "
+ "frontend request, you may be running a newer frontend with an older "
+ "backend or vice versa. A full update is suggested."
+ )
+ },
+ )
+
file_path = (frontend_dir / path).resolve()
try:
file_path.relative_to(frontend_dir)
diff --git a/app/keystore.py b/app/keystore.py
index 4a1a86c..28031e9 100644
--- a/app/keystore.py
+++ b/app/keystore.py
@@ -1,14 +1,18 @@
"""
-Ephemeral keystore for storing sensitive keys in memory.
+Ephemeral keystore for storing sensitive keys in memory, plus the Ed25519
+signing primitive used by fanout modules that need to sign requests with the
+radio's own key.
The private key is stored in memory only and is never persisted to disk.
It's exported from the radio on startup and reconnect, then used for
server-side decryption of direct messages.
"""
+import hashlib
import logging
from typing import TYPE_CHECKING
+import nacl.bindings
from meshcore import EventType
from app.decoder import derive_public_key
@@ -25,11 +29,30 @@ NO_EVENT_RECEIVED_GUIDANCE = (
"issue commands to the radio."
)
+# Ed25519 group order (L) — used in the expanded signing primitive below
+_L = 2**252 + 27742317777372353535851937790883648493
+
# In-memory storage for the private key and derived public key
_private_key: bytes | None = None
_public_key: bytes | None = None
+def ed25519_sign_expanded(message: bytes, scalar: bytes, prefix: bytes, public_key: bytes) -> bytes:
+    """Sign a message using MeshCore's expanded Ed25519 key format.
+
+    MeshCore stores 64-byte keys as scalar(32) || prefix(32). Standard
+    Ed25519 libraries expect seed format and would re-SHA-512 the key, so we
+    perform the signing manually using the already-expanded key material.
+
+    This is RFC 8032 signing with the key-expansion hash skipped:
+    r = H(prefix || M), R = r*B, k = H(R || A || M), S = (r + k*s) mod L,
+    returning the 64-byte signature R || S.
+
+    Port of meshcore-packet-capture's ed25519_sign_with_expanded_key().
+    """
+    # Deterministic nonce derived from the prefix half of the expanded key.
+    r = int.from_bytes(hashlib.sha512(prefix + message).digest(), "little") % _L
+    # noclamp variant: r is already reduced mod L; clamping would corrupt it.
+    R = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(r.to_bytes(32, "little"))
+    k = int.from_bytes(hashlib.sha512(R + public_key + message).digest(), "little") % _L
+    s = (r + k * int.from_bytes(scalar, "little")) % _L
+    return R + s.to_bytes(32, "little")
+
+
def clear_keys() -> None:
"""Clear any stored private/public key material from memory."""
global _private_key, _public_key
diff --git a/app/main.py b/app/main.py
index 911635b..ddbe0bc 100644
--- a/app/main.py
+++ b/app/main.py
@@ -17,6 +17,7 @@ from app.frontend_static import (
)
from app.radio import RadioDisconnectedError
from app.radio_sync import (
+ stop_background_contact_reconciliation,
stop_message_polling,
stop_periodic_advert,
stop_periodic_sync,
@@ -95,6 +96,7 @@ async def lifespan(app: FastAPI):
pass
await fanout_manager.stop_all()
await radio_manager.stop_connection_monitor()
+ await stop_background_contact_reconciliation()
await stop_message_polling()
await stop_periodic_advert()
await stop_periodic_sync()
diff --git a/app/models.py b/app/models.py
index 8aad0bb..d0f0918 100644
--- a/app/models.py
+++ b/app/models.py
@@ -266,7 +266,7 @@ class ContactNameHistory(BaseModel):
class ContactActiveRoom(BaseModel):
- """A channel/room where a contact has been active."""
+ """A channel where a contact has been active."""
channel_key: str
channel_name: str
@@ -413,6 +413,10 @@ class Message(BaseModel):
acked: int = 0
sender_name: str | None = None
channel_name: str | None = None
+ packet_id: int | None = Field(
+ default=None,
+ description="Representative raw packet row ID when archival raw bytes exist",
+ )
class MessagesAroundResponse(BaseModel):
@@ -458,6 +462,21 @@ class RawPacketBroadcast(BaseModel):
decrypted_info: RawPacketDecryptedInfo | None = None
+class RawPacketDetail(BaseModel):
+    """Stored raw-packet detail returned by the packet API."""
+
+    # Row id in the raw-packet store.
+    id: int
+    # Capture time — presumably epoch seconds; confirm against the writer.
+    timestamp: int
+    data: str = Field(description="Hex-encoded packet data")
+    payload_type: str = Field(description="Packet type name (e.g. GROUP_TEXT, ADVERT)")
+    snr: float | None = Field(default=None, description="Signal-to-noise ratio in dB if available")
+    rssi: int | None = Field(
+        default=None, description="Received signal strength in dBm if available"
+    )
+    # True when server-side decryption succeeded for this packet.
+    decrypted: bool = False
+    decrypted_info: RawPacketDecryptedInfo | None = None
+
+
class SendMessageRequest(BaseModel):
text: str = Field(min_length=1)
@@ -818,6 +837,7 @@ class StatisticsResponse(BaseModel):
total_outgoing: int
contacts_heard: ContactActivityCounts
repeaters_heard: ContactActivityCounts
+ known_channels_active: ContactActivityCounts
path_hash_width_24h: PathHashWidthStats
diff --git a/app/radio.py b/app/radio.py
index a9e8c22..6f12d1f 100644
--- a/app/radio.py
+++ b/app/radio.py
@@ -548,11 +548,14 @@ class RadioManager:
async def disconnect(self) -> None:
"""Disconnect from the radio."""
+ from app.radio_sync import stop_background_contact_reconciliation
+
clear_keys()
self._reset_reconnect_error_broadcasts()
if self._meshcore is None:
return
+ await stop_background_contact_reconciliation()
await self._acquire_operation_lock("disconnect", blocking=True)
try:
mc = self._meshcore
diff --git a/app/radio_sync.py b/app/radio_sync.py
index 9894bbd..311a574 100644
--- a/app/radio_sync.py
+++ b/app/radio_sync.py
@@ -175,6 +175,9 @@ async def pause_polling():
# Background task handle
_sync_task: asyncio.Task | None = None
+# Startup/background contact reconciliation task handle
+_contact_reconcile_task: asyncio.Task | None = None
+
# Periodic maintenance check interval in seconds (5 minutes)
SYNC_INTERVAL = 300
@@ -275,30 +278,7 @@ async def sync_and_offload_contacts(mc: MeshCore) -> dict:
remove_result = await mc.commands.remove_contact(contact_data)
if remove_result.type == EventType.OK:
removed += 1
-
- # LIBRARY INTERNAL FIXUP: The MeshCore library's
- # commands.remove_contact() sends the remove command over
- # the wire but does NOT update the library's in-memory
- # contact cache (mc._contacts). This is a gap in the
- # library — there's no public API to clear a single
- # contact from the cache, and the library only refreshes
- # it on a full get_contacts() call.
- #
- # Why this matters: sync_recent_contacts_to_radio() uses
- # mc.get_contact_by_key_prefix() to check whether a
- # contact is already loaded on the radio. That method
- # searches mc._contacts. If we don't evict the removed
- # contact from the cache here, get_contact_by_key_prefix()
- # will still find it and skip the add_contact() call —
- # meaning contacts never get loaded back onto the radio
- # after offload. The result: no DM ACKs, degraded routing
- # for potentially minutes until the next periodic sync
- # refreshes the cache from the (now-empty) radio.
- #
- # We access mc._contacts directly because the library
- # exposes it as a read-only property (mc.contacts) with
- # no removal API. The dict is keyed by public_key string.
- mc._contacts.pop(public_key, None)
+ _evict_removed_contact_from_library_cache(mc, public_key)
else:
logger.warning(
"Failed to remove contact %s: %s", public_key[:12], remove_result.payload
@@ -470,28 +450,28 @@ async def ensure_default_channels() -> None:
async def sync_and_offload_all(mc: MeshCore) -> dict:
- """Sync and offload both contacts and channels, then ensure defaults exist."""
+ """Run fast startup sync, then background contact reconcile."""
logger.info("Starting full radio sync and offload")
# Contact on_radio is legacy/stale metadata. Clear it during the offload/reload
# cycle so old rows stop claiming radio residency we do not actively track.
await ContactRepository.clear_on_radio_except([])
- contacts_result = await sync_and_offload_contacts(mc)
+ contacts_result = await sync_contacts_from_radio(mc)
channels_result = await sync_and_offload_channels(mc)
# Ensure default channels exist
await ensure_default_channels()
- # Reload favorites plus a working-set fill back onto the radio immediately.
- # Pass mc directly since the caller already holds the radio operation lock
- # (asyncio.Lock is not reentrant).
- reload_result = await sync_recent_contacts_to_radio(force=True, mc=mc)
+ start_background_contact_reconciliation(
+ initial_radio_contacts=contacts_result.get("radio_contacts", {}),
+ expected_mc=mc,
+ )
return {
"contacts": contacts_result,
"channels": channels_result,
- "reloaded": reload_result,
+ "contact_reconcile_started": True,
}
@@ -1137,6 +1117,270 @@ async def stop_periodic_sync():
# Throttling for contact sync to radio
_last_contact_sync: float = 0.0
CONTACT_SYNC_THROTTLE_SECONDS = 30 # Don't sync more than once per 30 seconds
+CONTACT_RECONCILE_BATCH_SIZE = 2
+CONTACT_RECONCILE_YIELD_SECONDS = 0.05
+
+
+def _evict_removed_contact_from_library_cache(mc: MeshCore, public_key: str) -> None:
+    """Keep the library's contact cache consistent after a successful removal.
+
+    Call only after remove_contact() returned OK; keys are full public-key
+    strings as used by mc._contacts.
+    """
+    # LIBRARY INTERNAL FIXUP: The MeshCore library's remove_contact() sends the
+    # remove command over the wire but does NOT update the library's in-memory
+    # contact cache (mc._contacts). This is a gap in the library — there's no
+    # public API to clear a single contact from the cache, and the library only
+    # refreshes it on a full get_contacts() call.
+    #
+    # Why this matters: contact sync and targeted ensure/load paths use
+    # mc.get_contact_by_key_prefix() to check whether a contact is already
+    # loaded on the radio. That method searches mc._contacts. If we don't evict
+    # the removed contact from the cache here, later syncs will still find it
+    # and skip add_contact() calls, leaving the radio without the contact even
+    # though the app thinks it is resident.
+    mc._contacts.pop(public_key, None)
+
+
+def _normalize_radio_contacts_payload(contacts: dict | None) -> dict[str, dict]:
+    """Return radio contacts keyed by normalized lowercase full public key.
+
+    Accepts None (treated as empty) so callers can pass a raw event payload
+    through without guarding first.
+    """
+    normalized: dict[str, dict] = {}
+    for public_key, contact_data in (contacts or {}).items():
+        # str() guards against non-string keys arriving off the wire.
+        normalized[str(public_key).lower()] = contact_data
+    return normalized
+
+
+async def sync_contacts_from_radio(mc: MeshCore) -> dict:
+    """Pull contacts from the radio and persist them to the database without removing them.
+
+    Returns {"synced": count, "radio_contacts": normalized snapshot keyed by
+    lowercase public key}; on failure an "error" key is added and partial
+    counts are preserved.
+    """
+    synced = 0
+
+    try:
+        result = await mc.commands.get_contacts()
+
+        if result is None or result.type == EventType.ERROR:
+            logger.error(
+                "Failed to get contacts from radio: %s. "
+                "If you see this repeatedly, the radio may be visible on the "
+                "serial/TCP/BLE port but not responding to commands. Check for "
+                "another process with the serial port open (other RemoteTerm "
+                "instances, serial monitors, etc.), verify the firmware is "
+                "up-to-date and in client mode (not repeater), or try a "
+                "power cycle.",
+                result,
+            )
+            return {"synced": 0, "radio_contacts": {}, "error": str(result)}
+
+        contacts = _normalize_radio_contacts_payload(result.payload)
+        logger.info("Found %d contacts on radio", len(contacts))
+
+        for public_key, contact_data in contacts.items():
+            # on_radio=False — residency is presumably tracked by the reconcile
+            # task rather than this snapshot path; confirm against clear_on_radio.
+            await ContactRepository.upsert(
+                ContactUpsert.from_radio_dict(public_key, contact_data, on_radio=False)
+            )
+            # NOTE(review): fire-and-forget task with no saved reference — the
+            # asyncio docs warn unreferenced tasks may be garbage-collected
+            # mid-flight. Confirm references are retained elsewhere, or keep
+            # the handles in a set.
+            asyncio.create_task(
+                _reconcile_contact_messages_background(
+                    public_key,
+                    contact_data.get("adv_name"),
+                )
+            )
+            synced += 1
+
+        logger.info("Synced %d contacts from radio snapshot", synced)
+        return {"synced": synced, "radio_contacts": contacts}
+    except Exception as e:
+        logger.error("Error during contact snapshot sync: %s", e)
+        return {"synced": synced, "radio_contacts": {}, "error": str(e)}
+
+
+async def _reconcile_radio_contacts_in_background(
+    *,
+    initial_radio_contacts: dict[str, dict],
+    expected_mc: MeshCore,
+) -> None:
+    """Converge radio contacts toward the desired favorites+recents working set.
+
+    Long-lived background task: each iteration takes the radio operation lock
+    non-blockingly, performs at most CONTACT_RECONCILE_BATCH_SIZE remove/add
+    operations, then sleeps briefly so interactive radio use is not starved.
+    Exits when the radio's working set matches the desired set, or when the
+    underlying MeshCore transport changes (disconnect/reconnect).
+    """
+    # Local working copy of what we believe is on the radio; mutated as
+    # removes and adds succeed.
+    radio_contacts = dict(initial_radio_contacts)
+    removed = 0
+    loaded = 0
+    failed = 0
+
+    try:
+        while True:
+            # A different MeshCore object means the radio reconnected; the new
+            # sync cycle starts its own reconcile task, so this one exits.
+            if not radio_manager.is_connected or radio_manager.meshcore is not expected_mc:
+                logger.info("Stopping background contact reconcile: radio transport changed")
+                break
+
+            selected_contacts = await get_contacts_selected_for_radio_sync()
+            # Only full-length public keys (>= 64 hex chars) are syncable.
+            desired_contacts = {
+                contact.public_key.lower(): contact
+                for contact in selected_contacts
+                if len(contact.public_key) >= 64
+            }
+            removable_keys = [key for key in radio_contacts if key not in desired_contacts]
+            missing_contacts = [
+                contact for key, contact in desired_contacts.items() if key not in radio_contacts
+            ]
+
+            # Converged: nothing left to remove or add.
+            if not removable_keys and not missing_contacts:
+                logger.info(
+                    "Background contact reconcile complete: %d contacts on radio working set",
+                    len(radio_contacts),
+                )
+                break
+
+            progressed = False
+            try:
+                async with radio_manager.radio_operation(
+                    "background_contact_reconcile",
+                    blocking=False,
+                ) as mc:
+                    if mc is not expected_mc:
+                        logger.info(
+                            "Stopping background contact reconcile: radio transport changed"
+                        )
+                        break
+
+                    budget = CONTACT_RECONCILE_BATCH_SIZE
+                    # Re-read the desired set under the lock — favorites or
+                    # recents may have changed while we waited or slept.
+                    selected_contacts = await get_contacts_selected_for_radio_sync()
+                    desired_contacts = {
+                        contact.public_key.lower(): contact
+                        for contact in selected_contacts
+                        if len(contact.public_key) >= 64
+                    }
+
+                    # Phase 1: remove contacts on the radio we no longer want.
+                    for public_key in list(radio_contacts):
+                        if budget <= 0:
+                            break
+                        if public_key in desired_contacts:
+                            continue
+
+                        # Prefer the library's live record, then our snapshot,
+                        # then a minimal payload carrying only the key.
+                        remove_payload = (
+                            mc.get_contact_by_key_prefix(public_key[:12])
+                            or radio_contacts.get(public_key)
+                            or {"public_key": public_key}
+                        )
+                        try:
+                            remove_result = await mc.commands.remove_contact(remove_payload)
+                        except Exception as exc:
+                            failed += 1
+                            budget -= 1
+                            logger.warning(
+                                "Error removing contact %s during background reconcile: %s",
+                                public_key[:12],
+                                exc,
+                            )
+                            continue
+
+                        budget -= 1
+                        if remove_result.type == EventType.OK:
+                            radio_contacts.pop(public_key, None)
+                            _evict_removed_contact_from_library_cache(mc, public_key)
+                            removed += 1
+                            progressed = True
+                        else:
+                            failed += 1
+                            logger.warning(
+                                "Failed to remove contact %s during background reconcile: %s",
+                                public_key[:12],
+                                remove_result.payload,
+                            )
+
+                    # Phase 2: add desired contacts the radio is missing.
+                    if budget > 0:
+                        for public_key, contact in desired_contacts.items():
+                            if budget <= 0:
+                                break
+                            if public_key in radio_contacts:
+                                continue
+
+                            # Already resident per the library cache — record it
+                            # locally without spending budget.
+                            if mc.get_contact_by_key_prefix(public_key[:12]):
+                                radio_contacts[public_key] = {"public_key": public_key}
+                                continue
+
+                            try:
+                                add_payload = contact.to_radio_dict()
+                                add_result = await mc.commands.add_contact(add_payload)
+                            except Exception as exc:
+                                failed += 1
+                                budget -= 1
+                                logger.warning(
+                                    "Error adding contact %s during background reconcile: %s",
+                                    public_key[:12],
+                                    exc,
+                                    exc_info=True,
+                                )
+                                continue
+
+                            budget -= 1
+                            if add_result.type == EventType.OK:
+                                radio_contacts[public_key] = add_payload
+                                loaded += 1
+                                progressed = True
+                            else:
+                                failed += 1
+                                reason = add_result.payload
+                                hint = ""
+                                if reason is None:
+                                    hint = (
+                                        " (no response from radio — if this repeats, check for "
+                                        "serial port contention from another process or try a "
+                                        "power cycle)"
+                                    )
+                                logger.warning(
+                                    "Failed to add contact %s during background reconcile: %s%s",
+                                    public_key[:12],
+                                    reason,
+                                    hint,
+                                )
+            except RadioOperationBusyError:
+                # Someone else holds the radio lock — retry next iteration.
+                logger.debug("Background contact reconcile yielding: radio busy")
+
+            await asyncio.sleep(CONTACT_RECONCILE_YIELD_SECONDS)
+            # NOTE(review): this trailing `continue` is a no-op — it is the last
+            # statement of the loop body, so control flow is identical without it.
+            if not progressed:
+                continue
+    except asyncio.CancelledError:
+        logger.info("Background contact reconcile task cancelled")
+        raise
+    except Exception as exc:
+        logger.error("Background contact reconcile failed: %s", exc, exc_info=True)
+    finally:
+        # One summary line so operators can see what the task accomplished.
+        if removed > 0 or loaded > 0 or failed > 0:
+            logger.info(
+                "Background contact reconcile summary: removed %d, loaded %d, failed %d",
+                removed,
+                loaded,
+                failed,
+            )
+
+
+def start_background_contact_reconciliation(
+    *,
+    initial_radio_contacts: dict[str, dict],
+    expected_mc: MeshCore,
+) -> None:
+    """Start or replace the background contact reconcile task for the current radio.
+
+    initial_radio_contacts is the snapshot from sync_contacts_from_radio();
+    expected_mc pins the task to the current MeshCore transport so it exits
+    cleanly after a reconnect.
+    """
+    global _contact_reconcile_task
+
+    # Only one reconcile loop may run at a time — cancel any prior task.
+    if _contact_reconcile_task is not None and not _contact_reconcile_task.done():
+        _contact_reconcile_task.cancel()
+
+    _contact_reconcile_task = asyncio.create_task(
+        _reconcile_radio_contacts_in_background(
+            initial_radio_contacts=initial_radio_contacts,
+            expected_mc=expected_mc,
+        )
+    )
+    logger.info(
+        "Started background contact reconcile for %d radio contact(s)",
+        len(initial_radio_contacts),
+    )
+
+
+async def stop_background_contact_reconciliation() -> None:
+    """Stop the background contact reconcile task.
+
+    Safe to call when no task is running; awaits the cancelled task and
+    swallows its CancelledError so shutdown paths stay quiet.
+    """
+    global _contact_reconcile_task
+
+    if _contact_reconcile_task and not _contact_reconcile_task.done():
+        _contact_reconcile_task.cancel()
+        try:
+            await _contact_reconcile_task
+        except asyncio.CancelledError:
+            pass
+    _contact_reconcile_task = None
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
diff --git a/app/repository/messages.py b/app/repository/messages.py
index 945017e..28e7d59 100644
--- a/app/repository/messages.py
+++ b/app/repository/messages.py
@@ -331,6 +331,12 @@ class MessageRepository:
@staticmethod
def _row_to_message(row: Any) -> Message:
"""Convert a database row to a Message model."""
+ packet_id = None
+ if hasattr(row, "keys"):
+ row_keys = row.keys()
+ if "packet_id" in row_keys:
+ packet_id = row["packet_id"]
+
return Message(
id=row["id"],
type=row["type"],
@@ -345,6 +351,14 @@ class MessageRepository:
outgoing=bool(row["outgoing"]),
acked=row["acked"],
sender_name=row["sender_name"],
+ packet_id=packet_id,
+ )
+
+ @staticmethod
+ def _message_select(message_alias: str = "messages") -> str:
+ return (
+ f"{message_alias}.*, "
+ f"(SELECT MIN(id) FROM raw_packets WHERE message_id = {message_alias}.id) AS packet_id"
)
@staticmethod
@@ -363,7 +377,7 @@ class MessageRepository:
) -> list[Message]:
search_query = MessageRepository._parse_search_query(q) if q else None
query = (
- "SELECT messages.* FROM messages "
+ f"SELECT {MessageRepository._message_select('messages')} FROM messages "
"LEFT JOIN contacts ON messages.type = 'PRIV' "
"AND LOWER(messages.conversation_key) = LOWER(contacts.public_key) "
"LEFT JOIN channels ON messages.type = 'CHAN' "
@@ -470,7 +484,8 @@ class MessageRepository:
# 1. Get the target message (must satisfy filters if provided)
target_cursor = await db.conn.execute(
- f"SELECT * FROM messages WHERE id = ? AND {where_sql}",
+ f"SELECT {MessageRepository._message_select('messages')} "
+ f"FROM messages WHERE id = ? AND {where_sql}",
(message_id, *base_params),
)
target_row = await target_cursor.fetchone()
@@ -481,7 +496,7 @@ class MessageRepository:
# 2. Get context_size+1 messages before target (DESC)
before_query = f"""
- SELECT * FROM messages WHERE {where_sql}
+ SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
AND (received_at < ? OR (received_at = ? AND id < ?))
ORDER BY received_at DESC, id DESC LIMIT ?
"""
@@ -500,7 +515,7 @@ class MessageRepository:
# 3. Get context_size+1 messages after target (ASC)
after_query = f"""
- SELECT * FROM messages WHERE {where_sql}
+ SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
AND (received_at > ? OR (received_at = ? AND id > ?))
ORDER BY received_at ASC, id ASC LIMIT ?
"""
@@ -545,7 +560,7 @@ class MessageRepository:
async def get_by_id(message_id: int) -> "Message | None":
"""Look up a message by its ID."""
cursor = await db.conn.execute(
- "SELECT * FROM messages WHERE id = ?",
+ f"SELECT {MessageRepository._message_select('messages')} FROM messages WHERE id = ?",
(message_id,),
)
row = await cursor.fetchone()
@@ -570,7 +585,9 @@ class MessageRepository:
) -> "Message | None":
"""Look up a message by its unique content fields."""
query = """
- SELECT * FROM messages
+ SELECT messages.*,
+ (SELECT MIN(id) FROM raw_packets WHERE message_id = messages.id) AS packet_id
+ FROM messages
WHERE type = ? AND conversation_key = ? AND text = ?
AND (sender_timestamp = ? OR (sender_timestamp IS NULL AND ? IS NULL))
"""
@@ -706,6 +723,11 @@ class MessageRepository:
state_key = f"{prefix}-{row['conversation_key']}"
last_message_times[state_key] = row["last_message_time"]
+ # Only include last_read_ats for conversations that actually have messages.
+ # Without this filter, every contact heard via advertisement (even without
+ # any DMs) bloats the payload — filtering trims it from 391KB to ~46KB on a typical database.
+ last_read_ats = {k: v for k, v in last_read_ats.items() if k in last_message_times}
+
return {
"counts": counts,
"mentions": mention_flags,
diff --git a/app/repository/raw_packets.py b/app/repository/raw_packets.py
index 3a31e23..c773a67 100644
--- a/app/repository/raw_packets.py
+++ b/app/repository/raw_packets.py
@@ -121,6 +121,18 @@ class RawPacketRepository:
return None
return row["message_id"]
+ @staticmethod
+ async def get_by_id(packet_id: int) -> tuple[int, bytes, int, int | None] | None:
+ """Return a raw packet row as (id, data, timestamp, message_id)."""
+ cursor = await db.conn.execute(
+ "SELECT id, data, timestamp, message_id FROM raw_packets WHERE id = ?",
+ (packet_id,),
+ )
+ row = await cursor.fetchone()
+ if not row:
+ return None
+ return (row["id"], bytes(row["data"]), row["timestamp"], row["message_id"])
+
@staticmethod
async def prune_old_undecrypted(max_age_days: int) -> int:
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
diff --git a/app/repository/settings.py b/app/repository/settings.py
index 91e8720..0afc43a 100644
--- a/app/repository/settings.py
+++ b/app/repository/settings.py
@@ -295,6 +295,30 @@ class StatisticsRepository:
"last_week": row["last_week"] or 0,
}
+ @staticmethod
+ async def _known_channels_active() -> dict[str, int]:
+ """Count distinct known channel keys with channel traffic in each time window."""
+ now = int(time.time())
+ cursor = await db.conn.execute(
+ """
+ SELECT
+ COUNT(DISTINCT CASE WHEN m.received_at >= ? THEN m.conversation_key END) AS last_hour,
+ COUNT(DISTINCT CASE WHEN m.received_at >= ? THEN m.conversation_key END) AS last_24_hours,
+ COUNT(DISTINCT CASE WHEN m.received_at >= ? THEN m.conversation_key END) AS last_week
+ FROM messages m
+ INNER JOIN channels c ON UPPER(m.conversation_key) = UPPER(c.key)
+ WHERE m.type = 'CHAN'
+ """,
+ (now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D),
+ )
+ row = await cursor.fetchone()
+ assert row is not None
+ return {
+ "last_hour": row["last_hour"] or 0,
+ "last_24_hours": row["last_24_hours"] or 0,
+ "last_week": row["last_week"] or 0,
+ }
+
@staticmethod
async def _path_hash_width_24h() -> dict[str, int | float]:
"""Count parsed raw packets from the last 24h by hop hash width."""
@@ -421,6 +445,7 @@ class StatisticsRepository:
# Activity windows
contacts_heard = await StatisticsRepository._activity_counts(contact_type=2, exclude=True)
repeaters_heard = await StatisticsRepository._activity_counts(contact_type=2)
+ known_channels_active = await StatisticsRepository._known_channels_active()
path_hash_width_24h = await StatisticsRepository._path_hash_width_24h()
return {
@@ -436,5 +461,6 @@ class StatisticsRepository:
"total_outgoing": total_outgoing,
"contacts_heard": contacts_heard,
"repeaters_heard": repeaters_heard,
+ "known_channels_active": known_channels_active,
"path_hash_width_24h": path_hash_width_24h,
}
diff --git a/app/routers/channels.py b/app/routers/channels.py
index ca4de07..fb3f94f 100644
--- a/app/routers/channels.py
+++ b/app/routers/channels.py
@@ -71,7 +71,7 @@ async def create_channel(request: CreateChannelRequest) -> Channel:
requested_name = request.name
is_hashtag = requested_name.startswith("#")
- # Reserve the canonical Public room so it cannot drift to another key,
+ # Reserve the canonical Public channel so it cannot drift to another key,
# and the well-known Public key cannot be renamed to something else.
if is_public_channel_name(requested_name):
if request.key:
diff --git a/app/routers/fanout.py b/app/routers/fanout.py
index 36c7d40..f2ec2de 100644
--- a/app/routers/fanout.py
+++ b/app/routers/fanout.py
@@ -9,14 +9,14 @@ import string
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, Field
-from app.config import settings as server_settings
from app.fanout.bot_exec import _analyze_bot_signature
+from app.fanout.manager import fanout_manager
from app.repository.fanout import FanoutConfigRepository
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/fanout", tags=["fanout"])
-_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs"}
+_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs", "map_upload"}
_IATA_RE = re.compile(r"^[A-Z]{3}$")
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
@@ -94,6 +94,8 @@ def _validate_and_normalize_config(config_type: str, config: dict) -> dict:
_validate_apprise_config(normalized)
elif config_type == "sqs":
_validate_sqs_config(normalized)
+ elif config_type == "map_upload":
+ _validate_map_upload_config(normalized)
return normalized
@@ -295,10 +297,33 @@ def _validate_sqs_config(config: dict) -> None:
)
+def _validate_map_upload_config(config: dict) -> None:
+ """Validate and normalize map_upload config blob."""
+ api_url = str(config.get("api_url", "")).strip()
+ if api_url and not api_url.startswith(("http://", "https://")):
+ raise HTTPException(
+ status_code=400,
+ detail="api_url must start with http:// or https://",
+ )
+ # Persist the cleaned value (empty string means use the module default)
+ config["api_url"] = api_url
+ config["dry_run"] = bool(config.get("dry_run", True))
+ config["geofence_enabled"] = bool(config.get("geofence_enabled", False))
+ try:
+ radius = float(config.get("geofence_radius_km", 0) or 0)
+ except (TypeError, ValueError):
+ raise HTTPException(status_code=400, detail="geofence_radius_km must be a number") from None
+ if radius < 0:
+ raise HTTPException(status_code=400, detail="geofence_radius_km must be >= 0")
+ config["geofence_radius_km"] = radius
+
+
def _enforce_scope(config_type: str, scope: dict) -> dict:
"""Enforce type-specific scope constraints. Returns normalized scope."""
if config_type == "mqtt_community":
return {"messages": "none", "raw_packets": "all"}
+ if config_type == "map_upload":
+ return {"messages": "none", "raw_packets": "all"}
if config_type == "bot":
return {"messages": "all", "raw_packets": "none"}
if config_type in ("webhook", "apprise"):
@@ -325,6 +350,15 @@ def _enforce_scope(config_type: str, scope: dict) -> dict:
return {"messages": messages, "raw_packets": raw_packets}
+def _bot_system_disabled_detail() -> str | None:
+ source = fanout_manager.get_bots_disabled_source()
+ if source == "env":
+ return "Bot system disabled by server configuration (MESHCORE_DISABLE_BOTS)"
+ if source == "until_restart":
+ return "Bot system disabled until the server restarts"
+ return None
+
+
@router.get("")
async def list_fanout_configs() -> list[dict]:
"""List all fanout configs."""
@@ -340,8 +374,10 @@ async def create_fanout_config(body: FanoutConfigCreate) -> dict:
detail=f"Invalid type '{body.type}'. Must be one of: {', '.join(sorted(_VALID_TYPES))}",
)
- if body.type == "bot" and server_settings.disable_bots:
- raise HTTPException(status_code=403, detail="Bot system disabled by server configuration")
+ if body.type == "bot":
+ disabled_detail = _bot_system_disabled_detail()
+ if disabled_detail:
+ raise HTTPException(status_code=403, detail=disabled_detail)
normalized_config = _validate_and_normalize_config(body.type, body.config)
scope = _enforce_scope(body.type, body.scope)
@@ -356,8 +392,6 @@ async def create_fanout_config(body: FanoutConfigCreate) -> dict:
# Start the module if enabled
if cfg["enabled"]:
- from app.fanout.manager import fanout_manager
-
await fanout_manager.reload_config(cfg["id"])
logger.info("Created fanout config %s (type=%s, name=%s)", cfg["id"], body.type, body.name)
@@ -371,8 +405,10 @@ async def update_fanout_config(config_id: str, body: FanoutConfigUpdate) -> dict
if existing is None:
raise HTTPException(status_code=404, detail="Fanout config not found")
- if existing["type"] == "bot" and server_settings.disable_bots:
- raise HTTPException(status_code=403, detail="Bot system disabled by server configuration")
+ if existing["type"] == "bot":
+ disabled_detail = _bot_system_disabled_detail()
+ if disabled_detail:
+ raise HTTPException(status_code=403, detail=disabled_detail)
kwargs = {}
if body.name is not None:
@@ -390,8 +426,6 @@ async def update_fanout_config(config_id: str, body: FanoutConfigUpdate) -> dict
raise HTTPException(status_code=404, detail="Fanout config not found")
# Reload the module to pick up changes
- from app.fanout.manager import fanout_manager
-
await fanout_manager.reload_config(config_id)
logger.info("Updated fanout config %s", config_id)
@@ -406,10 +440,24 @@ async def delete_fanout_config(config_id: str) -> dict:
raise HTTPException(status_code=404, detail="Fanout config not found")
# Stop the module first
- from app.fanout.manager import fanout_manager
-
await fanout_manager.remove_config(config_id)
await FanoutConfigRepository.delete(config_id)
logger.info("Deleted fanout config %s", config_id)
return {"deleted": True}
+
+
+@router.post("/bots/disable-until-restart")
+async def disable_bots_until_restart() -> dict:
+ """Stop active bot modules and prevent them from running again until restart."""
+ source = await fanout_manager.disable_bots_until_restart()
+
+ from app.services.radio_runtime import radio_runtime as radio_manager
+ from app.websocket import broadcast_health
+
+ broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
+ return {
+ "status": "ok",
+ "bots_disabled": True,
+ "bots_disabled_source": source,
+ }
diff --git a/app/routers/health.py b/app/routers/health.py
index 2ceb39e..744e327 100644
--- a/app/routers/health.py
+++ b/app/routers/health.py
@@ -1,8 +1,8 @@
import os
-from typing import Any
+from typing import Any, Literal
from fastapi import APIRouter
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
from app.config import settings
from app.repository import RawPacketRepository
@@ -25,6 +25,13 @@ class AppInfoResponse(BaseModel):
commit_hash: str | None = None
+class FanoutStatusResponse(BaseModel):
+ name: str
+ type: str
+ status: str
+ last_error: str | None = None
+
+
class HealthResponse(BaseModel):
status: str
radio_connected: bool
@@ -35,8 +42,10 @@ class HealthResponse(BaseModel):
radio_device_info: RadioDeviceInfoResponse | None = None
database_size_mb: float
oldest_undecrypted_timestamp: int | None
- fanout_statuses: dict[str, dict[str, str]] = {}
+ fanout_statuses: dict[str, FanoutStatusResponse] = Field(default_factory=dict)
bots_disabled: bool = False
+ bots_disabled_source: Literal["env", "until_restart"] | None = None
+ basic_auth_enabled: bool = False
def _clean_optional_str(value: object) -> str | None:
@@ -46,6 +55,11 @@ def _clean_optional_str(value: object) -> str | None:
return cleaned or None
+def _read_optional_bool_setting(name: str) -> bool:
+ value = getattr(settings, name, False)
+ return value if isinstance(value, bool) else False
+
+
async def build_health_data(radio_connected: bool, connection_info: str | None) -> dict:
"""Build the health status payload used by REST endpoint and WebSocket broadcasts."""
app_build_info = get_app_build_info()
@@ -64,10 +78,14 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
# Fanout module statuses
fanout_statuses: dict[str, Any] = {}
+ bots_disabled_source = "env" if _read_optional_bool_setting("disable_bots") else None
try:
from app.fanout.manager import fanout_manager
fanout_statuses = fanout_manager.get_statuses()
+ manager_bots_disabled_source = fanout_manager.get_bots_disabled_source()
+ if manager_bots_disabled_source is not None:
+ bots_disabled_source = manager_bots_disabled_source
except Exception:
pass
@@ -118,7 +136,9 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
"database_size_mb": db_size_mb,
"oldest_undecrypted_timestamp": oldest_ts,
"fanout_statuses": fanout_statuses,
- "bots_disabled": settings.disable_bots,
+ "bots_disabled": bots_disabled_source is not None,
+ "bots_disabled_source": bots_disabled_source,
+ "basic_auth_enabled": _read_optional_bool_setting("basic_auth_enabled"),
}
diff --git a/app/routers/packets.py b/app/routers/packets.py
index 00316ee..4c6374c 100644
--- a/app/routers/packets.py
+++ b/app/routers/packets.py
@@ -8,8 +8,9 @@ from pydantic import BaseModel, Field
from app.database import db
from app.decoder import parse_packet, try_decrypt_packet_with_channel_key
+from app.models import RawPacketDecryptedInfo, RawPacketDetail
from app.packet_processor import create_message_from_decrypted, run_historical_dm_decryption
-from app.repository import ChannelRepository, RawPacketRepository
+from app.repository import ChannelRepository, MessageRepository, RawPacketRepository
from app.websocket import broadcast_success
logger = logging.getLogger(__name__)
@@ -102,6 +103,45 @@ async def get_undecrypted_count() -> dict:
return {"count": count}
+@router.get("/{packet_id}", response_model=RawPacketDetail)
+async def get_raw_packet(packet_id: int) -> RawPacketDetail:
+ """Fetch one stored raw packet by row ID for on-demand inspection."""
+ packet_row = await RawPacketRepository.get_by_id(packet_id)
+ if packet_row is None:
+ raise HTTPException(status_code=404, detail="Raw packet not found")
+
+ stored_packet_id, packet_data, packet_timestamp, message_id = packet_row
+ packet_info = parse_packet(packet_data)
+ payload_type_name = packet_info.payload_type.name if packet_info else "Unknown"
+
+ decrypted_info: RawPacketDecryptedInfo | None = None
+ if message_id is not None:
+ message = await MessageRepository.get_by_id(message_id)
+ if message is not None:
+ if message.type == "CHAN":
+ channel = await ChannelRepository.get_by_key(message.conversation_key)
+ decrypted_info = RawPacketDecryptedInfo(
+ channel_name=channel.name if channel else None,
+ sender=message.sender_name,
+ channel_key=message.conversation_key,
+ contact_key=message.sender_key,
+ )
+ else:
+ decrypted_info = RawPacketDecryptedInfo(
+ sender=message.sender_name,
+ contact_key=message.conversation_key,
+ )
+
+ return RawPacketDetail(
+ id=stored_packet_id,
+ timestamp=packet_timestamp,
+ data=packet_data.hex(),
+ payload_type=payload_type_name,
+ decrypted=message_id is not None,
+ decrypted_info=decrypted_info,
+ )
+
+
@router.post("/decrypt/historical", response_model=DecryptResult)
async def decrypt_historical_packets(
request: DecryptRequest, background_tasks: BackgroundTasks, response: Response
diff --git a/app/routers/server_control.py b/app/routers/server_control.py
index 7919775..b3f9e9e 100644
--- a/app/routers/server_control.py
+++ b/app/routers/server_control.py
@@ -62,7 +62,7 @@ def _login_rejected_message(label: str) -> str:
def _login_send_failed_message(label: str) -> str:
return (
f"The login request could not be sent to the {label}. "
- f"The control panel is still available, but authenticated actions may fail until a login succeeds."
+ f"You're free to attempt interaction; try logging in again if authenticated actions fail."
)
@@ -70,7 +70,7 @@ def _login_timeout_message(label: str) -> str:
return (
f"No login confirmation was heard from the {label}. "
"That can mean the password was wrong or the reply was missed in transit. "
- "The control panel is still available; try logging in again if authenticated actions fail."
+ "You're free to attempt interaction; try logging in again if authenticated actions fail."
)
diff --git a/app/services/dm_ingest.py b/app/services/dm_ingest.py
index df01e46..bfd09ca 100644
--- a/app/services/dm_ingest.py
+++ b/app/services/dm_ingest.py
@@ -238,6 +238,7 @@ async def _store_direct_message(
sender_key=sender_key,
outgoing=outgoing,
sender_name=sender_name,
+ packet_id=packet_id,
)
broadcast_message(message=message, broadcast_fn=broadcast_fn, realtime=realtime)
diff --git a/app/services/messages.py b/app/services/messages.py
index 5508a6a..f5d1ea9 100644
--- a/app/services/messages.py
+++ b/app/services/messages.py
@@ -62,6 +62,7 @@ def build_message_model(
acked: int = 0,
sender_name: str | None = None,
channel_name: str | None = None,
+ packet_id: int | None = None,
) -> Message:
"""Build a Message model with the canonical backend payload shape."""
return Message(
@@ -79,6 +80,7 @@ def build_message_model(
acked=acked,
sender_name=sender_name,
channel_name=channel_name,
+ packet_id=packet_id,
)
@@ -131,6 +133,7 @@ def broadcast_message_acked(
message_id: int,
ack_count: int,
paths: list[MessagePath] | None,
+ packet_id: int | None,
broadcast_fn: BroadcastFn,
) -> None:
"""Broadcast a message_acked payload."""
@@ -140,6 +143,7 @@ def broadcast_message_acked(
"message_id": message_id,
"ack_count": ack_count,
"paths": [path.model_dump() for path in paths] if paths else [],
+ "packet_id": packet_id,
},
)
@@ -182,11 +186,16 @@ async def reconcile_duplicate_message(
else:
ack_count = existing_msg.acked
+ representative_packet_id = (
+ existing_msg.packet_id if existing_msg.packet_id is not None else packet_id
+ )
+
if existing_msg.outgoing or path is not None:
broadcast_message_acked(
message_id=existing_msg.id,
ack_count=ack_count,
paths=paths,
+ packet_id=representative_packet_id,
broadcast_fn=broadcast_fn,
)
@@ -307,6 +316,7 @@ async def create_message_from_decrypted(
sender_name=sender,
sender_key=resolved_sender_key,
channel_name=channel_name,
+ packet_id=packet_id,
),
broadcast_fn=broadcast_fn,
realtime=realtime,
diff --git a/docker-compose.yaml b/docker-compose.yaml
index ee79598..4ed71c0 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -18,15 +18,18 @@ services:
environment:
MESHCORE_DATABASE_PATH: data/meshcore.db
# Radio connection -- optional if you map just a single serial device above, as the app will autodetect
-
# Serial (USB)
# MESHCORE_SERIAL_PORT: /dev/ttyUSB0
# MESHCORE_SERIAL_BAUDRATE: 115200
-
# TCP
# MESHCORE_TCP_HOST: 192.168.1.100
# MESHCORE_TCP_PORT: 4000
+ # Security
+ # MESHCORE_DISABLE_BOTS: "true"
+ # MESHCORE_BASIC_AUTH_USERNAME: changeme
+ # MESHCORE_BASIC_AUTH_PASSWORD: changeme
+
# Logging
# MESHCORE_LOG_LEVEL: INFO
restart: unless-stopped
diff --git a/frontend/index.html b/frontend/index.html
index 216d532..37f1246 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -9,8 +9,8 @@
RemoteTerm for MeshCore
-
+
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 9aa9db1..3c1f138 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "remoteterm-meshcore-frontend",
- "version": "3.5.0",
+ "version": "3.6.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "remoteterm-meshcore-frontend",
- "version": "3.5.0",
+ "version": "3.6.2",
"dependencies": {
"@codemirror/lang-python": "^6.2.1",
"@codemirror/theme-one-dark": "^6.1.3",
@@ -29,6 +29,7 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-leaflet": "^4.2.1",
+ "react-swipeable": "^7.0.2",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"tailwindcss-animate": "^1.0.7",
@@ -5696,6 +5697,15 @@
}
}
},
+ "node_modules/react-swipeable": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/react-swipeable/-/react-swipeable-7.0.2.tgz",
+ "integrity": "sha512-v1Qx1l+aC2fdxKa9aKJiaU/ZxmJ5o98RMoFwUqAAzVWUcxgfHFXDDruCKXhw6zIYXm6V64JiHgP9f6mlME5l8w==",
+ "license": "MIT",
+ "peerDependencies": {
+ "react": "^16.8.3 || ^17 || ^18 || ^19.0.0 || ^19.0.0-rc"
+ }
+ },
"node_modules/read-cache": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
diff --git a/frontend/package.json b/frontend/package.json
index 092d46f..c320635 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,7 +1,7 @@
{
"name": "remoteterm-meshcore-frontend",
"private": true,
- "version": "3.5.0",
+ "version": "3.6.2",
"type": "module",
"scripts": {
"dev": "vite",
@@ -37,6 +37,7 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-leaflet": "^4.2.1",
+ "react-swipeable": "^7.0.2",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"tailwindcss-animate": "^1.0.7",
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 2b43110..601f458 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -1,4 +1,4 @@
-import { useEffect, useCallback, useRef, useState } from 'react';
+import { useEffect, useCallback, useRef, useState, useMemo } from 'react';
import { api } from './api';
import { takePrefetchOrFetch } from './prefetch';
import { useWebSocket } from './useWebSocket';
@@ -14,6 +14,8 @@ import {
useConversationNavigation,
useRealtimeAppState,
useBrowserNotifications,
+ useFaviconBadge,
+ useUnreadTitle,
useRawPacketStatsSession,
} from './hooks';
import { AppShell } from './components/AppShell';
@@ -22,6 +24,7 @@ import { DistanceUnitProvider } from './contexts/DistanceUnitContext';
import { messageContainsMention } from './utils/messageParser';
import { getStateKey } from './utils/conversationState';
import type { Conversation, Message, RawPacket } from './types';
+import { CONTACT_TYPE_ROOM } from './types';
interface ChannelUnreadMarker {
channelId: string;
@@ -249,6 +252,21 @@ export function App() {
} = useConversationMessages(activeConversation, targetMessageId);
removeConversationMessagesRef.current = removeConversationMessages;
+ // Room servers replay stored history as a burst of DMs, all arriving with similar received_at
+ // but spanning a wide range of sender_timestamps. Sort by sender_timestamp for room contacts
+ // so the display reflects the original send order rather than our radio's receipt order.
+ const activeContactIsRoom =
+ activeConversation?.type === 'contact' &&
+ contacts.find((c) => c.public_key === activeConversation.id)?.type === CONTACT_TYPE_ROOM;
+ const sortedMessages = useMemo(() => {
+ if (!activeContactIsRoom || messages.length === 0) return messages;
+ return [...messages].sort((a, b) => {
+ const aTs = a.sender_timestamp ?? a.received_at;
+ const bTs = b.sender_timestamp ?? b.received_at;
+ return aTs !== bTs ? aTs - bTs : a.id - b.id;
+ });
+ }, [activeContactIsRoom, messages]);
+
const {
unreadCounts,
mentions,
@@ -259,6 +277,8 @@ export function App() {
markAllRead,
refreshUnreads,
} = useUnreadCounts(channels, contacts, activeConversation);
+ useFaviconBadge(unreadCounts, mentions, favorites);
+ useUnreadTitle(unreadCounts, favorites);
useEffect(() => {
if (activeConversation?.type !== 'channel') {
@@ -423,7 +443,7 @@ export function App() {
config,
health,
favorites,
- messages,
+ messages: sortedMessages,
messagesLoading,
loadingOlder,
hasOlderMessages,
@@ -502,9 +522,7 @@ export function App() {
onChannelCreate: handleCreateCrackedChannel,
};
const newMessageModalProps = {
- contacts,
undecryptedCount,
- onSelectConversation: handleSelectConversationWithTargetReset,
onCreateContact: handleCreateContact,
onCreateChannel: handleCreateChannel,
onCreateHashtagChannel: handleCreateHashtagChannel,
diff --git a/frontend/src/api.ts b/frontend/src/api.ts
index a2dbc4e..8c7a5f5 100644
--- a/frontend/src/api.ts
+++ b/frontend/src/api.ts
@@ -15,6 +15,7 @@ import type {
MessagesAroundResponse,
MigratePreferencesRequest,
MigratePreferencesResponse,
+ RawPacket,
RadioAdvertMode,
RadioConfig,
RadioConfigUpdate,
@@ -248,6 +249,7 @@ export const api = {
),
// Packets
+ getPacket: (packetId: number) => fetchJson(`/packets/${packetId}`),
getUndecryptedPacketCount: () => fetchJson<{ count: number }>('/packets/undecrypted/count'),
decryptHistoricalPackets: (params: {
key_type: 'channel' | 'contact';
@@ -347,6 +349,14 @@ export const api = {
fetchJson<{ deleted: boolean }>(`/fanout/${id}`, {
method: 'DELETE',
}),
+ disableBotsUntilRestart: () =>
+ fetchJson<{
+ status: string;
+ bots_disabled: boolean;
+ bots_disabled_source: 'env' | 'until_restart';
+ }>('/fanout/bots/disable-until-restart', {
+ method: 'POST',
+ }),
// Statistics
getStatistics: () => fetchJson('/statistics'),
diff --git a/frontend/src/components/AppShell.tsx b/frontend/src/components/AppShell.tsx
index 1da675c..e72ede6 100644
--- a/frontend/src/components/AppShell.tsx
+++ b/frontend/src/components/AppShell.tsx
@@ -1,4 +1,5 @@
import { lazy, Suspense, useRef, type ComponentProps } from 'react';
+import { useSwipeable } from 'react-swipeable';
import { StatusBar } from './StatusBar';
import { Sidebar } from './Sidebar';
@@ -6,6 +7,7 @@ import { ConversationPane } from './ConversationPane';
import { NewMessageModal } from './NewMessageModal';
import { ContactInfoPane } from './ContactInfoPane';
import { ChannelInfoPane } from './ChannelInfoPane';
+import { SecurityWarningModal } from './SecurityWarningModal';
import { Toaster } from './ui/sonner';
import { Sheet, SheetContent, SheetDescription, SheetHeader, SheetTitle } from './ui/sheet';
import {
@@ -88,6 +90,24 @@ export function AppShell({
contactInfoPaneProps,
channelInfoPaneProps,
}: AppShellProps) {
+ const swipeHandlers = useSwipeable({
+ onSwipedRight: ({ initial }) => {
+ if (initial[0] < 30 && !sidebarOpen && window.innerWidth < 768) {
+ onSidebarOpenChange(true);
+ }
+ },
+ trackTouch: true,
+ trackMouse: false,
+ preventScrollOnSwipe: true,
+ });
+
+ const closeSwipeHandlers = useSwipeable({
+ onSwipedLeft: () => onSidebarOpenChange(false),
+ trackTouch: true,
+ trackMouse: false,
+ preventScrollOnSwipe: false,
+ });
+
const searchMounted = useRef(false);
if (conversationPaneProps.activeConversation?.type === 'search') {
searchMounted.current = true;
@@ -152,7 +172,7 @@ export function AppShell({
);
return (
-
))}
@@ -604,16 +626,17 @@ export function CrackerPanel({
For unknown-keyed GroupText packets, this will attempt to dictionary attack, then brute
- force payloads as they arrive, testing room names up to the specified length to discover
- active rooms on the local mesh (GroupText packets may not be hashtag messages; we have no
+ force payloads as they arrive, testing channel names up to the specified length to discover
+ active channels on the local mesh (GroupText packets may not be hashtag messages; we have no
way of knowing but try as if they are).
Retry failed at n+1 will let the cracker return to the failed queue and
pick up messages it couldn't crack, attempting them at one longer length.
Try word pairs will also try every combination of two dictionary words
concatenated together (e.g. "hello" + "world" = "#helloworld") after the single-word
- dictionary pass; this can substantially increase search time.
- Decrypt historical will run an async job on any room name it finds to see
- if any historically captured packets will decrypt with that key.
+ dictionary pass; this can substantially increase search time and also result in
+ false positives.
+ Decrypt historical will run an async job on any channel name it finds to
+ see if any historically captured packets will decrypt with that key.
Turbo mode will push your GPU to the max (target dispatch time of 10s) and
may allow accelerated cracking and/or system instability.
diff --git a/frontend/src/components/MessageList.tsx b/frontend/src/components/MessageList.tsx
index fedcbaf..06f46f8 100644
--- a/frontend/src/components/MessageList.tsx
+++ b/frontend/src/components/MessageList.tsx
@@ -8,19 +8,23 @@ import {
useState,
type ReactNode,
} from 'react';
-import type { Contact, Message, MessagePath, RadioConfig } from '../types';
+import type { Channel, Contact, Message, MessagePath, RadioConfig, RawPacket } from '../types';
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
+import { api } from '../api';
import { formatTime, parseSenderFromText } from '../utils/messageParser';
import { formatHopCounts, type SenderInfo } from '../utils/pathUtils';
import { getDirectContactRoute } from '../utils/pathUtils';
import { ContactAvatar } from './ContactAvatar';
import { PathModal } from './PathModal';
+import { RawPacketInspectorDialog } from './RawPacketDetailModal';
+import { toast } from './ui/sonner';
import { handleKeyboardActivate } from '../utils/a11y';
import { cn } from '@/lib/utils';
interface MessageListProps {
messages: Message[];
contacts: Contact[];
+ channels?: Channel[];
loading: boolean;
loadingOlder?: boolean;
hasOlderMessages?: boolean;
@@ -153,6 +157,8 @@ function HopCountBadge({ paths, onClick, variant }: HopCountBadgeProps) {
const RESEND_WINDOW_SECONDS = 30;
const CORRUPT_SENDER_LABEL = '';
+const ANALYZE_PACKET_NOTICE =
+ 'This analyzer shows one stored full packet copy only. When multiple receives have identical payloads, the backend deduplicates them to a single stored packet and appends any additional receive paths onto the message path history instead of storing multiple full packet copies.';
function hasUnexpectedControlChars(text: string): boolean {
for (const char of text) {
@@ -173,6 +179,7 @@ function hasUnexpectedControlChars(text: string): boolean {
export function MessageList({
messages,
contacts,
+ channels = [],
loading,
loadingOlder = false,
hasOlderMessages = false,
@@ -199,10 +206,18 @@ export function MessageList({
paths: MessagePath[];
senderInfo: SenderInfo;
messageId?: number;
+ packetId?: number | null;
isOutgoingChan?: boolean;
} | null>(null);
const [resendableIds, setResendableIds] = useState>(new Set());
const resendTimersRef = useRef