mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-13 12:56:05 +02:00
Compare commits
29 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 8ee08ff44a | |||
| 6d9ea552bd | |||
| 2cd71bf086 | |||
| 08d55dec72 | |||
| 20532f70a3 | |||
| 659370e1eb | |||
| 7151cf3846 | |||
| 6e5256acce | |||
| 7d27567ae9 | |||
| 5f0d042252 | |||
| 6f68dfc609 | |||
| a32ddda79d | |||
| ac6a5774af | |||
| b12e612596 | |||
| d1499ad75f | |||
| 79d5e69ee0 | |||
| 1405df6039 | |||
| ac5e71d6f2 | |||
| 650a24a68c | |||
| 53f122e503 | |||
| efeb047116 | |||
| b7972f50a8 | |||
| bab1693c82 | |||
| f93844a01b | |||
| e15e6d83f7 | |||
| f9ca35b3ae | |||
| 7c4a244e05 | |||
| 6eab75ec7e | |||
| 95c874e643 |
@@ -1,3 +1,28 @@
|
|||||||
|
## [3.6.2] - 2026-03-29
|
||||||
|
|
||||||
|
Feature: Be more flexible about timing and volume of full contact offload
|
||||||
|
Feature: Improve room server and repeater ops to be much more clearer about auth status
|
||||||
|
Feature: Show last error status on integrations
|
||||||
|
Feature: Push multi-platform docker builds
|
||||||
|
Bugfix: Fix advert interval time unit display
|
||||||
|
Bugfix: Don't cast RSSI/SNR to string for community MQTT
|
||||||
|
Bugfix: Map uploader follows redirect
|
||||||
|
Misc: Thin out unnecessary cruft in unreads endpoint
|
||||||
|
Misc: Fall back gracefully if linked to an unknown contact
|
||||||
|
|
||||||
|
## [3.6.1] - 2026-03-26
|
||||||
|
|
||||||
|
Feature: MeshCore Map integration
|
||||||
|
Feature: Add warning screen about bots
|
||||||
|
Feature: Favicon reflects unread message state
|
||||||
|
Feature: Show hop map in larger modal
|
||||||
|
Feature: Add prebuilt frontend install script
|
||||||
|
Feature: Add clean service installer script
|
||||||
|
Feature: Swipe in to show menu
|
||||||
|
Bugfix: Invalid backend API path serves error, not fallback index
|
||||||
|
Bugfix: Fix some spacing/page height issues
|
||||||
|
Misc: Misc. bugfixes and performance and test improvements
|
||||||
|
|
||||||
## [3.6.0] - 2026-03-22
|
## [3.6.0] - 2026-03-22
|
||||||
|
|
||||||
Feature: Add incoming-packet analytics
|
Feature: Add incoming-packet analytics
|
||||||
|
|||||||
+33
@@ -1592,6 +1592,39 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
|
|||||||
|
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
|
### react-swipeable (7.0.2) — MIT
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Full license text</summary>
|
||||||
|
|
||||||
|
```
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (C) 2014-2022 Josh Perez
|
||||||
|
Copyright (C) 2014-2022 Brian Emil Hartz
|
||||||
|
Copyright (C) 2022 Formidable Labs, Inc.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
### sonner (2.0.7) — MIT
|
### sonner (2.0.7) — MIT
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
|
|||||||
@@ -117,6 +117,8 @@ Alternatively, if you have already cloned the repo, you can fetch just the prebu
|
|||||||
|
|
||||||
> **Warning:** Docker has had reports intermittent issues with serial event subscriptions. The native method above is more reliable.
|
> **Warning:** Docker has had reports intermittent issues with serial event subscriptions. The native method above is more reliable.
|
||||||
|
|
||||||
|
Local Docker builds are architecture-native by default. On Apple Silicon Macs and ARM64 Linux hosts such as Raspberry Pi, `docker compose build` / `docker compose up --build` will produce an ARM64 image unless you override the platform.
|
||||||
|
|
||||||
Edit `docker-compose.yaml` to set a serial device for passthrough, or uncomment your transport (serial or TCP). Then:
|
Edit `docker-compose.yaml` to set a serial device for passthrough, or uncomment your transport (serial or TCP). Then:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -148,6 +150,15 @@ docker compose pull
|
|||||||
docker compose up -d
|
docker compose up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Published Docker tags are intended to be multi-arch (`linux/amd64` and `linux/arm64`). If you are building and publishing manually, use Docker Buildx:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker buildx build \
|
||||||
|
--platform linux/amd64,linux/arm64 \
|
||||||
|
-t jkingsman/remoteterm-meshcore:latest \
|
||||||
|
--push .
|
||||||
|
```
|
||||||
|
|
||||||
The container runs as root by default for maximum serial passthrough compatibility across host setups. On Linux, if you switch between native and Docker runs, `./data` can end up root-owned. If you do not need that serial compatibility behavior, you can enable the optional `user: "${UID:-1000}:${GID:-1000}"` line in `docker-compose.yaml` to keep ownership aligned with your host user.
|
The container runs as root by default for maximum serial passthrough compatibility across host setups. On Linux, if you switch between native and Docker runs, `./data` can end up root-owned. If you do not need that serial compatibility behavior, you can enable the optional `user: "${UID:-1000}:${GID:-1000}"` line in `docker-compose.yaml` to keep ownership aligned with your host user.
|
||||||
|
|
||||||
To stop:
|
To stop:
|
||||||
|
|||||||
@@ -89,6 +89,19 @@ Amazon SQS delivery. Config blob:
|
|||||||
- Publishes a JSON envelope of the form `{"event_type":"message"|"raw_packet","data":...}`
|
- Publishes a JSON envelope of the form `{"event_type":"message"|"raw_packet","data":...}`
|
||||||
- Supports both decoded messages and raw packets via normal scope selection
|
- Supports both decoded messages and raw packets via normal scope selection
|
||||||
|
|
||||||
|
### map_upload (map_upload.py)
|
||||||
|
Uploads heard repeater and room-server advertisements to map.meshcore.dev. Config blob:
|
||||||
|
- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.dev API
|
||||||
|
- `dry_run` (bool, default `true`) — when true, logs the payload at INFO level without sending
|
||||||
|
- `geofence_enabled` (bool, default `false`) — when true, only uploads nodes within `geofence_radius_km` of the radio's own configured lat/lon
|
||||||
|
- `geofence_radius_km` (float, default `0`) — filter radius in kilometres
|
||||||
|
|
||||||
|
Geofence notes:
|
||||||
|
- The reference center is always the radio's own `adv_lat`/`adv_lon` from `radio_runtime.meshcore.self_info`, read **live at upload time** — no lat/lon is stored in the fanout config itself.
|
||||||
|
- If the radio's lat/lon is `(0, 0)` or the radio is not connected, the geofence check is silently skipped so uploads continue normally until coordinates are configured.
|
||||||
|
- Requires the radio to have `ENABLE_PRIVATE_KEY_EXPORT=1` firmware to sign uploads.
|
||||||
|
- Scope is always `{"messages": "none", "raw_packets": "all"}` — only raw RF packets are processed.
|
||||||
|
|
||||||
## Adding a New Integration Type
|
## Adding a New Integration Type
|
||||||
|
|
||||||
### Step-by-step checklist
|
### Step-by-step checklist
|
||||||
@@ -291,6 +304,7 @@ Migrations:
|
|||||||
- `app/fanout/webhook.py` — Webhook fanout module
|
- `app/fanout/webhook.py` — Webhook fanout module
|
||||||
- `app/fanout/apprise_mod.py` — Apprise fanout module
|
- `app/fanout/apprise_mod.py` — Apprise fanout module
|
||||||
- `app/fanout/sqs.py` — Amazon SQS fanout module
|
- `app/fanout/sqs.py` — Amazon SQS fanout module
|
||||||
|
- `app/fanout/map_upload.py` — Map Upload fanout module
|
||||||
- `app/repository/fanout.py` — Database CRUD
|
- `app/repository/fanout.py` — Database CRUD
|
||||||
- `app/routers/fanout.py` — REST API
|
- `app/routers/fanout.py` — REST API
|
||||||
- `app/websocket.py` — `broadcast_event()` dispatches to fanout
|
- `app/websocket.py` — `broadcast_event()` dispatches to fanout
|
||||||
|
|||||||
@@ -95,7 +95,6 @@ class AppriseModule(FanoutModule):
|
|||||||
|
|
||||||
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
||||||
super().__init__(config_id, config, name=name)
|
super().__init__(config_id, config, name=name)
|
||||||
self._last_error: str | None = None
|
|
||||||
|
|
||||||
async def on_message(self, data: dict) -> None:
|
async def on_message(self, data: dict) -> None:
|
||||||
# Skip outgoing messages — only notify on incoming
|
# Skip outgoing messages — only notify on incoming
|
||||||
@@ -114,17 +113,17 @@ class AppriseModule(FanoutModule):
|
|||||||
success = await asyncio.to_thread(
|
success = await asyncio.to_thread(
|
||||||
_send_sync, urls, body, preserve_identity=preserve_identity
|
_send_sync, urls, body, preserve_identity=preserve_identity
|
||||||
)
|
)
|
||||||
self._last_error = None if success else "Apprise notify returned failure"
|
self._set_last_error(None if success else "Apprise notify returned failure")
|
||||||
if not success:
|
if not success:
|
||||||
logger.warning("Apprise notification failed for module %s", self.config_id)
|
logger.warning("Apprise notification failed for module %s", self.config_id)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self._last_error = str(exc)
|
self._set_last_error(str(exc))
|
||||||
logger.exception("Apprise send error for module %s", self.config_id)
|
logger.exception("Apprise send error for module %s", self.config_id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
if not self.config.get("urls", "").strip():
|
if not self.config.get("urls", "").strip():
|
||||||
return "disconnected"
|
return "disconnected"
|
||||||
if self._last_error:
|
if self.last_error:
|
||||||
return "error"
|
return "error"
|
||||||
return "connected"
|
return "connected"
|
||||||
|
|||||||
@@ -3,6 +3,14 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
def _broadcast_fanout_health() -> None:
|
||||||
|
"""Push updated fanout status to connected frontend clients."""
|
||||||
|
from app.services.radio_runtime import radio_runtime as radio_manager
|
||||||
|
from app.websocket import broadcast_health
|
||||||
|
|
||||||
|
broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
|
||||||
|
|
||||||
|
|
||||||
class FanoutModule:
|
class FanoutModule:
|
||||||
"""Base class for all fanout integrations.
|
"""Base class for all fanout integrations.
|
||||||
|
|
||||||
@@ -16,6 +24,7 @@ class FanoutModule:
|
|||||||
self.config_id = config_id
|
self.config_id = config_id
|
||||||
self.config = config
|
self.config = config
|
||||||
self.name = name
|
self.name = name
|
||||||
|
self._last_error: str | None = None
|
||||||
|
|
||||||
async def start(self) -> None:
|
async def start(self) -> None:
|
||||||
"""Start the module (e.g. connect to broker). Override for persistent connections."""
|
"""Start the module (e.g. connect to broker). Override for persistent connections."""
|
||||||
@@ -34,6 +43,18 @@ class FanoutModule:
|
|||||||
"""Return 'connected', 'disconnected', or 'error'."""
|
"""Return 'connected', 'disconnected', or 'error'."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_error(self) -> str | None:
|
||||||
|
"""Return the most recent retained operator-facing error, if any."""
|
||||||
|
return self._last_error
|
||||||
|
|
||||||
|
def _set_last_error(self, value: str | None) -> None:
|
||||||
|
"""Update the retained error and broadcast health when it changes."""
|
||||||
|
if self._last_error == value:
|
||||||
|
return
|
||||||
|
self._last_error = value
|
||||||
|
_broadcast_fanout_health()
|
||||||
|
|
||||||
|
|
||||||
def get_fanout_message_text(data: dict) -> str:
|
def get_fanout_message_text(data: dict) -> str:
|
||||||
"""Return the best human-readable message body for fanout consumers.
|
"""Return the best human-readable message body for fanout consumers.
|
||||||
|
|||||||
@@ -20,9 +20,9 @@ from datetime import datetime
|
|||||||
from typing import Any, Protocol
|
from typing import Any, Protocol
|
||||||
|
|
||||||
import aiomqtt
|
import aiomqtt
|
||||||
import nacl.bindings
|
|
||||||
|
|
||||||
from app.fanout.mqtt_base import BaseMqttPublisher
|
from app.fanout.mqtt_base import BaseMqttPublisher
|
||||||
|
from app.keystore import ed25519_sign_expanded
|
||||||
from app.path_utils import parse_packet_envelope, split_path_hex
|
from app.path_utils import parse_packet_envelope, split_path_hex
|
||||||
from app.version_info import get_app_build_info
|
from app.version_info import get_app_build_info
|
||||||
|
|
||||||
@@ -40,9 +40,6 @@ _TOKEN_RENEWAL_THRESHOLD = _TOKEN_LIFETIME - 3600 # 23 hours
|
|||||||
_STATS_REFRESH_INTERVAL = 300 # 5 minutes
|
_STATS_REFRESH_INTERVAL = 300 # 5 minutes
|
||||||
_STATS_MIN_CACHE_SECS = 60 # Don't re-fetch stats within 60s
|
_STATS_MIN_CACHE_SECS = 60 # Don't re-fetch stats within 60s
|
||||||
|
|
||||||
# Ed25519 group order
|
|
||||||
_L = 2**252 + 27742317777372353535851937790883648493
|
|
||||||
|
|
||||||
# Route type mapping: bottom 2 bits of first byte
|
# Route type mapping: bottom 2 bits of first byte
|
||||||
_ROUTE_MAP = {0: "F", 1: "F", 2: "D", 3: "T"}
|
_ROUTE_MAP = {0: "F", 1: "F", 2: "D", 3: "T"}
|
||||||
|
|
||||||
@@ -69,28 +66,6 @@ def _base64url_encode(data: bytes) -> str:
|
|||||||
return base64.urlsafe_b64encode(data).rstrip(b"=").decode("ascii")
|
return base64.urlsafe_b64encode(data).rstrip(b"=").decode("ascii")
|
||||||
|
|
||||||
|
|
||||||
def _ed25519_sign_expanded(
|
|
||||||
message: bytes, scalar: bytes, prefix: bytes, public_key: bytes
|
|
||||||
) -> bytes:
|
|
||||||
"""Sign a message using MeshCore's expanded Ed25519 key format.
|
|
||||||
|
|
||||||
MeshCore stores 64-byte "orlp" format keys: scalar(32) || prefix(32).
|
|
||||||
Standard Ed25519 libraries expect seed format and would re-SHA-512 the key.
|
|
||||||
This performs the signing manually using the already-expanded key material.
|
|
||||||
|
|
||||||
Port of meshcore-packet-capture's ed25519_sign_with_expanded_key().
|
|
||||||
"""
|
|
||||||
# r = SHA-512(prefix || message) mod L
|
|
||||||
r = int.from_bytes(hashlib.sha512(prefix + message).digest(), "little") % _L
|
|
||||||
# R = r * B (base point multiplication)
|
|
||||||
R = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(r.to_bytes(32, "little"))
|
|
||||||
# k = SHA-512(R || public_key || message) mod L
|
|
||||||
k = int.from_bytes(hashlib.sha512(R + public_key + message).digest(), "little") % _L
|
|
||||||
# s = (r + k * scalar) mod L
|
|
||||||
s = (r + k * int.from_bytes(scalar, "little")) % _L
|
|
||||||
return R + s.to_bytes(32, "little")
|
|
||||||
|
|
||||||
|
|
||||||
def _generate_jwt_token(
|
def _generate_jwt_token(
|
||||||
private_key: bytes,
|
private_key: bytes,
|
||||||
public_key: bytes,
|
public_key: bytes,
|
||||||
@@ -127,7 +102,7 @@ def _generate_jwt_token(
|
|||||||
|
|
||||||
scalar = private_key[:32]
|
scalar = private_key[:32]
|
||||||
prefix = private_key[32:]
|
prefix = private_key[32:]
|
||||||
signature = _ed25519_sign_expanded(signing_input, scalar, prefix, public_key)
|
signature = ed25519_sign_expanded(signing_input, scalar, prefix, public_key)
|
||||||
|
|
||||||
return f"{header_b64}.{payload_b64}.{signature.hex()}"
|
return f"{header_b64}.{payload_b64}.{signature.hex()}"
|
||||||
|
|
||||||
@@ -200,11 +175,12 @@ def _format_raw_packet(data: dict[str, Any], device_name: str, public_key_hex: s
|
|||||||
current_time = datetime.now()
|
current_time = datetime.now()
|
||||||
ts_str = current_time.isoformat()
|
ts_str = current_time.isoformat()
|
||||||
|
|
||||||
# SNR/RSSI are always strings in reference output.
|
# Keep numeric telemetry numeric so downstream analyzers can ingest it.
|
||||||
|
# Preserve the existing "Unknown" fallback for missing values.
|
||||||
snr_val = data.get("snr")
|
snr_val = data.get("snr")
|
||||||
rssi_val = data.get("rssi")
|
rssi_val = data.get("rssi")
|
||||||
snr = str(snr_val) if snr_val is not None else "Unknown"
|
snr: float | str = float(snr_val) if snr_val is not None else "Unknown"
|
||||||
rssi = str(rssi_val) if rssi_val is not None else "Unknown"
|
rssi: int | str = int(rssi_val) if rssi_val is not None else "Unknown"
|
||||||
|
|
||||||
packet_hash = _calculate_packet_hash(raw_bytes)
|
packet_hash = _calculate_packet_hash(raw_bytes)
|
||||||
|
|
||||||
|
|||||||
+65
-6
@@ -15,12 +15,21 @@ _DISPATCH_TIMEOUT_SECONDS = 30.0
|
|||||||
_MODULE_TYPES: dict[str, type] = {}
|
_MODULE_TYPES: dict[str, type] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _format_error_detail(exc: Exception) -> str:
|
||||||
|
"""Return a short operator-facing error string."""
|
||||||
|
message = str(exc).strip()
|
||||||
|
if message:
|
||||||
|
return f"{type(exc).__name__}: {message}"
|
||||||
|
return type(exc).__name__
|
||||||
|
|
||||||
|
|
||||||
def _register_module_types() -> None:
|
def _register_module_types() -> None:
|
||||||
"""Lazily populate the type registry to avoid circular imports."""
|
"""Lazily populate the type registry to avoid circular imports."""
|
||||||
if _MODULE_TYPES:
|
if _MODULE_TYPES:
|
||||||
return
|
return
|
||||||
from app.fanout.apprise_mod import AppriseModule
|
from app.fanout.apprise_mod import AppriseModule
|
||||||
from app.fanout.bot import BotModule
|
from app.fanout.bot import BotModule
|
||||||
|
from app.fanout.map_upload import MapUploadModule
|
||||||
from app.fanout.mqtt_community import MqttCommunityModule
|
from app.fanout.mqtt_community import MqttCommunityModule
|
||||||
from app.fanout.mqtt_private import MqttPrivateModule
|
from app.fanout.mqtt_private import MqttPrivateModule
|
||||||
from app.fanout.sqs import SqsModule
|
from app.fanout.sqs import SqsModule
|
||||||
@@ -32,6 +41,7 @@ def _register_module_types() -> None:
|
|||||||
_MODULE_TYPES["webhook"] = WebhookModule
|
_MODULE_TYPES["webhook"] = WebhookModule
|
||||||
_MODULE_TYPES["apprise"] = AppriseModule
|
_MODULE_TYPES["apprise"] = AppriseModule
|
||||||
_MODULE_TYPES["sqs"] = SqsModule
|
_MODULE_TYPES["sqs"] = SqsModule
|
||||||
|
_MODULE_TYPES["map_upload"] = MapUploadModule
|
||||||
|
|
||||||
|
|
||||||
def _matches_filter(filter_value: Any, key: str) -> bool:
|
def _matches_filter(filter_value: Any, key: str) -> bool:
|
||||||
@@ -83,6 +93,23 @@ class FanoutManager:
|
|||||||
self._modules: dict[str, tuple[FanoutModule, dict]] = {} # id -> (module, scope)
|
self._modules: dict[str, tuple[FanoutModule, dict]] = {} # id -> (module, scope)
|
||||||
self._restart_locks: dict[str, asyncio.Lock] = {}
|
self._restart_locks: dict[str, asyncio.Lock] = {}
|
||||||
self._bots_disabled_until_restart = False
|
self._bots_disabled_until_restart = False
|
||||||
|
self._module_errors: dict[str, str] = {}
|
||||||
|
|
||||||
|
def _broadcast_health_update(self) -> None:
|
||||||
|
from app.services.radio_runtime import radio_runtime as radio_manager
|
||||||
|
from app.websocket import broadcast_health
|
||||||
|
|
||||||
|
broadcast_health(radio_manager.is_connected, radio_manager.connection_info)
|
||||||
|
|
||||||
|
def _set_module_error(self, config_id: str, error: str) -> None:
|
||||||
|
if self._module_errors.get(config_id) == error:
|
||||||
|
return
|
||||||
|
self._module_errors[config_id] = error
|
||||||
|
self._broadcast_health_update()
|
||||||
|
|
||||||
|
def _clear_module_error(self, config_id: str) -> None:
|
||||||
|
if self._module_errors.pop(config_id, None) is not None:
|
||||||
|
self._broadcast_health_update()
|
||||||
|
|
||||||
def get_bots_disabled_source(self) -> str | None:
|
def get_bots_disabled_source(self) -> str | None:
|
||||||
"""Return why bot modules are unavailable, if at all."""
|
"""Return why bot modules are unavailable, if at all."""
|
||||||
@@ -132,11 +159,13 @@ class FanoutManager:
|
|||||||
module = cls(config_id, config_blob, name=cfg.get("name", ""))
|
module = cls(config_id, config_blob, name=cfg.get("name", ""))
|
||||||
await module.start()
|
await module.start()
|
||||||
self._modules[config_id] = (module, scope)
|
self._modules[config_id] = (module, scope)
|
||||||
|
self._clear_module_error(config_id)
|
||||||
logger.info(
|
logger.info(
|
||||||
"Started fanout module %s (type=%s)", cfg.get("name", config_id), config_type
|
"Started fanout module %s (type=%s)", cfg.get("name", config_id), config_type
|
||||||
)
|
)
|
||||||
except Exception:
|
except Exception as exc:
|
||||||
logger.exception("Failed to start fanout module %s", config_id)
|
logger.exception("Failed to start fanout module %s", config_id)
|
||||||
|
self._set_module_error(config_id, _format_error_detail(exc))
|
||||||
|
|
||||||
async def reload_config(self, config_id: str) -> None:
|
async def reload_config(self, config_id: str) -> None:
|
||||||
"""Stop old module (if any) and start updated config."""
|
"""Stop old module (if any) and start updated config."""
|
||||||
@@ -160,6 +189,7 @@ class FanoutManager:
|
|||||||
await module.stop()
|
await module.stop()
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.exception("Error stopping fanout module %s", config_id)
|
logger.exception("Error stopping fanout module %s", config_id)
|
||||||
|
self._clear_module_error(config_id)
|
||||||
|
|
||||||
async def _dispatch_matching(
|
async def _dispatch_matching(
|
||||||
self,
|
self,
|
||||||
@@ -189,7 +219,10 @@ class FanoutManager:
|
|||||||
try:
|
try:
|
||||||
handler = getattr(module, handler_name)
|
handler = getattr(module, handler_name)
|
||||||
await asyncio.wait_for(handler(data), timeout=_DISPATCH_TIMEOUT_SECONDS)
|
await asyncio.wait_for(handler(data), timeout=_DISPATCH_TIMEOUT_SECONDS)
|
||||||
|
self._clear_module_error(config_id)
|
||||||
except asyncio.TimeoutError:
|
except asyncio.TimeoutError:
|
||||||
|
timeout_error = f"{handler_name} timed out after {_DISPATCH_TIMEOUT_SECONDS:.1f}s"
|
||||||
|
self._set_module_error(config_id, timeout_error)
|
||||||
logger.error(
|
logger.error(
|
||||||
"Fanout %s %s timed out after %.1fs; restarting module",
|
"Fanout %s %s timed out after %.1fs; restarting module",
|
||||||
config_id,
|
config_id,
|
||||||
@@ -197,7 +230,8 @@ class FanoutManager:
|
|||||||
_DISPATCH_TIMEOUT_SECONDS,
|
_DISPATCH_TIMEOUT_SECONDS,
|
||||||
)
|
)
|
||||||
await self._restart_module(config_id, module)
|
await self._restart_module(config_id, module)
|
||||||
except Exception:
|
except Exception as exc:
|
||||||
|
self._set_module_error(config_id, _format_error_detail(exc))
|
||||||
logger.exception("Fanout %s %s error", config_id, log_label)
|
logger.exception("Fanout %s %s error", config_id, log_label)
|
||||||
|
|
||||||
async def _restart_module(self, config_id: str, module: FanoutModule) -> None:
|
async def _restart_module(self, config_id: str, module: FanoutModule) -> None:
|
||||||
@@ -213,6 +247,10 @@ class FanoutManager:
|
|||||||
except Exception:
|
except Exception:
|
||||||
logger.exception("Failed to restart timed-out fanout module %s", config_id)
|
logger.exception("Failed to restart timed-out fanout module %s", config_id)
|
||||||
self._modules.pop(config_id, None)
|
self._modules.pop(config_id, None)
|
||||||
|
self._set_module_error(
|
||||||
|
config_id,
|
||||||
|
"Module restart failed after timeout",
|
||||||
|
)
|
||||||
|
|
||||||
async def broadcast_message(self, data: dict) -> None:
|
async def broadcast_message(self, data: dict) -> None:
|
||||||
"""Dispatch a decoded message to modules whose scope matches."""
|
"""Dispatch a decoded message to modules whose scope matches."""
|
||||||
@@ -241,18 +279,39 @@ class FanoutManager:
|
|||||||
logger.exception("Error stopping fanout module %s", config_id)
|
logger.exception("Error stopping fanout module %s", config_id)
|
||||||
self._modules.clear()
|
self._modules.clear()
|
||||||
self._restart_locks.clear()
|
self._restart_locks.clear()
|
||||||
|
self._module_errors.clear()
|
||||||
|
|
||||||
def get_statuses(self) -> dict[str, dict[str, str]]:
|
def get_statuses(self) -> dict[str, dict[str, str | None]]:
|
||||||
"""Return status info for each active module."""
|
"""Return status info for each active module."""
|
||||||
from app.repository.fanout import _configs_cache
|
from app.repository.fanout import _configs_cache
|
||||||
|
|
||||||
result: dict[str, dict[str, str]] = {}
|
result: dict[str, dict[str, str | None]] = {}
|
||||||
for config_id, (module, _) in self._modules.items():
|
all_ids = set(_configs_cache) | set(self._modules) | set(self._module_errors)
|
||||||
|
for config_id in all_ids:
|
||||||
info = _configs_cache.get(config_id, {})
|
info = _configs_cache.get(config_id, {})
|
||||||
|
if info.get("enabled") is False:
|
||||||
|
continue
|
||||||
|
|
||||||
|
module_entry = self._modules.get(config_id)
|
||||||
|
module = module_entry[0] if module_entry is not None else None
|
||||||
|
last_error = module.last_error if module is not None else None
|
||||||
|
status = module.status if module is not None else "error"
|
||||||
|
|
||||||
|
manager_error = self._module_errors.get(config_id)
|
||||||
|
if manager_error is not None:
|
||||||
|
status = "error"
|
||||||
|
last_error = manager_error
|
||||||
|
elif last_error is not None and status != "error":
|
||||||
|
status = "error"
|
||||||
|
|
||||||
|
if module is None and last_error is None:
|
||||||
|
continue
|
||||||
|
|
||||||
result[config_id] = {
|
result[config_id] = {
|
||||||
"name": info.get("name", config_id),
|
"name": info.get("name", config_id),
|
||||||
"type": info.get("type", "unknown"),
|
"type": info.get("type", "unknown"),
|
||||||
"status": module.status,
|
"status": status,
|
||||||
|
"last_error": last_error,
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,322 @@
|
|||||||
|
"""Fanout module for uploading heard advert packets to map.meshcore.dev.
|
||||||
|
|
||||||
|
Mirrors the logic of the standalone map.meshcore.dev-uploader project:
|
||||||
|
- Listens on raw RF packets via on_raw
|
||||||
|
- Filters for ADVERT packets, only processes repeaters (role 2) and rooms (role 3)
|
||||||
|
- Skips nodes with no valid location (lat/lon None)
|
||||||
|
- Applies per-pubkey rate-limiting (1-hour window, matching the uploader)
|
||||||
|
- Signs the upload request with the radio's own Ed25519 private key
|
||||||
|
- POSTs to the map API (or logs in dry-run mode)
|
||||||
|
|
||||||
|
Dry-run mode (default: True) logs the full would-be payload at INFO level
|
||||||
|
without making any HTTP requests. Disable it only after verifying the log
|
||||||
|
output looks correct — in particular the radio params (freq/bw/sf/cr) and
|
||||||
|
the raw hex link.
|
||||||
|
|
||||||
|
Config keys
|
||||||
|
-----------
|
||||||
|
api_url : str, default ""
|
||||||
|
Upload endpoint. Empty string falls back to the public map.meshcore.dev API.
|
||||||
|
dry_run : bool, default True
|
||||||
|
When True, log the payload at INFO level instead of sending it.
|
||||||
|
geofence_enabled : bool, default False
|
||||||
|
When True, only upload nodes whose location falls within geofence_radius_km of
|
||||||
|
the radio's own configured latitude/longitude (read live from the radio at upload
|
||||||
|
time — no lat/lon is stored in this config). When the radio's lat/lon is not set
|
||||||
|
(0, 0) or unavailable, the geofence check is silently skipped so uploads continue
|
||||||
|
normally until coordinates are configured.
|
||||||
|
geofence_radius_km : float, default 0.0
|
||||||
|
Radius of the geofence in kilometres. Nodes further than this distance
|
||||||
|
from the radio's own position are skipped.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import math
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from app.decoder import parse_advertisement, parse_packet
|
||||||
|
from app.fanout.base import FanoutModule
|
||||||
|
from app.keystore import ed25519_sign_expanded, get_private_key, get_public_key
|
||||||
|
from app.services.radio_runtime import radio_runtime
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_DEFAULT_API_URL = "https://map.meshcore.dev/api/v1/uploader/node"
|
||||||
|
|
||||||
|
# Re-upload guard: skip re-uploading a pubkey seen within this window (AU parity)
|
||||||
|
_REUPLOAD_SECONDS = 3600
|
||||||
|
|
||||||
|
# Only upload repeaters (2) and rooms (3). Any other role — including future
|
||||||
|
# roles not yet defined — is rejected. An allowlist is used rather than a
|
||||||
|
# blocklist so that new roles cannot accidentally start populating the map.
|
||||||
|
_ALLOWED_DEVICE_ROLES = {2, 3}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_radio_params() -> dict:
|
||||||
|
"""Read radio frequency parameters from the connected radio's self_info.
|
||||||
|
|
||||||
|
The Python meshcore library returns radio_freq in MHz (e.g. 910.525) and
|
||||||
|
radio_bw in kHz (e.g. 62.5). These are exactly the units the map API
|
||||||
|
expects, matching what the JS reference uploader produces after its own
|
||||||
|
/1000 division on raw integer values. No further scaling is applied here.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
mc = radio_runtime.meshcore
|
||||||
|
if not mc:
|
||||||
|
return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
|
||||||
|
info = mc.self_info
|
||||||
|
if not isinstance(info, dict):
|
||||||
|
return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
|
||||||
|
freq = info.get("radio_freq", 0) or 0
|
||||||
|
bw = info.get("radio_bw", 0) or 0
|
||||||
|
sf = info.get("radio_sf", 0) or 0
|
||||||
|
cr = info.get("radio_cr", 0) or 0
|
||||||
|
return {
|
||||||
|
"freq": freq,
|
||||||
|
"cr": cr,
|
||||||
|
"sf": sf,
|
||||||
|
"bw": bw,
|
||||||
|
}
|
||||||
|
except Exception as exc:
|
||||||
|
logger.debug("MapUpload: could not read radio params: %s", exc)
|
||||||
|
return {"freq": 0, "cr": 0, "sf": 0, "bw": 0}
|
||||||
|
|
||||||
|
|
||||||
|
_ROLE_NAMES: dict[int, str] = {2: "repeater", 3: "room"}
|
||||||
|
|
||||||
|
|
||||||
|
def _haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
|
||||||
|
"""Return the great-circle distance in kilometres between two lat/lon points."""
|
||||||
|
r = 6371.0
|
||||||
|
phi1, phi2 = math.radians(lat1), math.radians(lat2)
|
||||||
|
dphi = math.radians(lat2 - lat1)
|
||||||
|
dlam = math.radians(lon2 - lon1)
|
||||||
|
a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2) ** 2
|
||||||
|
return 2 * r * math.asin(math.sqrt(a))
|
||||||
|
|
||||||
|
|
||||||
|
class MapUploadModule(FanoutModule):
    """Uploads heard ADVERT packets to the MeshCore community map.

    Lifecycle: start() creates the shared HTTP client, on_raw() filters and
    rate-limits incoming adverts, _upload() geofences, signs, and POSTs them.
    NOTE(review): _last_error / _set_last_error / last_error appear to be
    provided by the FanoutModule base class — confirm against its definition.
    """

    def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
        """Store config; no I/O happens until start()."""
        super().__init__(config_id, config, name=name)
        # Created in start(), closed in stop(); None while stopped.
        self._client: httpx.AsyncClient | None = None
        # Per-pubkey rate limiting: pubkey_hex -> last_uploaded_advert_timestamp
        self._seen: dict[str, int] = {}

    async def start(self) -> None:
        """Create the HTTP client and reset error/rate-limit state."""
        self._client = httpx.AsyncClient(
            timeout=httpx.Timeout(15.0),
            follow_redirects=True,
        )
        self._last_error = None
        self._seen.clear()

    async def stop(self) -> None:
        """Close the HTTP client and clear retained error state."""
        if self._client:
            await self._client.aclose()
        self._client = None
        self._last_error = None

    async def on_raw(self, data: dict) -> None:
        """Filter one raw packet event and upload it if it qualifies.

        Expects `data` to carry "payload_type" and a hex-encoded "data"
        field. Anything that is not a parseable repeater/room ADVERT with
        a valid location — or that fails the per-pubkey rate limit — is
        silently dropped (debug-logged where useful).
        """
        if data.get("payload_type") != "ADVERT":
            return

        raw_hex = data.get("data", "")
        if not raw_hex:
            return

        try:
            raw_bytes = bytes.fromhex(raw_hex)
        except ValueError:
            # Not valid hex — nothing we can parse or forward.
            return

        packet_info = parse_packet(raw_bytes)
        if packet_info is None:
            return

        advert = parse_advertisement(packet_info.payload, raw_packet=raw_bytes)
        if advert is None:
            return

        # TODO: advert Ed25519 signature verification is skipped here.
        # The radio has already validated the packet before passing it to RT,
        # so re-verification is redundant in practice. If added, verify that
        # nacl.bindings.crypto_sign_open(sig + (pubkey_bytes || timestamp_bytes),
        # advert.public_key_bytes) succeeds before proceeding.

        # Only process repeaters (2) and rooms (3) — any other role is rejected
        if advert.device_role not in _ALLOWED_DEVICE_ROLES:
            return

        # Skip nodes with no valid location — the decoder already nulls out
        # impossible values, so None means either no location flag or bad coords.
        if advert.lat is None or advert.lon is None:
            logger.debug(
                "MapUpload: skipping %s — no valid location",
                advert.public_key[:12],
            )
            return

        pubkey = advert.public_key.lower()

        # Rate-limit: skip if this pubkey's timestamp hasn't advanced enough
        last_seen = self._seen.get(pubkey)
        if last_seen is not None:
            # A non-advancing timestamp suggests a replayed advert.
            if last_seen >= advert.timestamp:
                logger.debug(
                    "MapUpload: skipping %s — possible replay (last=%d, advert=%d)",
                    pubkey[:12],
                    last_seen,
                    advert.timestamp,
                )
                return
            # Newer, but still inside the re-upload window (_REUPLOAD_SECONDS).
            if advert.timestamp < last_seen + _REUPLOAD_SECONDS:
                logger.debug(
                    "MapUpload: skipping %s — within 1-hr rate-limit window (delta=%ds)",
                    pubkey[:12],
                    advert.timestamp - last_seen,
                )
                return

        await self._upload(
            pubkey, advert.timestamp, advert.device_role, raw_hex, advert.lat, advert.lon
        )

    async def _upload(
        self,
        pubkey: str,
        advert_timestamp: int,
        device_role: int,
        raw_hex: str,
        lat: float,
        lon: float,
    ) -> None:
        """Geofence-check, sign, and POST one advert to the configured map API.

        In dry_run mode the request is only logged; _seen is still updated
        so rate-limiting can be exercised during testing.
        """
        # Geofence check: if enabled, skip nodes outside the configured radius.
        # The reference center is the radio's own lat/lon read live from self_info —
        # no coordinates are stored in the fanout config. If the radio lat/lon is
        # (0, 0) or unavailable the check is skipped transparently so uploads
        # continue normally until the operator sets coordinates in radio settings.
        geofence_dist_km: float | None = None
        if self.config.get("geofence_enabled"):
            try:
                mc = radio_runtime.meshcore
                sinfo = mc.self_info if mc else None
                fence_lat = float((sinfo or {}).get("adv_lat", 0) or 0)
                fence_lon = float((sinfo or {}).get("adv_lon", 0) or 0)
            except Exception as exc:
                logger.debug("MapUpload: could not read radio lat/lon for geofence: %s", exc)
                fence_lat = 0.0
                fence_lon = 0.0

            if fence_lat == 0.0 and fence_lon == 0.0:
                logger.debug(
                    "MapUpload: geofence skipped for %s — radio lat/lon not configured",
                    pubkey[:12],
                )
            else:
                fence_radius_km = float(self.config.get("geofence_radius_km", 0) or 0)
                geofence_dist_km = _haversine_km(fence_lat, fence_lon, lat, lon)
                if geofence_dist_km > fence_radius_km:
                    logger.debug(
                        "MapUpload: skipping %s — outside geofence (%.2f km > %.2f km)",
                        pubkey[:12],
                        geofence_dist_km,
                        fence_radius_km,
                    )
                    return

        private_key = get_private_key()
        public_key = get_public_key()

        if private_key is None or public_key is None:
            logger.warning(
                "MapUpload: private key not available — cannot sign upload for %s. "
                "Ensure radio firmware has ENABLE_PRIVATE_KEY_EXPORT=1.",
                pubkey[:12],
            )
            return

        api_url = str(self.config.get("api_url", "") or _DEFAULT_API_URL).strip()
        dry_run = bool(self.config.get("dry_run", True))
        role_name = _ROLE_NAMES.get(device_role, f"role={device_role}")

        params = _get_radio_params()
        upload_data = {
            "params": params,
            "links": [f"meshcore://{raw_hex}"],
        }

        # Sign: SHA-256 the compact JSON, then Ed25519-sign the hash
        json_str = json.dumps(upload_data, separators=(",", ":"))
        data_hash = hashlib.sha256(json_str.encode()).digest()
        # MeshCore private keys are 64 bytes: scalar(32) || prefix(32).
        scalar = private_key[:32]
        prefix_bytes = private_key[32:]
        signature = ed25519_sign_expanded(data_hash, scalar, prefix_bytes, public_key)

        request_payload = {
            "data": json_str,
            "signature": signature.hex(),
            "publicKey": public_key.hex(),
        }

        if dry_run:
            geofence_note = (
                f" | geofence: {geofence_dist_km:.2f} km from observer"
                if geofence_dist_km is not None
                else ""
            )
            logger.info(
                "MapUpload [DRY RUN] %s (%s)%s → would POST to %s\n  payload: %s",
                pubkey[:12],
                role_name,
                geofence_note,
                api_url,
                json.dumps(request_payload, separators=(",", ":")),
            )
            # Still update _seen so rate-limiting works during dry-run testing
            self._seen[pubkey] = advert_timestamp
            return

        if not self._client:
            return

        try:
            resp = await self._client.post(
                api_url,
                content=json.dumps(request_payload, separators=(",", ":")),
                headers={"Content-Type": "application/json"},
            )
            resp.raise_for_status()
            # Only mark as uploaded after the server accepted the request.
            self._seen[pubkey] = advert_timestamp
            self._set_last_error(None)
            logger.info(
                "MapUpload: uploaded %s (%s) → HTTP %d",
                pubkey[:12],
                role_name,
                resp.status_code,
            )
        except httpx.HTTPStatusError as exc:
            self._set_last_error(f"HTTP {exc.response.status_code}")
            logger.warning(
                "MapUpload: server returned %d for %s: %s",
                exc.response.status_code,
                pubkey[:12],
                exc.response.text[:200],
            )
        except httpx.RequestError as exc:
            self._set_last_error(str(exc))
            logger.warning("MapUpload: request error for %s: %s", pubkey[:12], exc)

    @property
    def status(self) -> str:
        """Report "disconnected" (stopped), "error" (retained failure), or "connected"."""
        if self._client is None:
            return "disconnected"
        if self.last_error:
            return "error"
        return "connected"
|
||||||
@@ -23,6 +23,14 @@ logger = logging.getLogger(__name__)
|
|||||||
_BACKOFF_MIN = 5
|
_BACKOFF_MIN = 5
|
||||||
|
|
||||||
|
|
||||||
|
def _format_error_detail(exc: Exception) -> str:
|
||||||
|
"""Return a short operator-facing error string."""
|
||||||
|
message = str(exc).strip()
|
||||||
|
if message:
|
||||||
|
return message
|
||||||
|
return type(exc).__name__
|
||||||
|
|
||||||
|
|
||||||
def _broadcast_health() -> None:
|
def _broadcast_health() -> None:
|
||||||
"""Push updated health (including MQTT status) to all WS clients."""
|
"""Push updated health (including MQTT status) to all WS clients."""
|
||||||
from app.services.radio_runtime import radio_runtime as radio_manager
|
from app.services.radio_runtime import radio_runtime as radio_manager
|
||||||
@@ -55,6 +63,7 @@ class BaseMqttPublisher(ABC):
|
|||||||
self._version_event: asyncio.Event = asyncio.Event()
|
self._version_event: asyncio.Event = asyncio.Event()
|
||||||
self.connected: bool = False
|
self.connected: bool = False
|
||||||
self.integration_name: str = ""
|
self.integration_name: str = ""
|
||||||
|
self._last_error: str | None = None
|
||||||
|
|
||||||
def set_integration_name(self, name: str) -> None:
|
def set_integration_name(self, name: str) -> None:
|
||||||
"""Attach the configured fanout-module name for operator-facing logs."""
|
"""Attach the configured fanout-module name for operator-facing logs."""
|
||||||
@@ -66,11 +75,17 @@ class BaseMqttPublisher(ABC):
|
|||||||
return f"{self._log_prefix} [{self.integration_name}]"
|
return f"{self._log_prefix} [{self.integration_name}]"
|
||||||
return self._log_prefix
|
return self._log_prefix
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_error(self) -> str | None:
|
||||||
|
"""Return the most recent retained connection/publish error."""
|
||||||
|
return self._last_error
|
||||||
|
|
||||||
# ── Lifecycle ──────────────────────────────────────────────────────
|
# ── Lifecycle ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
async def start(self, settings: object) -> None:
|
async def start(self, settings: object) -> None:
|
||||||
"""Start the background connection loop."""
|
"""Start the background connection loop."""
|
||||||
self._settings = settings
|
self._settings = settings
|
||||||
|
self._last_error = None
|
||||||
self._settings_version += 1
|
self._settings_version += 1
|
||||||
self._version_event.set()
|
self._version_event.set()
|
||||||
if self._task is None or self._task.done():
|
if self._task is None or self._task.done():
|
||||||
@@ -87,6 +102,7 @@ class BaseMqttPublisher(ABC):
|
|||||||
self._task = None
|
self._task = None
|
||||||
self._client = None
|
self._client = None
|
||||||
self.connected = False
|
self.connected = False
|
||||||
|
self._last_error = None
|
||||||
|
|
||||||
async def restart(self, settings: object) -> None:
|
async def restart(self, settings: object) -> None:
|
||||||
"""Called when settings change — stop + start."""
|
"""Called when settings change — stop + start."""
|
||||||
@@ -109,6 +125,7 @@ class BaseMqttPublisher(ABC):
|
|||||||
exc_info=True,
|
exc_info=True,
|
||||||
)
|
)
|
||||||
self.connected = False
|
self.connected = False
|
||||||
|
self._last_error = _format_error_detail(e)
|
||||||
# Wake the connection loop so it exits the wait and reconnects
|
# Wake the connection loop so it exits the wait and reconnects
|
||||||
self._settings_version += 1
|
self._settings_version += 1
|
||||||
self._version_event.set()
|
self._version_event.set()
|
||||||
@@ -198,6 +215,7 @@ class BaseMqttPublisher(ABC):
|
|||||||
async with aiomqtt.Client(**client_kwargs) as client:
|
async with aiomqtt.Client(**client_kwargs) as client:
|
||||||
self._client = client
|
self._client = client
|
||||||
self.connected = True
|
self.connected = True
|
||||||
|
self._last_error = None
|
||||||
backoff = _BACKOFF_MIN
|
backoff = _BACKOFF_MIN
|
||||||
|
|
||||||
title, detail = self._on_connected(settings)
|
title, detail = self._on_connected(settings)
|
||||||
@@ -232,6 +250,7 @@ class BaseMqttPublisher(ABC):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.connected = False
|
self.connected = False
|
||||||
self._client = None
|
self._client = None
|
||||||
|
self._last_error = _format_error_detail(e)
|
||||||
|
|
||||||
title, detail = self._on_error()
|
title, detail = self._on_error()
|
||||||
broadcast_error(title, detail)
|
broadcast_error(title, detail)
|
||||||
|
|||||||
@@ -98,9 +98,15 @@ class MqttCommunityModule(FanoutModule):
|
|||||||
@property
|
@property
|
||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
if self._publisher._is_configured():
|
if self._publisher._is_configured():
|
||||||
|
if self._publisher.last_error:
|
||||||
|
return "error"
|
||||||
return "connected" if self._publisher.connected else "disconnected"
|
return "connected" if self._publisher.connected else "disconnected"
|
||||||
return "disconnected"
|
return "disconnected"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_error(self) -> str | None:
|
||||||
|
return self._publisher.last_error
|
||||||
|
|
||||||
|
|
||||||
async def _publish_community_packet(
|
async def _publish_community_packet(
|
||||||
publisher: CommunityMqttPublisher,
|
publisher: CommunityMqttPublisher,
|
||||||
|
|||||||
@@ -59,4 +59,10 @@ class MqttPrivateModule(FanoutModule):
|
|||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
if not self.config.get("broker_host"):
|
if not self.config.get("broker_host"):
|
||||||
return "disconnected"
|
return "disconnected"
|
||||||
|
if self._publisher.last_error:
|
||||||
|
return "error"
|
||||||
return "connected" if self._publisher.connected else "disconnected"
|
return "connected" if self._publisher.connected else "disconnected"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_error(self) -> str | None:
|
||||||
|
return self._publisher.last_error
|
||||||
|
|||||||
+4
-5
@@ -84,7 +84,6 @@ class SqsModule(FanoutModule):
|
|||||||
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
||||||
super().__init__(config_id, config, name=name)
|
super().__init__(config_id, config, name=name)
|
||||||
self._client = None
|
self._client = None
|
||||||
self._last_error: str | None = None
|
|
||||||
|
|
||||||
async def start(self) -> None:
|
async def start(self) -> None:
|
||||||
kwargs: dict[str, str] = {}
|
kwargs: dict[str, str] = {}
|
||||||
@@ -147,18 +146,18 @@ class SqsModule(FanoutModule):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
await asyncio.to_thread(partial(self._client.send_message, **request_kwargs))
|
await asyncio.to_thread(partial(self._client.send_message, **request_kwargs))
|
||||||
self._last_error = None
|
self._set_last_error(None)
|
||||||
except (ClientError, BotoCoreError) as exc:
|
except (ClientError, BotoCoreError) as exc:
|
||||||
self._last_error = str(exc)
|
self._set_last_error(str(exc))
|
||||||
logger.warning("SQS %s send error: %s", self.config_id, exc)
|
logger.warning("SQS %s send error: %s", self.config_id, exc)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self._last_error = str(exc)
|
self._set_last_error(str(exc))
|
||||||
logger.exception("Unexpected SQS send error for %s", self.config_id)
|
logger.exception("Unexpected SQS send error for %s", self.config_id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
if not str(self.config.get("queue_url", "")).strip():
|
if not str(self.config.get("queue_url", "")).strip():
|
||||||
return "disconnected"
|
return "disconnected"
|
||||||
if self._last_error:
|
if self.last_error:
|
||||||
return "error"
|
return "error"
|
||||||
return "connected"
|
return "connected"
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ class WebhookModule(FanoutModule):
|
|||||||
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
|
||||||
super().__init__(config_id, config, name=name)
|
super().__init__(config_id, config, name=name)
|
||||||
self._client: httpx.AsyncClient | None = None
|
self._client: httpx.AsyncClient | None = None
|
||||||
self._last_error: str | None = None
|
|
||||||
|
|
||||||
async def start(self) -> None:
|
async def start(self) -> None:
|
||||||
self._client = httpx.AsyncClient(timeout=httpx.Timeout(10.0))
|
self._client = httpx.AsyncClient(timeout=httpx.Timeout(10.0))
|
||||||
@@ -62,9 +61,9 @@ class WebhookModule(FanoutModule):
|
|||||||
try:
|
try:
|
||||||
resp = await self._client.request(method, url, content=body_bytes, headers=headers)
|
resp = await self._client.request(method, url, content=body_bytes, headers=headers)
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
self._last_error = None
|
self._set_last_error(None)
|
||||||
except httpx.HTTPStatusError as exc:
|
except httpx.HTTPStatusError as exc:
|
||||||
self._last_error = f"HTTP {exc.response.status_code}"
|
self._set_last_error(f"HTTP {exc.response.status_code}")
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Webhook %s returned %s for %s",
|
"Webhook %s returned %s for %s",
|
||||||
self.config_id,
|
self.config_id,
|
||||||
@@ -72,13 +71,13 @@ class WebhookModule(FanoutModule):
|
|||||||
url,
|
url,
|
||||||
)
|
)
|
||||||
except httpx.RequestError as exc:
|
except httpx.RequestError as exc:
|
||||||
self._last_error = str(exc)
|
self._set_last_error(str(exc))
|
||||||
logger.warning("Webhook %s request error: %s", self.config_id, exc)
|
logger.warning("Webhook %s request error: %s", self.config_id, exc)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
if not self.config.get("url"):
|
if not self.config.get("url"):
|
||||||
return "disconnected"
|
return "disconnected"
|
||||||
if self._last_error:
|
if self.last_error:
|
||||||
return "error"
|
return "error"
|
||||||
return "connected"
|
return "connected"
|
||||||
|
|||||||
+24
-1
@@ -1,14 +1,18 @@
|
|||||||
"""
|
"""
|
||||||
Ephemeral keystore for storing sensitive keys in memory.
|
Ephemeral keystore for storing sensitive keys in memory, plus the Ed25519
|
||||||
|
signing primitive used by fanout modules that need to sign requests with the
|
||||||
|
radio's own key.
|
||||||
|
|
||||||
The private key is stored in memory only and is never persisted to disk.
|
The private key is stored in memory only and is never persisted to disk.
|
||||||
It's exported from the radio on startup and reconnect, then used for
|
It's exported from the radio on startup and reconnect, then used for
|
||||||
server-side decryption of direct messages.
|
server-side decryption of direct messages.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import nacl.bindings
|
||||||
from meshcore import EventType
|
from meshcore import EventType
|
||||||
|
|
||||||
from app.decoder import derive_public_key
|
from app.decoder import derive_public_key
|
||||||
@@ -25,11 +29,30 @@ NO_EVENT_RECEIVED_GUIDANCE = (
|
|||||||
"issue commands to the radio."
|
"issue commands to the radio."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Ed25519 group order (L) — used in the expanded signing primitive below
|
||||||
|
_L = 2**252 + 27742317777372353535851937790883648493
|
||||||
|
|
||||||
# In-memory storage for the private key and derived public key
|
# In-memory storage for the private key and derived public key
|
||||||
_private_key: bytes | None = None
|
_private_key: bytes | None = None
|
||||||
_public_key: bytes | None = None
|
_public_key: bytes | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def ed25519_sign_expanded(message: bytes, scalar: bytes, prefix: bytes, public_key: bytes) -> bytes:
    """Sign a message using MeshCore's expanded Ed25519 key format.

    MeshCore stores 64-byte keys as scalar(32) || prefix(32). Standard
    Ed25519 libraries expect seed format and would re-SHA-512 the key, so we
    perform the signing manually using the already-expanded key material.

    Port of meshcore-packet-capture's ed25519_sign_with_expanded_key().

    Args:
        message: bytes to sign (here, a pre-hashed digest from the caller).
        scalar: 32-byte expanded secret scalar (little-endian).
        prefix: 32-byte nonce-derivation prefix.
        public_key: 32-byte compressed public key point A.

    Returns:
        64-byte signature R(32) || S(32), per the RFC 8032 wire format.
    """
    # Deterministic nonce: r = SHA-512(prefix || message) mod L (RFC 8032 §5.1.6 step 2).
    r = int.from_bytes(hashlib.sha512(prefix + message).digest(), "little") % _L
    # Commitment point R = r·B; noclamp because r is already reduced mod L.
    R = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(r.to_bytes(32, "little"))
    # Challenge: k = SHA-512(R || A || message) mod L.
    k = int.from_bytes(hashlib.sha512(R + public_key + message).digest(), "little") % _L
    # Response: S = (r + k·s) mod L, with s the expanded secret scalar.
    s = (r + k * int.from_bytes(scalar, "little")) % _L
    return R + s.to_bytes(32, "little")
|
||||||
|
|
||||||
|
|
||||||
def clear_keys() -> None:
|
def clear_keys() -> None:
|
||||||
"""Clear any stored private/public key material from memory."""
|
"""Clear any stored private/public key material from memory."""
|
||||||
global _private_key, _public_key
|
global _private_key, _public_key
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ from app.frontend_static import (
|
|||||||
)
|
)
|
||||||
from app.radio import RadioDisconnectedError
|
from app.radio import RadioDisconnectedError
|
||||||
from app.radio_sync import (
|
from app.radio_sync import (
|
||||||
|
stop_background_contact_reconciliation,
|
||||||
stop_message_polling,
|
stop_message_polling,
|
||||||
stop_periodic_advert,
|
stop_periodic_advert,
|
||||||
stop_periodic_sync,
|
stop_periodic_sync,
|
||||||
@@ -95,6 +96,7 @@ async def lifespan(app: FastAPI):
|
|||||||
pass
|
pass
|
||||||
await fanout_manager.stop_all()
|
await fanout_manager.stop_all()
|
||||||
await radio_manager.stop_connection_monitor()
|
await radio_manager.stop_connection_monitor()
|
||||||
|
await stop_background_contact_reconciliation()
|
||||||
await stop_message_polling()
|
await stop_message_polling()
|
||||||
await stop_periodic_advert()
|
await stop_periodic_advert()
|
||||||
await stop_periodic_sync()
|
await stop_periodic_sync()
|
||||||
|
|||||||
@@ -548,11 +548,14 @@ class RadioManager:
|
|||||||
|
|
||||||
async def disconnect(self) -> None:
|
async def disconnect(self) -> None:
|
||||||
"""Disconnect from the radio."""
|
"""Disconnect from the radio."""
|
||||||
|
from app.radio_sync import stop_background_contact_reconciliation
|
||||||
|
|
||||||
clear_keys()
|
clear_keys()
|
||||||
self._reset_reconnect_error_broadcasts()
|
self._reset_reconnect_error_broadcasts()
|
||||||
if self._meshcore is None:
|
if self._meshcore is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
await stop_background_contact_reconciliation()
|
||||||
await self._acquire_operation_lock("disconnect", blocking=True)
|
await self._acquire_operation_lock("disconnect", blocking=True)
|
||||||
try:
|
try:
|
||||||
mc = self._meshcore
|
mc = self._meshcore
|
||||||
|
|||||||
+275
-31
@@ -166,6 +166,9 @@ async def pause_polling():
|
|||||||
# Background task handle
|
# Background task handle
|
||||||
_sync_task: asyncio.Task | None = None
|
_sync_task: asyncio.Task | None = None
|
||||||
|
|
||||||
|
# Startup/background contact reconciliation task handle
|
||||||
|
_contact_reconcile_task: asyncio.Task | None = None
|
||||||
|
|
||||||
# Periodic maintenance check interval in seconds (5 minutes)
|
# Periodic maintenance check interval in seconds (5 minutes)
|
||||||
SYNC_INTERVAL = 300
|
SYNC_INTERVAL = 300
|
||||||
|
|
||||||
@@ -266,30 +269,7 @@ async def sync_and_offload_contacts(mc: MeshCore) -> dict:
|
|||||||
remove_result = await mc.commands.remove_contact(contact_data)
|
remove_result = await mc.commands.remove_contact(contact_data)
|
||||||
if remove_result.type == EventType.OK:
|
if remove_result.type == EventType.OK:
|
||||||
removed += 1
|
removed += 1
|
||||||
|
_evict_removed_contact_from_library_cache(mc, public_key)
|
||||||
# LIBRARY INTERNAL FIXUP: The MeshCore library's
|
|
||||||
# commands.remove_contact() sends the remove command over
|
|
||||||
# the wire but does NOT update the library's in-memory
|
|
||||||
# contact cache (mc._contacts). This is a gap in the
|
|
||||||
# library — there's no public API to clear a single
|
|
||||||
# contact from the cache, and the library only refreshes
|
|
||||||
# it on a full get_contacts() call.
|
|
||||||
#
|
|
||||||
# Why this matters: sync_recent_contacts_to_radio() uses
|
|
||||||
# mc.get_contact_by_key_prefix() to check whether a
|
|
||||||
# contact is already loaded on the radio. That method
|
|
||||||
# searches mc._contacts. If we don't evict the removed
|
|
||||||
# contact from the cache here, get_contact_by_key_prefix()
|
|
||||||
# will still find it and skip the add_contact() call —
|
|
||||||
# meaning contacts never get loaded back onto the radio
|
|
||||||
# after offload. The result: no DM ACKs, degraded routing
|
|
||||||
# for potentially minutes until the next periodic sync
|
|
||||||
# refreshes the cache from the (now-empty) radio.
|
|
||||||
#
|
|
||||||
# We access mc._contacts directly because the library
|
|
||||||
# exposes it as a read-only property (mc.contacts) with
|
|
||||||
# no removal API. The dict is keyed by public_key string.
|
|
||||||
mc._contacts.pop(public_key, None)
|
|
||||||
else:
|
else:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Failed to remove contact %s: %s", public_key[:12], remove_result.payload
|
"Failed to remove contact %s: %s", public_key[:12], remove_result.payload
|
||||||
@@ -461,28 +441,28 @@ async def ensure_default_channels() -> None:
|
|||||||
|
|
||||||
|
|
||||||
async def sync_and_offload_all(mc: MeshCore) -> dict:
|
async def sync_and_offload_all(mc: MeshCore) -> dict:
|
||||||
"""Sync and offload both contacts and channels, then ensure defaults exist."""
|
"""Run fast startup sync, then background contact reconcile."""
|
||||||
logger.info("Starting full radio sync and offload")
|
logger.info("Starting full radio sync and offload")
|
||||||
|
|
||||||
# Contact on_radio is legacy/stale metadata. Clear it during the offload/reload
|
# Contact on_radio is legacy/stale metadata. Clear it during the offload/reload
|
||||||
# cycle so old rows stop claiming radio residency we do not actively track.
|
# cycle so old rows stop claiming radio residency we do not actively track.
|
||||||
await ContactRepository.clear_on_radio_except([])
|
await ContactRepository.clear_on_radio_except([])
|
||||||
|
|
||||||
contacts_result = await sync_and_offload_contacts(mc)
|
contacts_result = await sync_contacts_from_radio(mc)
|
||||||
channels_result = await sync_and_offload_channels(mc)
|
channels_result = await sync_and_offload_channels(mc)
|
||||||
|
|
||||||
# Ensure default channels exist
|
# Ensure default channels exist
|
||||||
await ensure_default_channels()
|
await ensure_default_channels()
|
||||||
|
|
||||||
# Reload favorites plus a working-set fill back onto the radio immediately.
|
start_background_contact_reconciliation(
|
||||||
# Pass mc directly since the caller already holds the radio operation lock
|
initial_radio_contacts=contacts_result.get("radio_contacts", {}),
|
||||||
# (asyncio.Lock is not reentrant).
|
expected_mc=mc,
|
||||||
reload_result = await sync_recent_contacts_to_radio(force=True, mc=mc)
|
)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"contacts": contacts_result,
|
"contacts": contacts_result,
|
||||||
"channels": channels_result,
|
"channels": channels_result,
|
||||||
"reloaded": reload_result,
|
"contact_reconcile_started": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -1036,6 +1016,270 @@ async def stop_periodic_sync():
|
|||||||
# Throttling for contact sync to radio
|
# Throttling for contact sync to radio
|
||||||
_last_contact_sync: float = 0.0
|
_last_contact_sync: float = 0.0
|
||||||
CONTACT_SYNC_THROTTLE_SECONDS = 30 # Don't sync more than once per 30 seconds
|
CONTACT_SYNC_THROTTLE_SECONDS = 30 # Don't sync more than once per 30 seconds
|
||||||
|
CONTACT_RECONCILE_BATCH_SIZE = 2
|
||||||
|
CONTACT_RECONCILE_YIELD_SECONDS = 0.05
|
||||||
|
|
||||||
|
|
||||||
|
def _evict_removed_contact_from_library_cache(mc: MeshCore, public_key: str) -> None:
    """Keep the library's contact cache consistent after a successful removal."""
    # LIBRARY INTERNAL FIXUP: The MeshCore library's remove_contact() sends the
    # remove command over the wire but does NOT update the library's in-memory
    # contact cache (mc._contacts). This is a gap in the library — there's no
    # public API to clear a single contact from the cache, and the library only
    # refreshes it on a full get_contacts() call.
    #
    # Why this matters: contact sync and targeted ensure/load paths use
    # mc.get_contact_by_key_prefix() to check whether a contact is already
    # loaded on the radio. That method searches mc._contacts. If we don't evict
    # the removed contact from the cache here, later syncs will still find it
    # and skip add_contact() calls, leaving the radio without the contact even
    # though the app thinks it is resident.
    #
    # The cache dict is keyed by the public_key string; pop() with a default
    # makes the eviction a no-op if the key is already absent.
    mc._contacts.pop(public_key, None)
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_radio_contacts_payload(contacts: dict | None) -> dict[str, dict]:
|
||||||
|
"""Return radio contacts keyed by normalized lowercase full public key."""
|
||||||
|
normalized: dict[str, dict] = {}
|
||||||
|
for public_key, contact_data in (contacts or {}).items():
|
||||||
|
normalized[str(public_key).lower()] = contact_data
|
||||||
|
return normalized
|
||||||
|
|
||||||
|
|
||||||
|
async def sync_contacts_from_radio(mc: MeshCore) -> dict:
    """Pull contacts from the radio and persist them to the database without removing them.

    For each contact in the radio's snapshot, upserts a DB row (marked
    on_radio=False) and kicks off a fire-and-forget background task to
    reconcile that contact's messages.

    Returns:
        dict with "synced" (count of upserted contacts) and "radio_contacts"
        (the normalized snapshot keyed by lowercase public key); an "error"
        key is added on failure. Never raises — errors are logged and
        reported in the return value so callers can continue startup.
    """
    synced = 0

    try:
        result = await mc.commands.get_contacts()

        if result is None or result.type == EventType.ERROR:
            logger.error(
                "Failed to get contacts from radio: %s. "
                "If you see this repeatedly, the radio may be visible on the "
                "serial/TCP/BLE port but not responding to commands. Check for "
                "another process with the serial port open (other RemoteTerm "
                "instances, serial monitors, etc.), verify the firmware is "
                "up-to-date and in client mode (not repeater), or try a "
                "power cycle.",
                result,
            )
            return {"synced": 0, "radio_contacts": {}, "error": str(result)}

        # Normalize keys to lowercase full public keys before any DB writes.
        contacts = _normalize_radio_contacts_payload(result.payload)
        logger.info("Found %d contacts on radio", len(contacts))

        for public_key, contact_data in contacts.items():
            await ContactRepository.upsert(
                ContactUpsert.from_radio_dict(public_key, contact_data, on_radio=False)
            )
            # Fire-and-forget: message reconciliation must not block the sync loop.
            asyncio.create_task(
                _reconcile_contact_messages_background(
                    public_key,
                    contact_data.get("adv_name"),
                )
            )
            synced += 1

        logger.info("Synced %d contacts from radio snapshot", synced)
        return {"synced": synced, "radio_contacts": contacts}
    except Exception as e:
        # Deliberate best-effort boundary: report the partial count and error
        # instead of failing the caller's startup/sync path.
        logger.error("Error during contact snapshot sync: %s", e)
        return {"synced": synced, "radio_contacts": {}, "error": str(e)}
||||||
|
|
||||||
|
|
||||||
|
async def _reconcile_radio_contacts_in_background(
|
||||||
|
*,
|
||||||
|
initial_radio_contacts: dict[str, dict],
|
||||||
|
expected_mc: MeshCore,
|
||||||
|
) -> None:
|
||||||
|
"""Converge radio contacts toward the desired favorites+recents working set."""
|
||||||
|
radio_contacts = dict(initial_radio_contacts)
|
||||||
|
removed = 0
|
||||||
|
loaded = 0
|
||||||
|
failed = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
if not radio_manager.is_connected or radio_manager.meshcore is not expected_mc:
|
||||||
|
logger.info("Stopping background contact reconcile: radio transport changed")
|
||||||
|
break
|
||||||
|
|
||||||
|
selected_contacts = await get_contacts_selected_for_radio_sync()
|
||||||
|
desired_contacts = {
|
||||||
|
contact.public_key.lower(): contact
|
||||||
|
for contact in selected_contacts
|
||||||
|
if len(contact.public_key) >= 64
|
||||||
|
}
|
||||||
|
removable_keys = [key for key in radio_contacts if key not in desired_contacts]
|
||||||
|
missing_contacts = [
|
||||||
|
contact for key, contact in desired_contacts.items() if key not in radio_contacts
|
||||||
|
]
|
||||||
|
|
||||||
|
if not removable_keys and not missing_contacts:
|
||||||
|
logger.info(
|
||||||
|
"Background contact reconcile complete: %d contacts on radio working set",
|
||||||
|
len(radio_contacts),
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
progressed = False
|
||||||
|
try:
|
||||||
|
async with radio_manager.radio_operation(
|
||||||
|
"background_contact_reconcile",
|
||||||
|
blocking=False,
|
||||||
|
) as mc:
|
||||||
|
if mc is not expected_mc:
|
||||||
|
logger.info(
|
||||||
|
"Stopping background contact reconcile: radio transport changed"
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
budget = CONTACT_RECONCILE_BATCH_SIZE
|
||||||
|
selected_contacts = await get_contacts_selected_for_radio_sync()
|
||||||
|
desired_contacts = {
|
||||||
|
contact.public_key.lower(): contact
|
||||||
|
for contact in selected_contacts
|
||||||
|
if len(contact.public_key) >= 64
|
||||||
|
}
|
||||||
|
|
||||||
|
for public_key in list(radio_contacts):
|
||||||
|
if budget <= 0:
|
||||||
|
break
|
||||||
|
if public_key in desired_contacts:
|
||||||
|
continue
|
||||||
|
|
||||||
|
remove_payload = (
|
||||||
|
mc.get_contact_by_key_prefix(public_key[:12])
|
||||||
|
or radio_contacts.get(public_key)
|
||||||
|
or {"public_key": public_key}
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
remove_result = await mc.commands.remove_contact(remove_payload)
|
||||||
|
except Exception as exc:
|
||||||
|
failed += 1
|
||||||
|
budget -= 1
|
||||||
|
logger.warning(
|
||||||
|
"Error removing contact %s during background reconcile: %s",
|
||||||
|
public_key[:12],
|
||||||
|
exc,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
budget -= 1
|
||||||
|
if remove_result.type == EventType.OK:
|
||||||
|
radio_contacts.pop(public_key, None)
|
||||||
|
_evict_removed_contact_from_library_cache(mc, public_key)
|
||||||
|
removed += 1
|
||||||
|
progressed = True
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
logger.warning(
|
||||||
|
"Failed to remove contact %s during background reconcile: %s",
|
||||||
|
public_key[:12],
|
||||||
|
remove_result.payload,
|
||||||
|
)
|
||||||
|
|
||||||
|
if budget > 0:
|
||||||
|
for public_key, contact in desired_contacts.items():
|
||||||
|
if budget <= 0:
|
||||||
|
break
|
||||||
|
if public_key in radio_contacts:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if mc.get_contact_by_key_prefix(public_key[:12]):
|
||||||
|
radio_contacts[public_key] = {"public_key": public_key}
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
add_payload = contact.to_radio_dict()
|
||||||
|
add_result = await mc.commands.add_contact(add_payload)
|
||||||
|
except Exception as exc:
|
||||||
|
failed += 1
|
||||||
|
budget -= 1
|
||||||
|
logger.warning(
|
||||||
|
"Error adding contact %s during background reconcile: %s",
|
||||||
|
public_key[:12],
|
||||||
|
exc,
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
budget -= 1
|
||||||
|
if add_result.type == EventType.OK:
|
||||||
|
radio_contacts[public_key] = add_payload
|
||||||
|
loaded += 1
|
||||||
|
progressed = True
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
reason = add_result.payload
|
||||||
|
hint = ""
|
||||||
|
if reason is None:
|
||||||
|
hint = (
|
||||||
|
" (no response from radio — if this repeats, check for "
|
||||||
|
"serial port contention from another process or try a "
|
||||||
|
"power cycle)"
|
||||||
|
)
|
||||||
|
logger.warning(
|
||||||
|
"Failed to add contact %s during background reconcile: %s%s",
|
||||||
|
public_key[:12],
|
||||||
|
reason,
|
||||||
|
hint,
|
||||||
|
)
|
||||||
|
except RadioOperationBusyError:
|
||||||
|
logger.debug("Background contact reconcile yielding: radio busy")
|
||||||
|
|
||||||
|
await asyncio.sleep(CONTACT_RECONCILE_YIELD_SECONDS)
|
||||||
|
if not progressed:
|
||||||
|
continue
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.info("Background contact reconcile task cancelled")
|
||||||
|
raise
|
||||||
|
except Exception as exc:
|
||||||
|
logger.error("Background contact reconcile failed: %s", exc, exc_info=True)
|
||||||
|
finally:
|
||||||
|
if removed > 0 or loaded > 0 or failed > 0:
|
||||||
|
logger.info(
|
||||||
|
"Background contact reconcile summary: removed %d, loaded %d, failed %d",
|
||||||
|
removed,
|
||||||
|
loaded,
|
||||||
|
failed,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def start_background_contact_reconciliation(
|
||||||
|
*,
|
||||||
|
initial_radio_contacts: dict[str, dict],
|
||||||
|
expected_mc: MeshCore,
|
||||||
|
) -> None:
|
||||||
|
"""Start or replace the background contact reconcile task for the current radio."""
|
||||||
|
global _contact_reconcile_task
|
||||||
|
|
||||||
|
if _contact_reconcile_task is not None and not _contact_reconcile_task.done():
|
||||||
|
_contact_reconcile_task.cancel()
|
||||||
|
|
||||||
|
_contact_reconcile_task = asyncio.create_task(
|
||||||
|
_reconcile_radio_contacts_in_background(
|
||||||
|
initial_radio_contacts=initial_radio_contacts,
|
||||||
|
expected_mc=expected_mc,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Started background contact reconcile for %d radio contact(s)",
|
||||||
|
len(initial_radio_contacts),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def stop_background_contact_reconciliation() -> None:
|
||||||
|
"""Stop the background contact reconcile task."""
|
||||||
|
global _contact_reconcile_task
|
||||||
|
|
||||||
|
if _contact_reconcile_task and not _contact_reconcile_task.done():
|
||||||
|
_contact_reconcile_task.cancel()
|
||||||
|
try:
|
||||||
|
await _contact_reconcile_task
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
pass
|
||||||
|
_contact_reconcile_task = None
|
||||||
|
|
||||||
|
|
||||||
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
||||||
|
|||||||
@@ -723,6 +723,11 @@ class MessageRepository:
|
|||||||
state_key = f"{prefix}-{row['conversation_key']}"
|
state_key = f"{prefix}-{row['conversation_key']}"
|
||||||
last_message_times[state_key] = row["last_message_time"]
|
last_message_times[state_key] = row["last_message_time"]
|
||||||
|
|
||||||
|
# Only include last_read_ats for conversations that actually have messages.
|
||||||
|
# Without this filter, every contact heard via advertisement (even without
|
||||||
|
# any DMs) bloats the payload — 391KB down to ~46KB on a typical database.
|
||||||
|
last_read_ats = {k: v for k, v in last_read_ats.items() if k in last_message_times}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"counts": counts,
|
"counts": counts,
|
||||||
"mentions": mention_flags,
|
"mentions": mention_flags,
|
||||||
|
|||||||
+26
-1
@@ -16,7 +16,7 @@ from app.repository.fanout import FanoutConfigRepository
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
||||||
|
|
||||||
_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs"}
|
_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs", "map_upload"}
|
||||||
|
|
||||||
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
||||||
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
||||||
@@ -94,6 +94,8 @@ def _validate_and_normalize_config(config_type: str, config: dict) -> dict:
|
|||||||
_validate_apprise_config(normalized)
|
_validate_apprise_config(normalized)
|
||||||
elif config_type == "sqs":
|
elif config_type == "sqs":
|
||||||
_validate_sqs_config(normalized)
|
_validate_sqs_config(normalized)
|
||||||
|
elif config_type == "map_upload":
|
||||||
|
_validate_map_upload_config(normalized)
|
||||||
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
@@ -295,10 +297,33 @@ def _validate_sqs_config(config: dict) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_map_upload_config(config: dict) -> None:
|
||||||
|
"""Validate and normalize map_upload config blob."""
|
||||||
|
api_url = str(config.get("api_url", "")).strip()
|
||||||
|
if api_url and not api_url.startswith(("http://", "https://")):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="api_url must start with http:// or https://",
|
||||||
|
)
|
||||||
|
# Persist the cleaned value (empty string means use the module default)
|
||||||
|
config["api_url"] = api_url
|
||||||
|
config["dry_run"] = bool(config.get("dry_run", True))
|
||||||
|
config["geofence_enabled"] = bool(config.get("geofence_enabled", False))
|
||||||
|
try:
|
||||||
|
radius = float(config.get("geofence_radius_km", 0) or 0)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
raise HTTPException(status_code=400, detail="geofence_radius_km must be a number") from None
|
||||||
|
if radius < 0:
|
||||||
|
raise HTTPException(status_code=400, detail="geofence_radius_km must be >= 0")
|
||||||
|
config["geofence_radius_km"] = radius
|
||||||
|
|
||||||
|
|
||||||
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
||||||
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
||||||
if config_type == "mqtt_community":
|
if config_type == "mqtt_community":
|
||||||
return {"messages": "none", "raw_packets": "all"}
|
return {"messages": "none", "raw_packets": "all"}
|
||||||
|
if config_type == "map_upload":
|
||||||
|
return {"messages": "none", "raw_packets": "all"}
|
||||||
if config_type == "bot":
|
if config_type == "bot":
|
||||||
return {"messages": "all", "raw_packets": "none"}
|
return {"messages": "all", "raw_packets": "none"}
|
||||||
if config_type in ("webhook", "apprise"):
|
if config_type in ("webhook", "apprise"):
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import os
|
|||||||
from typing import Any, Literal
|
from typing import Any, Literal
|
||||||
|
|
||||||
from fastapi import APIRouter
|
from fastapi import APIRouter
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.repository import RawPacketRepository
|
from app.repository import RawPacketRepository
|
||||||
@@ -25,6 +25,13 @@ class AppInfoResponse(BaseModel):
|
|||||||
commit_hash: str | None = None
|
commit_hash: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class FanoutStatusResponse(BaseModel):
|
||||||
|
name: str
|
||||||
|
type: str
|
||||||
|
status: str
|
||||||
|
last_error: str | None = None
|
||||||
|
|
||||||
|
|
||||||
class HealthResponse(BaseModel):
|
class HealthResponse(BaseModel):
|
||||||
status: str
|
status: str
|
||||||
radio_connected: bool
|
radio_connected: bool
|
||||||
@@ -35,7 +42,7 @@ class HealthResponse(BaseModel):
|
|||||||
radio_device_info: RadioDeviceInfoResponse | None = None
|
radio_device_info: RadioDeviceInfoResponse | None = None
|
||||||
database_size_mb: float
|
database_size_mb: float
|
||||||
oldest_undecrypted_timestamp: int | None
|
oldest_undecrypted_timestamp: int | None
|
||||||
fanout_statuses: dict[str, dict[str, str]] = {}
|
fanout_statuses: dict[str, FanoutStatusResponse] = Field(default_factory=dict)
|
||||||
bots_disabled: bool = False
|
bots_disabled: bool = False
|
||||||
bots_disabled_source: Literal["env", "until_restart"] | None = None
|
bots_disabled_source: Literal["env", "until_restart"] | None = None
|
||||||
basic_auth_enabled: bool = False
|
basic_auth_enabled: bool = False
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ def _login_rejected_message(label: str) -> str:
|
|||||||
def _login_send_failed_message(label: str) -> str:
|
def _login_send_failed_message(label: str) -> str:
|
||||||
return (
|
return (
|
||||||
f"The login request could not be sent to the {label}. "
|
f"The login request could not be sent to the {label}. "
|
||||||
f"The control panel is still available, but authenticated actions may fail until a login succeeds."
|
f"You're free to attempt interaction; try logging in again if authenticated actions fail."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -70,7 +70,7 @@ def _login_timeout_message(label: str) -> str:
|
|||||||
return (
|
return (
|
||||||
f"No login confirmation was heard from the {label}. "
|
f"No login confirmation was heard from the {label}. "
|
||||||
"That can mean the password was wrong or the reply was missed in transit. "
|
"That can mean the password was wrong or the reply was missed in transit. "
|
||||||
"The control panel is still available; try logging in again if authenticated actions fail."
|
"You're free to attempt interaction; try logging in again if authenticated actions fail."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Generated
+2
-2
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.6.0",
|
"version": "3.6.1",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.6.0",
|
"version": "3.6.1",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/lang-python": "^6.2.1",
|
"@codemirror/lang-python": "^6.2.1",
|
||||||
"@codemirror/theme-one-dark": "^6.1.3",
|
"@codemirror/theme-one-dark": "^6.1.3",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "3.6.0",
|
"version": "3.6.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
|
|||||||
+18
-2
@@ -1,4 +1,4 @@
|
|||||||
import { useEffect, useCallback, useRef, useState } from 'react';
|
import { useEffect, useCallback, useRef, useState, useMemo } from 'react';
|
||||||
import { api } from './api';
|
import { api } from './api';
|
||||||
import { takePrefetchOrFetch } from './prefetch';
|
import { takePrefetchOrFetch } from './prefetch';
|
||||||
import { useWebSocket } from './useWebSocket';
|
import { useWebSocket } from './useWebSocket';
|
||||||
@@ -24,6 +24,7 @@ import { DistanceUnitProvider } from './contexts/DistanceUnitContext';
|
|||||||
import { messageContainsMention } from './utils/messageParser';
|
import { messageContainsMention } from './utils/messageParser';
|
||||||
import { getStateKey } from './utils/conversationState';
|
import { getStateKey } from './utils/conversationState';
|
||||||
import type { Conversation, Message, RawPacket } from './types';
|
import type { Conversation, Message, RawPacket } from './types';
|
||||||
|
import { CONTACT_TYPE_ROOM } from './types';
|
||||||
|
|
||||||
interface ChannelUnreadMarker {
|
interface ChannelUnreadMarker {
|
||||||
channelId: string;
|
channelId: string;
|
||||||
@@ -251,6 +252,21 @@ export function App() {
|
|||||||
} = useConversationMessages(activeConversation, targetMessageId);
|
} = useConversationMessages(activeConversation, targetMessageId);
|
||||||
removeConversationMessagesRef.current = removeConversationMessages;
|
removeConversationMessagesRef.current = removeConversationMessages;
|
||||||
|
|
||||||
|
// Room servers replay stored history as a burst of DMs, all arriving with similar received_at
|
||||||
|
// but spanning a wide range of sender_timestamps. Sort by sender_timestamp for room contacts
|
||||||
|
// so the display reflects the original send order rather than our radio's receipt order.
|
||||||
|
const activeContactIsRoom =
|
||||||
|
activeConversation?.type === 'contact' &&
|
||||||
|
contacts.find((c) => c.public_key === activeConversation.id)?.type === CONTACT_TYPE_ROOM;
|
||||||
|
const sortedMessages = useMemo(() => {
|
||||||
|
if (!activeContactIsRoom || messages.length === 0) return messages;
|
||||||
|
return [...messages].sort((a, b) => {
|
||||||
|
const aTs = a.sender_timestamp ?? a.received_at;
|
||||||
|
const bTs = b.sender_timestamp ?? b.received_at;
|
||||||
|
return aTs !== bTs ? aTs - bTs : a.id - b.id;
|
||||||
|
});
|
||||||
|
}, [activeContactIsRoom, messages]);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
unreadCounts,
|
unreadCounts,
|
||||||
mentions,
|
mentions,
|
||||||
@@ -427,7 +443,7 @@ export function App() {
|
|||||||
config,
|
config,
|
||||||
health,
|
health,
|
||||||
favorites,
|
favorites,
|
||||||
messages,
|
messages: sortedMessages,
|
||||||
messagesLoading,
|
messagesLoading,
|
||||||
loadingOlder,
|
loadingOlder,
|
||||||
hasOlderMessages,
|
hasOlderMessages,
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { Button } from './ui/button';
|
|||||||
import { Bell, Route, Star, Trash2 } from 'lucide-react';
|
import { Bell, Route, Star, Trash2 } from 'lucide-react';
|
||||||
import { DirectTraceIcon } from './DirectTraceIcon';
|
import { DirectTraceIcon } from './DirectTraceIcon';
|
||||||
import { RepeaterLogin } from './RepeaterLogin';
|
import { RepeaterLogin } from './RepeaterLogin';
|
||||||
|
import { ServerLoginStatusBanner } from './ServerLoginStatusBanner';
|
||||||
import { useRememberedServerPassword } from '../hooks/useRememberedServerPassword';
|
import { useRememberedServerPassword } from '../hooks/useRememberedServerPassword';
|
||||||
import { useRepeaterDashboard } from '../hooks/useRepeaterDashboard';
|
import { useRepeaterDashboard } from '../hooks/useRepeaterDashboard';
|
||||||
import { isFavorite } from '../utils/favorites';
|
import { isFavorite } from '../utils/favorites';
|
||||||
@@ -69,6 +70,7 @@ export function RepeaterDashboard({
|
|||||||
loggedIn,
|
loggedIn,
|
||||||
loginLoading,
|
loginLoading,
|
||||||
loginError,
|
loginError,
|
||||||
|
lastLoginAttempt,
|
||||||
paneData,
|
paneData,
|
||||||
paneStates,
|
paneStates,
|
||||||
consoleHistory,
|
consoleHistory,
|
||||||
@@ -249,6 +251,14 @@ export function RepeaterDashboard({
|
|||||||
/>
|
/>
|
||||||
) : (
|
) : (
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
|
<ServerLoginStatusBanner
|
||||||
|
attempt={lastLoginAttempt}
|
||||||
|
loading={loginLoading}
|
||||||
|
canRetryPassword={password.trim().length > 0}
|
||||||
|
onRetryPassword={() => handleRepeaterLogin(password)}
|
||||||
|
onRetryBlank={handleRepeaterGuestLogin}
|
||||||
|
blankRetryLabel="Retry Existing-Access Login"
|
||||||
|
/>
|
||||||
{/* Top row: Telemetry + Radio Settings | Node Info + Neighbors */}
|
{/* Top row: Telemetry + Radio Settings | Node Info + Neighbors */}
|
||||||
<div className="grid grid-cols-1 gap-4 md:grid-cols-2 md:items-stretch">
|
<div className="grid grid-cols-1 gap-4 md:grid-cols-2 md:items-stretch">
|
||||||
<div className="flex flex-col gap-4">
|
<div className="flex flex-col gap-4">
|
||||||
|
|||||||
@@ -16,7 +16,13 @@ import { AclPane } from './repeater/RepeaterAclPane';
|
|||||||
import { LppTelemetryPane } from './repeater/RepeaterLppTelemetryPane';
|
import { LppTelemetryPane } from './repeater/RepeaterLppTelemetryPane';
|
||||||
import { ConsolePane } from './repeater/RepeaterConsolePane';
|
import { ConsolePane } from './repeater/RepeaterConsolePane';
|
||||||
import { RepeaterLogin } from './RepeaterLogin';
|
import { RepeaterLogin } from './RepeaterLogin';
|
||||||
|
import { ServerLoginStatusBanner } from './ServerLoginStatusBanner';
|
||||||
import { useRememberedServerPassword } from '../hooks/useRememberedServerPassword';
|
import { useRememberedServerPassword } from '../hooks/useRememberedServerPassword';
|
||||||
|
import {
|
||||||
|
buildServerLoginAttemptFromError,
|
||||||
|
buildServerLoginAttemptFromResponse,
|
||||||
|
type ServerLoginAttemptState,
|
||||||
|
} from '../utils/serverLoginState';
|
||||||
|
|
||||||
interface RoomServerPanelProps {
|
interface RoomServerPanelProps {
|
||||||
contact: Contact;
|
contact: Contact;
|
||||||
@@ -61,6 +67,7 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
const [loginLoading, setLoginLoading] = useState(false);
|
const [loginLoading, setLoginLoading] = useState(false);
|
||||||
const [loginError, setLoginError] = useState<string | null>(null);
|
const [loginError, setLoginError] = useState<string | null>(null);
|
||||||
const [authenticated, setAuthenticated] = useState(false);
|
const [authenticated, setAuthenticated] = useState(false);
|
||||||
|
const [lastLoginAttempt, setLastLoginAttempt] = useState<ServerLoginAttemptState | null>(null);
|
||||||
const [advancedOpen, setAdvancedOpen] = useState(false);
|
const [advancedOpen, setAdvancedOpen] = useState(false);
|
||||||
const [paneData, setPaneData] = useState<RoomPaneData>({
|
const [paneData, setPaneData] = useState<RoomPaneData>({
|
||||||
status: null,
|
status: null,
|
||||||
@@ -75,6 +82,7 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
setLoginLoading(false);
|
setLoginLoading(false);
|
||||||
setLoginError(null);
|
setLoginError(null);
|
||||||
setAuthenticated(false);
|
setAuthenticated(false);
|
||||||
|
setLastLoginAttempt(null);
|
||||||
setAdvancedOpen(false);
|
setAdvancedOpen(false);
|
||||||
setPaneData({
|
setPaneData({
|
||||||
status: null,
|
status: null,
|
||||||
@@ -129,26 +137,32 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
);
|
);
|
||||||
|
|
||||||
const performLogin = useCallback(
|
const performLogin = useCallback(
|
||||||
async (password: string) => {
|
async (nextPassword: string, method: 'password' | 'blank') => {
|
||||||
if (loginLoading) return;
|
if (loginLoading) return;
|
||||||
|
|
||||||
setLoginLoading(true);
|
setLoginLoading(true);
|
||||||
setLoginError(null);
|
setLoginError(null);
|
||||||
try {
|
try {
|
||||||
const result = await api.roomLogin(contact.public_key, password);
|
const result = await api.roomLogin(contact.public_key, nextPassword);
|
||||||
|
setLastLoginAttempt(buildServerLoginAttemptFromResponse(method, result, 'room server'));
|
||||||
setAuthenticated(true);
|
setAuthenticated(true);
|
||||||
if (result.authenticated) {
|
if (result.authenticated) {
|
||||||
toast.success('Room login confirmed');
|
toast.success('Login confirmed by the room server.');
|
||||||
} else {
|
} else {
|
||||||
toast.warning('Room login not confirmed', {
|
toast.warning("Couldn't confirm room login", {
|
||||||
description: result.message ?? 'Room login was not confirmed',
|
description:
|
||||||
|
result.message ??
|
||||||
|
'No confirmation came back from the room server. You can still open tools and try again.',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
const message = err instanceof Error ? err.message : 'Unknown error';
|
const message = err instanceof Error ? err.message : 'Unknown error';
|
||||||
|
setLastLoginAttempt(buildServerLoginAttemptFromError(method, message, 'room server'));
|
||||||
setAuthenticated(true);
|
setAuthenticated(true);
|
||||||
setLoginError(message);
|
setLoginError(message);
|
||||||
toast.error('Room login failed', { description: message });
|
toast.error('Room login request failed', {
|
||||||
|
description: `${message}. You can still open tools and retry the login from here.`,
|
||||||
|
});
|
||||||
} finally {
|
} finally {
|
||||||
setLoginLoading(false);
|
setLoginLoading(false);
|
||||||
}
|
}
|
||||||
@@ -157,15 +171,15 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
);
|
);
|
||||||
|
|
||||||
const handleLogin = useCallback(
|
const handleLogin = useCallback(
|
||||||
async (password: string) => {
|
async (nextPassword: string) => {
|
||||||
await performLogin(password);
|
await performLogin(nextPassword, 'password');
|
||||||
persistAfterLogin(password);
|
persistAfterLogin(nextPassword);
|
||||||
},
|
},
|
||||||
[performLogin, persistAfterLogin]
|
[performLogin, persistAfterLogin]
|
||||||
);
|
);
|
||||||
|
|
||||||
const handleLoginAsGuest = useCallback(async () => {
|
const handleLoginAsGuest = useCallback(async () => {
|
||||||
await performLogin('');
|
await performLogin('', 'blank');
|
||||||
persistAfterLogin('');
|
persistAfterLogin('');
|
||||||
}, [performLogin, persistAfterLogin]);
|
}, [performLogin, persistAfterLogin]);
|
||||||
|
|
||||||
@@ -207,6 +221,8 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
);
|
);
|
||||||
|
|
||||||
const panelTitle = useMemo(() => contact.name || contact.public_key.slice(0, 12), [contact]);
|
const panelTitle = useMemo(() => contact.name || contact.public_key.slice(0, 12), [contact]);
|
||||||
|
const showLoginFailureState =
|
||||||
|
lastLoginAttempt !== null && lastLoginAttempt.outcome !== 'confirmed';
|
||||||
|
|
||||||
if (!authenticated) {
|
if (!authenticated) {
|
||||||
return (
|
return (
|
||||||
@@ -236,7 +252,7 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
onLoginAsGuest={handleLoginAsGuest}
|
onLoginAsGuest={handleLoginAsGuest}
|
||||||
description="Log in with the room password or use ACL/guest access to enter this room server"
|
description="Log in with the room password or use ACL/guest access to enter this room server"
|
||||||
passwordPlaceholder="Room server password..."
|
passwordPlaceholder="Room server password..."
|
||||||
guestLabel="Login with ACL / Guest"
|
guestLabel="Login with Existing Access / Guest"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -245,15 +261,52 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<section className="border-b border-border bg-muted/20 px-4 py-3">
|
<section className="border-b border-border bg-muted/20 px-4 py-3">
|
||||||
<div className="flex justify-end">
|
<div className="space-y-3">
|
||||||
<Button
|
{showLoginFailureState ? (
|
||||||
type="button"
|
<ServerLoginStatusBanner
|
||||||
variant="outline"
|
attempt={lastLoginAttempt}
|
||||||
size="sm"
|
loading={loginLoading}
|
||||||
onClick={() => setAdvancedOpen((prev) => !prev)}
|
canRetryPassword={password.trim().length > 0}
|
||||||
>
|
onRetryPassword={() => handleLogin(password)}
|
||||||
{advancedOpen ? 'Hide Tools' : 'Show Tools'}
|
onRetryBlank={handleLoginAsGuest}
|
||||||
</Button>
|
blankRetryLabel="Retry Existing-Access Login"
|
||||||
|
showRetryActions={false}
|
||||||
|
/>
|
||||||
|
) : null}
|
||||||
|
<div className="flex flex-wrap items-center justify-between gap-2">
|
||||||
|
{showLoginFailureState ? (
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => void handleLogin(password)}
|
||||||
|
disabled={loginLoading || password.trim().length === 0}
|
||||||
|
>
|
||||||
|
Retry Password Login
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={handleLoginAsGuest}
|
||||||
|
disabled={loginLoading}
|
||||||
|
>
|
||||||
|
Retry Existing-Access Login
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div />
|
||||||
|
)}
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => setAdvancedOpen((prev) => !prev)}
|
||||||
|
>
|
||||||
|
{advancedOpen ? 'Hide Tools' : 'Show Tools'}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<Sheet open={advancedOpen} onOpenChange={setAdvancedOpen}>
|
<Sheet open={advancedOpen} onOpenChange={setAdvancedOpen}>
|
||||||
<SheetContent side="right" className="w-full sm:max-w-4xl p-0 flex flex-col">
|
<SheetContent side="right" className="w-full sm:max-w-4xl p-0 flex flex-col">
|
||||||
@@ -269,15 +322,6 @@ export function RoomServerPanel({ contact, onAuthenticatedChange }: RoomServerPa
|
|||||||
<h2 className="truncate text-base font-semibold">Room Server Tools</h2>
|
<h2 className="truncate text-base font-semibold">Room Server Tools</h2>
|
||||||
<p className="text-sm text-muted-foreground">{panelTitle}</p>
|
<p className="text-sm text-muted-foreground">{panelTitle}</p>
|
||||||
</div>
|
</div>
|
||||||
<Button
|
|
||||||
type="button"
|
|
||||||
variant="outline"
|
|
||||||
onClick={handleLoginAsGuest}
|
|
||||||
disabled={loginLoading}
|
|
||||||
className="self-start sm:self-auto"
|
|
||||||
>
|
|
||||||
Refresh ACL Login
|
|
||||||
</Button>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex-1 overflow-y-auto p-4">
|
<div className="flex-1 overflow-y-auto p-4">
|
||||||
|
|||||||
@@ -0,0 +1,76 @@
|
|||||||
|
import { Button } from './ui/button';
|
||||||
|
import type { ServerLoginAttemptState } from '../utils/serverLoginState';
|
||||||
|
import { getServerLoginAttemptTone } from '../utils/serverLoginState';
|
||||||
|
import { cn } from '../lib/utils';
|
||||||
|
|
||||||
|
interface ServerLoginStatusBannerProps {
|
||||||
|
attempt: ServerLoginAttemptState | null;
|
||||||
|
loading: boolean;
|
||||||
|
canRetryPassword: boolean;
|
||||||
|
onRetryPassword: () => Promise<void> | void;
|
||||||
|
onRetryBlank: () => Promise<void> | void;
|
||||||
|
passwordRetryLabel?: string;
|
||||||
|
blankRetryLabel?: string;
|
||||||
|
showRetryActions?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function ServerLoginStatusBanner({
|
||||||
|
attempt,
|
||||||
|
loading,
|
||||||
|
canRetryPassword,
|
||||||
|
onRetryPassword,
|
||||||
|
onRetryBlank,
|
||||||
|
passwordRetryLabel = 'Retry Password Login',
|
||||||
|
blankRetryLabel = 'Retry Existing-Access Login',
|
||||||
|
showRetryActions = true,
|
||||||
|
}: ServerLoginStatusBannerProps) {
|
||||||
|
if (attempt?.outcome === 'confirmed') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tone = getServerLoginAttemptTone(attempt);
|
||||||
|
const shouldShowActions = showRetryActions;
|
||||||
|
const toneClassName =
|
||||||
|
tone === 'success'
|
||||||
|
? 'border-success/30 bg-success/10 text-success'
|
||||||
|
: tone === 'warning'
|
||||||
|
? 'border-warning/30 bg-warning/10 text-warning'
|
||||||
|
: tone === 'destructive'
|
||||||
|
? 'border-destructive/30 bg-destructive/10 text-destructive'
|
||||||
|
: 'border-border bg-muted/40 text-foreground';
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={cn('rounded-md border px-4 py-3', toneClassName)}>
|
||||||
|
<div className="flex flex-col gap-3 sm:flex-row sm:items-start sm:justify-between">
|
||||||
|
<div className="min-w-0 space-y-1">
|
||||||
|
<p className="text-sm font-medium">
|
||||||
|
{attempt?.summary ?? 'No server login attempt has been recorded in this view yet.'}
|
||||||
|
</p>
|
||||||
|
{attempt?.details && <p className="text-xs opacity-90">{attempt.details}</p>}
|
||||||
|
</div>
|
||||||
|
{shouldShowActions ? (
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => void onRetryPassword()}
|
||||||
|
disabled={loading || !canRetryPassword}
|
||||||
|
>
|
||||||
|
{passwordRetryLabel}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => void onRetryBlank()}
|
||||||
|
disabled={loading}
|
||||||
|
>
|
||||||
|
{blankRetryLabel}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
) : null}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -112,7 +112,10 @@ export function RadioSettingsPane({
|
|||||||
<NotFetched />
|
<NotFetched />
|
||||||
) : (
|
) : (
|
||||||
<div>
|
<div>
|
||||||
<KvRow label="Local Advert" value={formatAdvertInterval(advertData.advert_interval)} />
|
<KvRow
|
||||||
|
label="Local Advert"
|
||||||
|
value={formatAdvertInterval(advertData.advert_interval, 'minutes')}
|
||||||
|
/>
|
||||||
<KvRow
|
<KvRow
|
||||||
label="Flood Advert"
|
label="Flood Advert"
|
||||||
value={formatAdvertInterval(advertData.flood_advert_interval)}
|
value={formatAdvertInterval(advertData.flood_advert_interval)}
|
||||||
|
|||||||
@@ -76,11 +76,19 @@ export function formatClockDrift(
|
|||||||
return { text: parts.join(''), isLarge: false };
|
return { text: parts.join(''), isLarge: false };
|
||||||
}
|
}
|
||||||
|
|
||||||
export function formatAdvertInterval(val: string | null): string {
|
export function formatAdvertInterval(
|
||||||
|
val: string | null,
|
||||||
|
unit: 'minutes' | 'hours' = 'hours'
|
||||||
|
): string {
|
||||||
if (val == null) return '—';
|
if (val == null) return '—';
|
||||||
const trimmed = val.trim();
|
const trimmed = val.trim();
|
||||||
if (trimmed === '0') return '<disabled>';
|
if (trimmed === '0') return '<disabled>';
|
||||||
return `${trimmed}h`;
|
if (unit === 'hours') return `${trimmed}h`;
|
||||||
|
const mins = parseInt(trimmed, 10);
|
||||||
|
if (isNaN(mins)) return trimmed;
|
||||||
|
if (mins >= 60 && mins % 60 === 0) return `${mins / 60}h`;
|
||||||
|
if (mins >= 60) return `${Math.floor(mins / 60)}h${mins % 60}m`;
|
||||||
|
return `${mins}m`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function formatFetchedRelative(fetchedAt: number): string {
|
function formatFetchedRelative(fetchedAt: number): string {
|
||||||
|
|||||||
@@ -1,10 +1,17 @@
|
|||||||
import { useState, useEffect, useCallback, useMemo, useRef, lazy, Suspense } from 'react';
|
import { useState, useEffect, useCallback, useMemo, useRef, lazy, Suspense } from 'react';
|
||||||
import { ChevronDown } from 'lucide-react';
|
import { ChevronDown, Info } from 'lucide-react';
|
||||||
import { Input } from '../ui/input';
|
import { Input } from '../ui/input';
|
||||||
import { Label } from '../ui/label';
|
import { Label } from '../ui/label';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle } from '../ui/dialog';
|
import {
|
||||||
|
Dialog,
|
||||||
|
DialogContent,
|
||||||
|
DialogDescription,
|
||||||
|
DialogFooter,
|
||||||
|
DialogHeader,
|
||||||
|
DialogTitle,
|
||||||
|
} from '../ui/dialog';
|
||||||
import { toast } from '../ui/sonner';
|
import { toast } from '../ui/sonner';
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { api } from '../../api';
|
import { api } from '../../api';
|
||||||
@@ -16,11 +23,12 @@ const BotCodeEditor = lazy(() =>
|
|||||||
|
|
||||||
const TYPE_LABELS: Record<string, string> = {
|
const TYPE_LABELS: Record<string, string> = {
|
||||||
mqtt_private: 'Private MQTT',
|
mqtt_private: 'Private MQTT',
|
||||||
mqtt_community: 'Community MQTT',
|
mqtt_community: 'Community Sharing',
|
||||||
bot: 'Python Bot',
|
bot: 'Python Bot',
|
||||||
webhook: 'Webhook',
|
webhook: 'Webhook',
|
||||||
apprise: 'Apprise',
|
apprise: 'Apprise',
|
||||||
sqs: 'Amazon SQS',
|
sqs: 'Amazon SQS',
|
||||||
|
map_upload: 'Map Upload',
|
||||||
};
|
};
|
||||||
|
|
||||||
const DEFAULT_COMMUNITY_PACKET_TOPIC_TEMPLATE = 'meshcore/{IATA}/{PUBLIC_KEY}/packets';
|
const DEFAULT_COMMUNITY_PACKET_TOPIC_TEMPLATE = 'meshcore/{IATA}/{PUBLIC_KEY}/packets';
|
||||||
@@ -100,7 +108,8 @@ type DraftType =
|
|||||||
| 'webhook'
|
| 'webhook'
|
||||||
| 'apprise'
|
| 'apprise'
|
||||||
| 'sqs'
|
| 'sqs'
|
||||||
| 'bot';
|
| 'bot'
|
||||||
|
| 'map_upload';
|
||||||
|
|
||||||
type CreateIntegrationDefinition = {
|
type CreateIntegrationDefinition = {
|
||||||
value: DraftType;
|
value: DraftType;
|
||||||
@@ -143,7 +152,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'mqtt_community',
|
value: 'mqtt_community',
|
||||||
savedType: 'mqtt_community',
|
savedType: 'mqtt_community',
|
||||||
label: 'Community MQTT/meshcoretomqtt',
|
label: 'Community MQTT/meshcoretomqtt',
|
||||||
section: 'Community MQTT',
|
section: 'Community Sharing',
|
||||||
description:
|
description:
|
||||||
'MeshcoreToMQTT-compatible raw-packet feed publishing, compatible with community aggregators (in other words, make your companion radio also serve as an observer node). Superset of other Community MQTT presets.',
|
'MeshcoreToMQTT-compatible raw-packet feed publishing, compatible with community aggregators (in other words, make your companion radio also serve as an observer node). Superset of other Community MQTT presets.',
|
||||||
defaultName: 'Community MQTT',
|
defaultName: 'Community MQTT',
|
||||||
@@ -157,7 +166,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'mqtt_community_meshrank',
|
value: 'mqtt_community_meshrank',
|
||||||
savedType: 'mqtt_community',
|
savedType: 'mqtt_community',
|
||||||
label: 'MeshRank',
|
label: 'MeshRank',
|
||||||
section: 'Community MQTT',
|
section: 'Community Sharing',
|
||||||
description:
|
description:
|
||||||
'A community MQTT config preconfigured for MeshRank, requiring only the provided topic from your MeshRank configuration. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
'A community MQTT config preconfigured for MeshRank, requiring only the provided topic from your MeshRank configuration. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
||||||
defaultName: 'MeshRank',
|
defaultName: 'MeshRank',
|
||||||
@@ -180,7 +189,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'mqtt_community_letsmesh_us',
|
value: 'mqtt_community_letsmesh_us',
|
||||||
savedType: 'mqtt_community',
|
savedType: 'mqtt_community',
|
||||||
label: 'LetsMesh (US)',
|
label: 'LetsMesh (US)',
|
||||||
section: 'Community MQTT',
|
section: 'Community Sharing',
|
||||||
description:
|
description:
|
||||||
'A community MQTT config preconfigured for the LetsMesh US-ingest endpoint, requiring only your email and IATA region code. Good to use with an additional EU configuration for redundancy. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
'A community MQTT config preconfigured for the LetsMesh US-ingest endpoint, requiring only your email and IATA region code. Good to use with an additional EU configuration for redundancy. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
||||||
defaultName: 'LetsMesh (US)',
|
defaultName: 'LetsMesh (US)',
|
||||||
@@ -197,7 +206,7 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
value: 'mqtt_community_letsmesh_eu',
|
value: 'mqtt_community_letsmesh_eu',
|
||||||
savedType: 'mqtt_community',
|
savedType: 'mqtt_community',
|
||||||
label: 'LetsMesh (EU)',
|
label: 'LetsMesh (EU)',
|
||||||
section: 'Community MQTT',
|
section: 'Community Sharing',
|
||||||
description:
|
description:
|
||||||
'A community MQTT config preconfigured for the LetsMesh EU-ingest endpoint, requiring only your email and IATA region code. Good to use with an additional US configuration for redundancy. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
'A community MQTT config preconfigured for the LetsMesh EU-ingest endpoint, requiring only your email and IATA region code. Good to use with an additional US configuration for redundancy. A subset of the primary Community MQTT/meshcoretomqtt configuration; you are free to edit all configuration after creation.',
|
||||||
defaultName: 'LetsMesh (EU)',
|
defaultName: 'LetsMesh (EU)',
|
||||||
@@ -284,6 +293,23 @@ const CREATE_INTEGRATION_DEFINITIONS: readonly CreateIntegrationDefinition[] = [
|
|||||||
scope: { messages: 'all', raw_packets: 'none' },
|
scope: { messages: 'all', raw_packets: 'none' },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
value: 'map_upload',
|
||||||
|
savedType: 'map_upload',
|
||||||
|
label: 'Map Upload',
|
||||||
|
section: 'Community Sharing',
|
||||||
|
description:
|
||||||
|
'Upload repeaters and room servers to map.meshcore.dev or a compatible map API endpoint.',
|
||||||
|
defaultName: 'Map Upload',
|
||||||
|
nameMode: 'counted',
|
||||||
|
defaults: {
|
||||||
|
config: {
|
||||||
|
api_url: '',
|
||||||
|
dry_run: true,
|
||||||
|
},
|
||||||
|
scope: { messages: 'none', raw_packets: 'all' },
|
||||||
|
},
|
||||||
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
const CREATE_INTEGRATION_DEFINITIONS_BY_VALUE = Object.fromEntries(
|
const CREATE_INTEGRATION_DEFINITIONS_BY_VALUE = Object.fromEntries(
|
||||||
@@ -566,7 +592,9 @@ function getDefaultIntegrationName(type: string, configs: FanoutConfig[]) {
|
|||||||
|
|
||||||
function getStatusLabel(status: string | undefined, type?: string) {
|
function getStatusLabel(status: string | undefined, type?: string) {
|
||||||
if (status === 'connected')
|
if (status === 'connected')
|
||||||
return type === 'bot' || type === 'webhook' || type === 'apprise' ? 'Active' : 'Connected';
|
return type === 'bot' || type === 'webhook' || type === 'apprise' || type === 'map_upload'
|
||||||
|
? 'Active'
|
||||||
|
: 'Connected';
|
||||||
if (status === 'error') return 'Error';
|
if (status === 'error') return 'Error';
|
||||||
if (status === 'disconnected') return 'Disconnected';
|
if (status === 'disconnected') return 'Disconnected';
|
||||||
return 'Inactive';
|
return 'Inactive';
|
||||||
@@ -1059,6 +1087,152 @@ function BotConfigEditor({
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function MapUploadConfigEditor({
|
||||||
|
config,
|
||||||
|
onChange,
|
||||||
|
}: {
|
||||||
|
config: Record<string, unknown>;
|
||||||
|
onChange: (config: Record<string, unknown>) => void;
|
||||||
|
}) {
|
||||||
|
const isDryRun = config.dry_run !== false;
|
||||||
|
const [radioLat, setRadioLat] = useState<number | null>(null);
|
||||||
|
const [radioLon, setRadioLon] = useState<number | null>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
api
|
||||||
|
.getRadioConfig()
|
||||||
|
.then((rc) => {
|
||||||
|
setRadioLat(rc.lat ?? 0);
|
||||||
|
setRadioLon(rc.lon ?? 0);
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
setRadioLat(0);
|
||||||
|
setRadioLon(0);
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const radioLatLonConfigured =
|
||||||
|
radioLat !== null && radioLon !== null && !(radioLat === 0 && radioLon === 0);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-3">
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Automatically upload heard repeater and room server advertisements to{' '}
|
||||||
|
<a
|
||||||
|
href="https://map.meshcore.dev"
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="underline hover:text-foreground"
|
||||||
|
>
|
||||||
|
map.meshcore.dev
|
||||||
|
</a>
|
||||||
|
. Requires the radio's private key to be available (firmware must have{' '}
|
||||||
|
<code>ENABLE_PRIVATE_KEY_EXPORT=1</code>). Only raw RF packets are shared — never
|
||||||
|
decrypted messages.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="rounded-md border border-warning/50 bg-warning/10 px-3 py-2 text-xs text-warning">
|
||||||
|
<strong>Dry Run is {isDryRun ? 'ON' : 'OFF'}.</strong>{' '}
|
||||||
|
{isDryRun
|
||||||
|
? 'No uploads will be sent. Check the backend logs to verify the payload looks correct before enabling live sends.'
|
||||||
|
: 'Live uploads are enabled. Each advert is rate-limited to once per hour per node.'}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<label className="flex items-center gap-3 cursor-pointer">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={isDryRun}
|
||||||
|
onChange={(e) => onChange({ ...config, dry_run: e.target.checked })}
|
||||||
|
className="h-4 w-4 rounded border-border"
|
||||||
|
/>
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-medium">Dry Run (log only, no uploads)</span>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
When enabled, upload payloads are logged at INFO level but not sent. Disable once you
|
||||||
|
have confirmed the logged output looks correct.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-map-api-url">API URL (optional)</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-map-api-url"
|
||||||
|
type="url"
|
||||||
|
placeholder="https://map.meshcore.dev/api/v1/uploader/node"
|
||||||
|
value={(config.api_url as string) || ''}
|
||||||
|
onChange={(e) => onChange({ ...config, api_url: e.target.value })}
|
||||||
|
/>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Leave blank to use the default <code>map.meshcore.dev</code> endpoint.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<label className="flex items-center gap-3 cursor-pointer">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={!!config.geofence_enabled}
|
||||||
|
onChange={(e) => onChange({ ...config, geofence_enabled: e.target.checked })}
|
||||||
|
className="h-4 w-4 rounded border-border"
|
||||||
|
/>
|
||||||
|
<div>
|
||||||
|
<span className="text-sm font-medium">Enable Geofence</span>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Only upload nodes whose location falls within the configured radius of your radio's
|
||||||
|
own position. Helps exclude nodes with false or spoofed coordinates. Uses the
|
||||||
|
latitude/longitude set in Radio Settings.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
{!!config.geofence_enabled && (
|
||||||
|
<div className="space-y-3 pl-7">
|
||||||
|
{!radioLatLonConfigured && (
|
||||||
|
<div className="rounded-md border border-warning/50 bg-warning/10 px-3 py-2 text-xs text-warning">
|
||||||
|
Your radio does not currently have a latitude/longitude configured. Geofencing will be
|
||||||
|
silently skipped until coordinates are set in{' '}
|
||||||
|
<strong>Settings → Radio → Location</strong>.
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{radioLatLonConfigured && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Using radio position{' '}
|
||||||
|
<code>
|
||||||
|
{radioLat?.toFixed(5)}, {radioLon?.toFixed(5)}
|
||||||
|
</code>{' '}
|
||||||
|
as the geofence center. Update coordinates in Radio Settings to move the center.
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="fanout-map-geofence-radius">Radius (km)</Label>
|
||||||
|
<Input
|
||||||
|
id="fanout-map-geofence-radius"
|
||||||
|
type="number"
|
||||||
|
min="0"
|
||||||
|
step="any"
|
||||||
|
placeholder="e.g. 100"
|
||||||
|
value={(config.geofence_radius_km as number | undefined) ?? ''}
|
||||||
|
onChange={(e) =>
|
||||||
|
onChange({
|
||||||
|
...config,
|
||||||
|
geofence_radius_km: e.target.value === '' ? 0 : parseFloat(e.target.value),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Nodes further than this distance from your radio's position will not be uploaded.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
type ScopeMode = 'all' | 'none' | 'only' | 'except';
|
type ScopeMode = 'all' | 'none' | 'only' | 'except';
|
||||||
|
|
||||||
function getScopeMode(value: unknown): ScopeMode {
|
function getScopeMode(value: unknown): ScopeMode {
|
||||||
@@ -1687,6 +1861,10 @@ export function SettingsFanoutSection({
|
|||||||
const [inlineEditName, setInlineEditName] = useState('');
|
const [inlineEditName, setInlineEditName] = useState('');
|
||||||
const [createDialogOpen, setCreateDialogOpen] = useState(false);
|
const [createDialogOpen, setCreateDialogOpen] = useState(false);
|
||||||
const [selectedCreateType, setSelectedCreateType] = useState<DraftType | null>(null);
|
const [selectedCreateType, setSelectedCreateType] = useState<DraftType | null>(null);
|
||||||
|
const [errorDialogState, setErrorDialogState] = useState<{
|
||||||
|
integrationName: string;
|
||||||
|
error: string;
|
||||||
|
} | null>(null);
|
||||||
const [busy, setBusy] = useState(false);
|
const [busy, setBusy] = useState(false);
|
||||||
|
|
||||||
const loadConfigs = useCallback(async () => {
|
const loadConfigs = useCallback(async () => {
|
||||||
@@ -1975,6 +2153,10 @@ export function SettingsFanoutSection({
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{detailType === 'map_upload' && (
|
||||||
|
<MapUploadConfigEditor config={editConfig} onChange={setEditConfig} />
|
||||||
|
)}
|
||||||
|
|
||||||
<Separator />
|
<Separator />
|
||||||
|
|
||||||
<div className="flex gap-2">
|
<div className="flex gap-2">
|
||||||
@@ -2036,6 +2218,31 @@ export function SettingsFanoutSection({
|
|||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
<Dialog
|
||||||
|
open={errorDialogState !== null}
|
||||||
|
onOpenChange={(open) => {
|
||||||
|
if (!open) {
|
||||||
|
setErrorDialogState(null);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<DialogContent className="sm:max-w-md">
|
||||||
|
<DialogHeader className="border-b border-border px-5 py-4">
|
||||||
|
<DialogTitle>
|
||||||
|
{errorDialogState ? `${errorDialogState.integrationName} Error` : 'Integration Error'}
|
||||||
|
</DialogTitle>
|
||||||
|
<DialogDescription>
|
||||||
|
Most recent backend error retained for this integration.
|
||||||
|
</DialogDescription>
|
||||||
|
</DialogHeader>
|
||||||
|
<div className="px-5 py-4 text-sm text-muted-foreground">
|
||||||
|
<p className="whitespace-pre-wrap break-words font-mono text-foreground">
|
||||||
|
{errorDialogState?.error}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
|
||||||
{configGroups.length > 0 && (
|
{configGroups.length > 0 && (
|
||||||
<div className="columns-1 gap-4 md:columns-2">
|
<div className="columns-1 gap-4 md:columns-2">
|
||||||
{configGroups.map((group) => (
|
{configGroups.map((group) => (
|
||||||
@@ -2049,6 +2256,7 @@ export function SettingsFanoutSection({
|
|||||||
{group.configs.map((cfg) => {
|
{group.configs.map((cfg) => {
|
||||||
const statusEntry = health?.fanout_statuses?.[cfg.id];
|
const statusEntry = health?.fanout_statuses?.[cfg.id];
|
||||||
const status = cfg.enabled ? statusEntry?.status : undefined;
|
const status = cfg.enabled ? statusEntry?.status : undefined;
|
||||||
|
const lastError = cfg.enabled ? statusEntry?.last_error : null;
|
||||||
const communityConfig = cfg.config as Record<string, unknown>;
|
const communityConfig = cfg.config as Record<string, unknown>;
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
@@ -2115,6 +2323,25 @@ export function SettingsFanoutSection({
|
|||||||
{cfg.enabled ? getStatusLabel(status, cfg.type) : 'Disabled'}
|
{cfg.enabled ? getStatusLabel(status, cfg.type) : 'Disabled'}
|
||||||
</span>
|
</span>
|
||||||
|
|
||||||
|
{lastError && (
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
className="h-6 w-6 px-0"
|
||||||
|
onClick={() =>
|
||||||
|
setErrorDialogState({
|
||||||
|
integrationName: cfg.name,
|
||||||
|
error: lastError,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
aria-label={`View error details for ${cfg.name}`}
|
||||||
|
title="View latest error"
|
||||||
|
>
|
||||||
|
<Info className="h-3.5 w-3.5" aria-hidden="true" />
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
|
||||||
<Button
|
<Button
|
||||||
type="button"
|
type="button"
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import {
|
|||||||
} from '../utils/lastViewedConversation';
|
} from '../utils/lastViewedConversation';
|
||||||
import { findPublicChannel } from '../utils/publicChannel';
|
import { findPublicChannel } from '../utils/publicChannel';
|
||||||
import { getContactDisplayName } from '../utils/pubkey';
|
import { getContactDisplayName } from '../utils/pubkey';
|
||||||
|
import { toast } from '../components/ui/sonner';
|
||||||
import type { Channel, Contact, Conversation } from '../types';
|
import type { Channel, Contact, Conversation } from '../types';
|
||||||
|
|
||||||
interface UseConversationRouterArgs {
|
interface UseConversationRouterArgs {
|
||||||
@@ -137,6 +138,11 @@ export function useConversationRouter({
|
|||||||
// No hash or unresolvable — default to Public
|
// No hash or unresolvable — default to Public
|
||||||
const publicConversation = getPublicChannelConversation();
|
const publicConversation = getPublicChannelConversation();
|
||||||
if (publicConversation) {
|
if (publicConversation) {
|
||||||
|
if (hashConv?.type === 'channel') {
|
||||||
|
const token =
|
||||||
|
hashConv.name.length > 16 ? hashConv.name.substring(0, 16) + '…' : hashConv.name;
|
||||||
|
toast.error(`Channel not found: ${token}`);
|
||||||
|
}
|
||||||
setActiveConversationState(publicConversation);
|
setActiveConversationState(publicConversation);
|
||||||
hasSetDefaultConversation.current = true;
|
hasSetDefaultConversation.current = true;
|
||||||
}
|
}
|
||||||
@@ -162,6 +168,9 @@ export function useConversationRouter({
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Contact hash didn't match — fall back to Public if channels loaded.
|
// Contact hash didn't match — fall back to Public if channels loaded.
|
||||||
|
const token =
|
||||||
|
hashConv.name.length > 16 ? hashConv.name.substring(0, 16) + '…' : hashConv.name;
|
||||||
|
toast.error(`Contact not found: ${token}`);
|
||||||
const publicConversation = getPublicChannelConversation();
|
const publicConversation = getPublicChannelConversation();
|
||||||
if (publicConversation) {
|
if (publicConversation) {
|
||||||
setActiveConversationState(publicConversation);
|
setActiveConversationState(publicConversation);
|
||||||
|
|||||||
@@ -2,12 +2,13 @@ import { useCallback, useEffect, useMemo, useState } from 'react';
|
|||||||
|
|
||||||
type ServerLoginKind = 'repeater' | 'room';
|
type ServerLoginKind = 'repeater' | 'room';
|
||||||
|
|
||||||
const STORAGE_KEY_PREFIX = 'remoteterm-server-password';
|
|
||||||
|
|
||||||
type StoredPassword = {
|
type StoredPassword = {
|
||||||
password: string;
|
password: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const STORAGE_KEY_PREFIX = 'remoteterm-server-password';
|
||||||
|
const inMemoryPasswords = new Map<string, StoredPassword>();
|
||||||
|
|
||||||
function getStorageKey(kind: ServerLoginKind, publicKey: string): string {
|
function getStorageKey(kind: ServerLoginKind, publicKey: string): string {
|
||||||
return `${STORAGE_KEY_PREFIX}:${kind}:${publicKey}`;
|
return `${STORAGE_KEY_PREFIX}:${kind}:${publicKey}`;
|
||||||
}
|
}
|
||||||
@@ -33,37 +34,46 @@ export function useRememberedServerPassword(kind: ServerLoginKind, publicKey: st
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const stored = loadStoredPassword(kind, publicKey);
|
const stored = loadStoredPassword(kind, publicKey);
|
||||||
if (!stored) {
|
if (stored) {
|
||||||
setPassword('');
|
setPassword(stored.password);
|
||||||
|
setRememberPassword(true);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const inMemoryStored = inMemoryPasswords.get(storageKey);
|
||||||
|
if (inMemoryStored) {
|
||||||
|
setPassword(inMemoryStored.password);
|
||||||
setRememberPassword(false);
|
setRememberPassword(false);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
setPassword(stored.password);
|
|
||||||
setRememberPassword(true);
|
setPassword('');
|
||||||
}, [kind, publicKey]);
|
setRememberPassword(false);
|
||||||
|
}, [kind, publicKey, storageKey]);
|
||||||
|
|
||||||
const persistAfterLogin = useCallback(
|
const persistAfterLogin = useCallback(
|
||||||
(submittedPassword: string) => {
|
(submittedPassword: string) => {
|
||||||
|
const trimmedPassword = submittedPassword.trim();
|
||||||
|
if (!trimmedPassword) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
inMemoryPasswords.set(storageKey, { password: trimmedPassword });
|
||||||
|
|
||||||
if (!rememberPassword) {
|
if (!rememberPassword) {
|
||||||
try {
|
try {
|
||||||
localStorage.removeItem(storageKey);
|
localStorage.removeItem(storageKey);
|
||||||
} catch {
|
} catch {
|
||||||
// localStorage may be unavailable
|
// localStorage may be unavailable
|
||||||
}
|
}
|
||||||
setPassword('');
|
} else {
|
||||||
return;
|
try {
|
||||||
|
localStorage.setItem(storageKey, JSON.stringify({ password: trimmedPassword }));
|
||||||
|
} catch {
|
||||||
|
// localStorage may be unavailable
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const trimmedPassword = submittedPassword.trim();
|
|
||||||
if (!trimmedPassword) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
localStorage.setItem(storageKey, JSON.stringify({ password: trimmedPassword }));
|
|
||||||
} catch {
|
|
||||||
// localStorage may be unavailable
|
|
||||||
}
|
|
||||||
setPassword(trimmedPassword);
|
setPassword(trimmedPassword);
|
||||||
},
|
},
|
||||||
[rememberPassword, storageKey]
|
[rememberPassword, storageKey]
|
||||||
|
|||||||
@@ -15,6 +15,11 @@ import type {
|
|||||||
RepeaterLppTelemetryResponse,
|
RepeaterLppTelemetryResponse,
|
||||||
CommandResponse,
|
CommandResponse,
|
||||||
} from '../types';
|
} from '../types';
|
||||||
|
import {
|
||||||
|
buildServerLoginAttemptFromError,
|
||||||
|
buildServerLoginAttemptFromResponse,
|
||||||
|
type ServerLoginAttemptState,
|
||||||
|
} from '../utils/serverLoginState';
|
||||||
|
|
||||||
const MAX_RETRIES = 3;
|
const MAX_RETRIES = 3;
|
||||||
const RETRY_DELAY_MS = 2000;
|
const RETRY_DELAY_MS = 2000;
|
||||||
@@ -41,6 +46,7 @@ interface PaneData {
|
|||||||
interface RepeaterDashboardCacheEntry {
|
interface RepeaterDashboardCacheEntry {
|
||||||
loggedIn: boolean;
|
loggedIn: boolean;
|
||||||
loginError: string | null;
|
loginError: string | null;
|
||||||
|
lastLoginAttempt: ServerLoginAttemptState | null;
|
||||||
paneData: PaneData;
|
paneData: PaneData;
|
||||||
paneStates: Record<PaneName, PaneState>;
|
paneStates: Record<PaneName, PaneState>;
|
||||||
consoleHistory: ConsoleEntry[];
|
consoleHistory: ConsoleEntry[];
|
||||||
@@ -119,6 +125,7 @@ function getCachedState(publicKey: string | null): RepeaterDashboardCacheEntry |
|
|||||||
return {
|
return {
|
||||||
loggedIn: cached.loggedIn,
|
loggedIn: cached.loggedIn,
|
||||||
loginError: cached.loginError,
|
loginError: cached.loginError,
|
||||||
|
lastLoginAttempt: cached.lastLoginAttempt,
|
||||||
paneData: clonePaneData(cached.paneData),
|
paneData: clonePaneData(cached.paneData),
|
||||||
paneStates: normalizePaneStates(cached.paneStates),
|
paneStates: normalizePaneStates(cached.paneStates),
|
||||||
consoleHistory: cloneConsoleHistory(cached.consoleHistory),
|
consoleHistory: cloneConsoleHistory(cached.consoleHistory),
|
||||||
@@ -130,6 +137,7 @@ function cacheState(publicKey: string, entry: RepeaterDashboardCacheEntry) {
|
|||||||
repeaterDashboardCache.set(publicKey, {
|
repeaterDashboardCache.set(publicKey, {
|
||||||
loggedIn: entry.loggedIn,
|
loggedIn: entry.loggedIn,
|
||||||
loginError: entry.loginError,
|
loginError: entry.loginError,
|
||||||
|
lastLoginAttempt: entry.lastLoginAttempt,
|
||||||
paneData: clonePaneData(entry.paneData),
|
paneData: clonePaneData(entry.paneData),
|
||||||
paneStates: normalizePaneStates(entry.paneStates),
|
paneStates: normalizePaneStates(entry.paneStates),
|
||||||
consoleHistory: cloneConsoleHistory(entry.consoleHistory),
|
consoleHistory: cloneConsoleHistory(entry.consoleHistory),
|
||||||
@@ -173,6 +181,7 @@ export interface UseRepeaterDashboardResult {
|
|||||||
loggedIn: boolean;
|
loggedIn: boolean;
|
||||||
loginLoading: boolean;
|
loginLoading: boolean;
|
||||||
loginError: string | null;
|
loginError: string | null;
|
||||||
|
lastLoginAttempt: ServerLoginAttemptState | null;
|
||||||
paneData: PaneData;
|
paneData: PaneData;
|
||||||
paneStates: Record<PaneName, PaneState>;
|
paneStates: Record<PaneName, PaneState>;
|
||||||
consoleHistory: ConsoleEntry[];
|
consoleHistory: ConsoleEntry[];
|
||||||
@@ -203,6 +212,9 @@ export function useRepeaterDashboard(
|
|||||||
const [loggedIn, setLoggedIn] = useState(cachedState?.loggedIn ?? false);
|
const [loggedIn, setLoggedIn] = useState(cachedState?.loggedIn ?? false);
|
||||||
const [loginLoading, setLoginLoading] = useState(false);
|
const [loginLoading, setLoginLoading] = useState(false);
|
||||||
const [loginError, setLoginError] = useState<string | null>(cachedState?.loginError ?? null);
|
const [loginError, setLoginError] = useState<string | null>(cachedState?.loginError ?? null);
|
||||||
|
const [lastLoginAttempt, setLastLoginAttempt] = useState<ServerLoginAttemptState | null>(
|
||||||
|
cachedState?.lastLoginAttempt ?? null
|
||||||
|
);
|
||||||
|
|
||||||
const [paneData, setPaneData] = useState<PaneData>(
|
const [paneData, setPaneData] = useState<PaneData>(
|
||||||
cachedState?.paneData ?? createInitialPaneData
|
cachedState?.paneData ?? createInitialPaneData
|
||||||
@@ -243,11 +255,20 @@ export function useRepeaterDashboard(
|
|||||||
cacheState(conversationId, {
|
cacheState(conversationId, {
|
||||||
loggedIn,
|
loggedIn,
|
||||||
loginError,
|
loginError,
|
||||||
|
lastLoginAttempt,
|
||||||
paneData,
|
paneData,
|
||||||
paneStates,
|
paneStates,
|
||||||
consoleHistory,
|
consoleHistory,
|
||||||
});
|
});
|
||||||
}, [consoleHistory, conversationId, loggedIn, loginError, paneData, paneStates]);
|
}, [
|
||||||
|
consoleHistory,
|
||||||
|
conversationId,
|
||||||
|
loggedIn,
|
||||||
|
loginError,
|
||||||
|
lastLoginAttempt,
|
||||||
|
paneData,
|
||||||
|
paneStates,
|
||||||
|
]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
paneDataRef.current = paneData;
|
paneDataRef.current = paneData;
|
||||||
@@ -267,12 +288,14 @@ export function useRepeaterDashboard(
|
|||||||
const publicKey = getPublicKey();
|
const publicKey = getPublicKey();
|
||||||
if (!publicKey) return;
|
if (!publicKey) return;
|
||||||
const conversationId = publicKey;
|
const conversationId = publicKey;
|
||||||
|
const method = password.trim().length > 0 ? 'password' : 'blank';
|
||||||
|
|
||||||
setLoginLoading(true);
|
setLoginLoading(true);
|
||||||
setLoginError(null);
|
setLoginError(null);
|
||||||
try {
|
try {
|
||||||
const result = await api.repeaterLogin(publicKey, password);
|
const result = await api.repeaterLogin(publicKey, password);
|
||||||
if (activeIdRef.current !== conversationId) return;
|
if (activeIdRef.current !== conversationId) return;
|
||||||
|
setLastLoginAttempt(buildServerLoginAttemptFromResponse(method, result, 'repeater'));
|
||||||
setLoggedIn(true);
|
setLoggedIn(true);
|
||||||
if (!result.authenticated) {
|
if (!result.authenticated) {
|
||||||
const msg = result.message ?? 'Repeater login was not confirmed';
|
const msg = result.message ?? 'Repeater login was not confirmed';
|
||||||
@@ -282,6 +305,7 @@ export function useRepeaterDashboard(
|
|||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (activeIdRef.current !== conversationId) return;
|
if (activeIdRef.current !== conversationId) return;
|
||||||
const msg = err instanceof Error ? err.message : 'Login failed';
|
const msg = err instanceof Error ? err.message : 'Login failed';
|
||||||
|
setLastLoginAttempt(buildServerLoginAttemptFromError(method, msg, 'repeater'));
|
||||||
setLoggedIn(true);
|
setLoggedIn(true);
|
||||||
setLoginError(msg);
|
setLoginError(msg);
|
||||||
toast.error('Login request failed', {
|
toast.error('Login request failed', {
|
||||||
@@ -475,6 +499,7 @@ export function useRepeaterDashboard(
|
|||||||
loggedIn,
|
loggedIn,
|
||||||
loginLoading,
|
loginLoading,
|
||||||
loginError,
|
loginError,
|
||||||
|
lastLoginAttempt,
|
||||||
paneData,
|
paneData,
|
||||||
paneStates,
|
paneStates,
|
||||||
consoleHistory,
|
consoleHistory,
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ vi.mock('../api', () => ({
|
|||||||
deleteFanoutConfig: vi.fn(),
|
deleteFanoutConfig: vi.fn(),
|
||||||
getChannels: vi.fn(),
|
getChannels: vi.fn(),
|
||||||
getContacts: vi.fn(),
|
getContacts: vi.fn(),
|
||||||
|
getRadioConfig: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -96,6 +97,17 @@ beforeEach(() => {
|
|||||||
mockedApi.getFanoutConfigs.mockResolvedValue([]);
|
mockedApi.getFanoutConfigs.mockResolvedValue([]);
|
||||||
mockedApi.getChannels.mockResolvedValue([]);
|
mockedApi.getChannels.mockResolvedValue([]);
|
||||||
mockedApi.getContacts.mockResolvedValue([]);
|
mockedApi.getContacts.mockResolvedValue([]);
|
||||||
|
mockedApi.getRadioConfig.mockResolvedValue({
|
||||||
|
public_key: 'aa'.repeat(32),
|
||||||
|
name: 'TestNode',
|
||||||
|
lat: 0,
|
||||||
|
lon: 0,
|
||||||
|
tx_power: 17,
|
||||||
|
max_tx_power: 22,
|
||||||
|
radio: { freq: 910.525, bw: 62.5, sf: 7, cr: 5 },
|
||||||
|
path_hash_mode: 0,
|
||||||
|
path_hash_mode_supported: false,
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('SettingsFanoutSection', () => {
|
describe('SettingsFanoutSection', () => {
|
||||||
@@ -106,7 +118,7 @@ describe('SettingsFanoutSection', () => {
|
|||||||
const optionButtons = within(dialog)
|
const optionButtons = within(dialog)
|
||||||
.getAllByRole('button')
|
.getAllByRole('button')
|
||||||
.filter((button) => button.hasAttribute('aria-pressed'));
|
.filter((button) => button.hasAttribute('aria-pressed'));
|
||||||
expect(optionButtons).toHaveLength(9);
|
expect(optionButtons).toHaveLength(10);
|
||||||
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
||||||
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
||||||
expect(
|
expect(
|
||||||
@@ -138,6 +150,9 @@ describe('SettingsFanoutSection', () => {
|
|||||||
expect(
|
expect(
|
||||||
within(dialog).getByRole('button', { name: startsWithAccessibleName('Python Bot') })
|
within(dialog).getByRole('button', { name: startsWithAccessibleName('Python Bot') })
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
within(dialog).getByRole('button', { name: startsWithAccessibleName('Map Upload') })
|
||||||
|
).toBeInTheDocument();
|
||||||
expect(within(dialog).getByRole('heading', { level: 3 })).toBeInTheDocument();
|
expect(within(dialog).getByRole('heading', { level: 3 })).toBeInTheDocument();
|
||||||
|
|
||||||
const genericCommunityIndex = optionButtons.findIndex((button) =>
|
const genericCommunityIndex = optionButtons.findIndex((button) =>
|
||||||
@@ -191,6 +206,56 @@ describe('SettingsFanoutSection', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows an error info button and dialog when the integration has a retained error', async () => {
|
||||||
|
mockedApi.getFanoutConfigs.mockResolvedValue([webhookConfig]);
|
||||||
|
renderSection({
|
||||||
|
health: {
|
||||||
|
...baseHealth,
|
||||||
|
fanout_statuses: {
|
||||||
|
'wh-1': {
|
||||||
|
name: 'Test Hook',
|
||||||
|
type: 'webhook',
|
||||||
|
status: 'error',
|
||||||
|
last_error: 'HTTP 500',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByText('Test Hook')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: 'View error details for Test Hook' }));
|
||||||
|
|
||||||
|
expect(screen.getByRole('dialog', { name: 'Test Hook Error' })).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('HTTP 500')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not show an error info button when the integration has no retained error', async () => {
|
||||||
|
mockedApi.getFanoutConfigs.mockResolvedValue([webhookConfig]);
|
||||||
|
renderSection({
|
||||||
|
health: {
|
||||||
|
...baseHealth,
|
||||||
|
fanout_statuses: {
|
||||||
|
'wh-1': {
|
||||||
|
name: 'Test Hook',
|
||||||
|
type: 'webhook',
|
||||||
|
status: 'connected',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByText('Test Hook')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.queryByRole('button', { name: 'View error details for Test Hook' })
|
||||||
|
).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('navigates to edit view when clicking edit', async () => {
|
it('navigates to edit view when clicking edit', async () => {
|
||||||
mockedApi.getFanoutConfigs.mockResolvedValue([webhookConfig]);
|
mockedApi.getFanoutConfigs.mockResolvedValue([webhookConfig]);
|
||||||
renderSection();
|
renderSection();
|
||||||
@@ -916,7 +981,7 @@ describe('SettingsFanoutSection', () => {
|
|||||||
|
|
||||||
await waitFor(() => expect(screen.getByText('← Back to list')).toBeInTheDocument());
|
await waitFor(() => expect(screen.getByText('← Back to list')).toBeInTheDocument());
|
||||||
|
|
||||||
expect(screen.getByLabelText('Name')).toHaveValue('Community MQTT #1');
|
expect(screen.getByLabelText('Name')).toHaveValue('Community Sharing #1');
|
||||||
expect(screen.getByLabelText('Broker Host')).toBeInTheDocument();
|
expect(screen.getByLabelText('Broker Host')).toBeInTheDocument();
|
||||||
expect(screen.getByLabelText('Authentication')).toBeInTheDocument();
|
expect(screen.getByLabelText('Authentication')).toBeInTheDocument();
|
||||||
expect(screen.getByLabelText('Packet Topic Template')).toBeInTheDocument();
|
expect(screen.getByLabelText('Packet Topic Template')).toBeInTheDocument();
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ const mockHook: {
|
|||||||
loggedIn: false,
|
loggedIn: false,
|
||||||
loginLoading: false,
|
loginLoading: false,
|
||||||
loginError: null,
|
loginError: null,
|
||||||
|
lastLoginAttempt: null,
|
||||||
paneData: {
|
paneData: {
|
||||||
status: null,
|
status: null,
|
||||||
nodeInfo: null,
|
nodeInfo: null,
|
||||||
|
|||||||
@@ -56,22 +56,84 @@ describe('RoomServerPanel', () => {
|
|||||||
status: 'timeout',
|
status: 'timeout',
|
||||||
authenticated: false,
|
authenticated: false,
|
||||||
message:
|
message:
|
||||||
'No login confirmation was heard from the room server. The control panel is still available; try logging in again if authenticated actions fail.',
|
"No login confirmation was heard from the room server. You're free to try sending messages; try logging in again if authenticated actions fail.",
|
||||||
});
|
});
|
||||||
const onAuthenticatedChange = vi.fn();
|
const onAuthenticatedChange = vi.fn();
|
||||||
|
|
||||||
render(<RoomServerPanel contact={roomContact} onAuthenticatedChange={onAuthenticatedChange} />);
|
render(<RoomServerPanel contact={roomContact} onAuthenticatedChange={onAuthenticatedChange} />);
|
||||||
|
|
||||||
fireEvent.click(screen.getByText('Login with ACL / Guest'));
|
fireEvent.click(screen.getByText('Login with Existing Access / Guest'));
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(screen.getByText('Show Tools')).toBeInTheDocument();
|
expect(screen.getByText('Show Tools')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
expect(screen.getByText('Show Tools')).toBeInTheDocument();
|
expect(screen.getByText('Show Tools')).toBeInTheDocument();
|
||||||
expect(mockToast.warning).toHaveBeenCalledWith('Room login not confirmed', {
|
expect(screen.getByText('Retry Existing-Access Login')).toBeInTheDocument();
|
||||||
|
expect(mockToast.warning).toHaveBeenCalledWith("Couldn't confirm room login", {
|
||||||
description:
|
description:
|
||||||
'No login confirmation was heard from the room server. The control panel is still available; try logging in again if authenticated actions fail.',
|
"No login confirmation was heard from the room server. You're free to try sending messages; try logging in again if authenticated actions fail.",
|
||||||
});
|
});
|
||||||
expect(onAuthenticatedChange).toHaveBeenLastCalledWith(true);
|
expect(onAuthenticatedChange).toHaveBeenLastCalledWith(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('retains the last password for one-click retry after unlocking the panel', async () => {
|
||||||
|
mockApi.roomLogin
|
||||||
|
.mockResolvedValueOnce({
|
||||||
|
status: 'timeout',
|
||||||
|
authenticated: false,
|
||||||
|
message: 'No reply heard',
|
||||||
|
})
|
||||||
|
.mockResolvedValueOnce({
|
||||||
|
status: 'ok',
|
||||||
|
authenticated: true,
|
||||||
|
message: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
render(<RoomServerPanel contact={roomContact} />);
|
||||||
|
|
||||||
|
fireEvent.change(screen.getByLabelText('Repeater password'), {
|
||||||
|
target: { value: 'secret-room-password' },
|
||||||
|
});
|
||||||
|
fireEvent.click(screen.getByText('Login with Password'));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByText('Retry Password Login')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByText('Retry Password Login'));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockApi.roomLogin).toHaveBeenNthCalledWith(
|
||||||
|
1,
|
||||||
|
roomContact.public_key,
|
||||||
|
'secret-room-password'
|
||||||
|
);
|
||||||
|
expect(mockApi.roomLogin).toHaveBeenNthCalledWith(
|
||||||
|
2,
|
||||||
|
roomContact.public_key,
|
||||||
|
'secret-room-password'
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows only a success toast after a confirmed login', async () => {
|
||||||
|
mockApi.roomLogin.mockResolvedValueOnce({
|
||||||
|
status: 'ok',
|
||||||
|
authenticated: true,
|
||||||
|
message: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
render(<RoomServerPanel contact={roomContact} />);
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByText('Login with Existing Access / Guest'));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByText('Show Tools')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(screen.queryByText('Login confirmed by the room server.')).not.toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('Retry Password Login')).not.toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('Retry Existing-Access Login')).not.toBeInTheDocument();
|
||||||
|
expect(mockToast.success).toHaveBeenCalledWith('Login confirmed by the room server.');
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -8,70 +8,24 @@ describe('useRememberedServerPassword', () => {
|
|||||||
localStorage.clear();
|
localStorage.clear();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('loads remembered passwords from localStorage', () => {
|
it('restores the last in-memory password when local remember is disabled', () => {
|
||||||
localStorage.setItem(
|
const { result, unmount } = renderHook(() =>
|
||||||
'remoteterm-server-password:repeater:abc123',
|
useRememberedServerPassword('room', 'aa'.repeat(32))
|
||||||
JSON.stringify({ password: 'stored-secret' })
|
|
||||||
);
|
);
|
||||||
|
|
||||||
const { result } = renderHook(() => useRememberedServerPassword('repeater', 'abc123'));
|
|
||||||
|
|
||||||
expect(result.current.password).toBe('stored-secret');
|
|
||||||
expect(result.current.rememberPassword).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores passwords after login when remember is enabled', () => {
|
|
||||||
const { result } = renderHook(() => useRememberedServerPassword('room', 'room-key'));
|
|
||||||
|
|
||||||
act(() => {
|
act(() => {
|
||||||
result.current.setRememberPassword(true);
|
result.current.setPassword('room-secret');
|
||||||
|
result.current.persistAfterLogin('room-secret');
|
||||||
});
|
});
|
||||||
|
|
||||||
act(() => {
|
expect(result.current.password).toBe('room-secret');
|
||||||
result.current.persistAfterLogin(' hello ');
|
unmount();
|
||||||
});
|
|
||||||
|
|
||||||
expect(localStorage.getItem('remoteterm-server-password:room:room-key')).toBe(
|
const { result: remounted } = renderHook(() =>
|
||||||
JSON.stringify({ password: 'hello' })
|
useRememberedServerPassword('room', 'aa'.repeat(32))
|
||||||
);
|
|
||||||
expect(result.current.password).toBe('hello');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('clears stored passwords when login is done with remember disabled', () => {
|
|
||||||
localStorage.setItem(
|
|
||||||
'remoteterm-server-password:repeater:abc123',
|
|
||||||
JSON.stringify({ password: 'stored-secret' })
|
|
||||||
);
|
);
|
||||||
|
|
||||||
const { result } = renderHook(() => useRememberedServerPassword('repeater', 'abc123'));
|
expect(remounted.current.password).toBe('room-secret');
|
||||||
|
expect(remounted.current.rememberPassword).toBe(false);
|
||||||
act(() => {
|
|
||||||
result.current.setRememberPassword(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
act(() => {
|
|
||||||
result.current.persistAfterLogin('new-secret');
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(localStorage.getItem('remoteterm-server-password:repeater:abc123')).toBeNull();
|
|
||||||
expect(result.current.password).toBe('');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('preserves remembered passwords on guest login when remember stays enabled', () => {
|
|
||||||
localStorage.setItem(
|
|
||||||
'remoteterm-server-password:room:room-key',
|
|
||||||
JSON.stringify({ password: 'stored-secret' })
|
|
||||||
);
|
|
||||||
|
|
||||||
const { result } = renderHook(() => useRememberedServerPassword('room', 'room-key'));
|
|
||||||
|
|
||||||
act(() => {
|
|
||||||
result.current.persistAfterLogin('');
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(localStorage.getItem('remoteterm-server-password:room:room-key')).toBe(
|
|
||||||
JSON.stringify({ password: 'stored-secret' })
|
|
||||||
);
|
|
||||||
expect(result.current.password).toBe('stored-secret');
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -74,6 +74,8 @@ describe('useRepeaterDashboard', () => {
|
|||||||
|
|
||||||
expect(result.current.loggedIn).toBe(true);
|
expect(result.current.loggedIn).toBe(true);
|
||||||
expect(result.current.loginError).toBe(null);
|
expect(result.current.loginError).toBe(null);
|
||||||
|
expect(result.current.lastLoginAttempt?.heardBack).toBe(true);
|
||||||
|
expect(result.current.lastLoginAttempt?.outcome).toBe('confirmed');
|
||||||
expect(mockApi.repeaterLogin).toHaveBeenCalledWith(REPEATER_KEY, 'secret');
|
expect(mockApi.repeaterLogin).toHaveBeenCalledWith(REPEATER_KEY, 'secret');
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -92,6 +94,8 @@ describe('useRepeaterDashboard', () => {
|
|||||||
|
|
||||||
expect(result.current.loggedIn).toBe(true);
|
expect(result.current.loggedIn).toBe(true);
|
||||||
expect(result.current.loginError).toBe('Auth failed');
|
expect(result.current.loginError).toBe('Auth failed');
|
||||||
|
expect(result.current.lastLoginAttempt?.heardBack).toBe(true);
|
||||||
|
expect(result.current.lastLoginAttempt?.outcome).toBe('not_confirmed');
|
||||||
expect(mockToast.error).toHaveBeenCalledWith('Login not confirmed', {
|
expect(mockToast.error).toHaveBeenCalledWith('Login not confirmed', {
|
||||||
description: 'Auth failed',
|
description: 'Auth failed',
|
||||||
});
|
});
|
||||||
@@ -125,6 +129,8 @@ describe('useRepeaterDashboard', () => {
|
|||||||
|
|
||||||
expect(result.current.loggedIn).toBe(true);
|
expect(result.current.loggedIn).toBe(true);
|
||||||
expect(result.current.loginError).toBe('Network error');
|
expect(result.current.loginError).toBe('Network error');
|
||||||
|
expect(result.current.lastLoginAttempt?.heardBack).toBe(false);
|
||||||
|
expect(result.current.lastLoginAttempt?.outcome).toBe('request_failed');
|
||||||
expect(mockToast.error).toHaveBeenCalledWith('Login request failed', {
|
expect(mockToast.error).toHaveBeenCalledWith('Login request failed', {
|
||||||
description:
|
description:
|
||||||
'Network error. The dashboard is still available, but repeater operations may fail until a login succeeds.',
|
'Network error. The dashboard is still available, but repeater operations may fail until a login succeeds.',
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ export interface FanoutStatusEntry {
|
|||||||
name: string;
|
name: string;
|
||||||
type: string;
|
type: string;
|
||||||
status: string;
|
status: string;
|
||||||
|
last_error?: string | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AppInfo {
|
export interface AppInfo {
|
||||||
|
|||||||
@@ -0,0 +1,107 @@
|
|||||||
|
import type { RepeaterLoginResponse } from '../types';
|
||||||
|
|
||||||
|
export type ServerLoginMethod = 'password' | 'blank';
|
||||||
|
|
||||||
|
export type ServerLoginAttemptState =
|
||||||
|
| {
|
||||||
|
method: ServerLoginMethod;
|
||||||
|
outcome: 'confirmed';
|
||||||
|
summary: string;
|
||||||
|
details: string | null;
|
||||||
|
heardBack: true;
|
||||||
|
at: number;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
method: ServerLoginMethod;
|
||||||
|
outcome: 'not_confirmed';
|
||||||
|
summary: string;
|
||||||
|
details: string | null;
|
||||||
|
heardBack: boolean;
|
||||||
|
at: number;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
method: ServerLoginMethod;
|
||||||
|
outcome: 'request_failed';
|
||||||
|
summary: string;
|
||||||
|
details: string | null;
|
||||||
|
heardBack: false;
|
||||||
|
at: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function getServerLoginMethodLabel(
|
||||||
|
method: ServerLoginMethod,
|
||||||
|
blankLabel = 'existing-access'
|
||||||
|
): string {
|
||||||
|
return method === 'password' ? 'password' : blankLabel;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getServerLoginAttemptTone(
|
||||||
|
attempt: ServerLoginAttemptState | null
|
||||||
|
): 'success' | 'warning' | 'destructive' | 'muted' {
|
||||||
|
if (!attempt) return 'muted';
|
||||||
|
if (attempt.outcome === 'confirmed') return 'success';
|
||||||
|
if (attempt.outcome === 'not_confirmed') return 'warning';
|
||||||
|
return 'destructive';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildServerLoginAttemptFromResponse(
|
||||||
|
method: ServerLoginMethod,
|
||||||
|
result: RepeaterLoginResponse,
|
||||||
|
entityLabel: string
|
||||||
|
): ServerLoginAttemptState {
|
||||||
|
const methodLabel = getServerLoginMethodLabel(method);
|
||||||
|
const at = Date.now();
|
||||||
|
const target = `the ${entityLabel}`;
|
||||||
|
|
||||||
|
if (result.authenticated) {
|
||||||
|
return {
|
||||||
|
method,
|
||||||
|
outcome: 'confirmed',
|
||||||
|
summary: `Login confirmed by ${target}.`,
|
||||||
|
details: null,
|
||||||
|
heardBack: true,
|
||||||
|
at,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.status === 'timeout') {
|
||||||
|
return {
|
||||||
|
method,
|
||||||
|
outcome: 'not_confirmed',
|
||||||
|
summary: `We couldn't confirm the login.`,
|
||||||
|
details:
|
||||||
|
result.message ??
|
||||||
|
`No confirmation came back from ${target} after the ${methodLabel} login attempt.`,
|
||||||
|
heardBack: false,
|
||||||
|
at,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
method,
|
||||||
|
outcome: 'not_confirmed',
|
||||||
|
summary: `Login was not confirmed.`,
|
||||||
|
details:
|
||||||
|
result.message ??
|
||||||
|
`${target} responded, but did not confirm the ${methodLabel} login attempt.`,
|
||||||
|
heardBack: true,
|
||||||
|
at,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildServerLoginAttemptFromError(
|
||||||
|
method: ServerLoginMethod,
|
||||||
|
message: string,
|
||||||
|
entityLabel: string
|
||||||
|
): ServerLoginAttemptState {
|
||||||
|
const methodLabel = getServerLoginMethodLabel(method);
|
||||||
|
const target = `the ${entityLabel}`;
|
||||||
|
return {
|
||||||
|
method,
|
||||||
|
outcome: 'request_failed',
|
||||||
|
summary: `We couldn't send the login request.`,
|
||||||
|
details: `${target} never acknowledged the ${methodLabel} login attempt. ${message}`,
|
||||||
|
heardBack: false,
|
||||||
|
at: Date.now(),
|
||||||
|
};
|
||||||
|
}
|
||||||
+1
-1
@@ -1,6 +1,6 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "remoteterm-meshcore"
|
name = "remoteterm-meshcore"
|
||||||
version = "3.6.0"
|
version = "3.6.2"
|
||||||
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.10"
|
requires-python = ">=3.10"
|
||||||
|
|||||||
+42
-18
@@ -12,6 +12,8 @@ cd "$SCRIPT_DIR"
|
|||||||
|
|
||||||
RELEASE_WORK_DIR=""
|
RELEASE_WORK_DIR=""
|
||||||
RELEASE_BUNDLE_DIR_NAME="Remote-Terminal-for-MeshCore"
|
RELEASE_BUNDLE_DIR_NAME="Remote-Terminal-for-MeshCore"
|
||||||
|
DOCKER_IMAGE="jkingsman/remoteterm-meshcore"
|
||||||
|
DOCKER_PLATFORMS="linux/amd64,linux/arm64"
|
||||||
|
|
||||||
cleanup_release_build_artifacts() {
|
cleanup_release_build_artifacts() {
|
||||||
if [ -d "$SCRIPT_DIR/frontend/prebuilt" ]; then
|
if [ -d "$SCRIPT_DIR/frontend/prebuilt" ]; then
|
||||||
@@ -24,6 +26,28 @@ cleanup_release_build_artifacts() {
|
|||||||
|
|
||||||
trap cleanup_release_build_artifacts EXIT
|
trap cleanup_release_build_artifacts EXIT
|
||||||
|
|
||||||
|
ensure_buildx_builder() {
|
||||||
|
if ! docker buildx version >/dev/null 2>&1; then
|
||||||
|
echo -e "${RED}Error: docker buildx is required for multi-arch Docker builds.${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
local current_builder
|
||||||
|
current_builder="$(docker buildx inspect --format '{{ .Name }}' 2>/dev/null || true)"
|
||||||
|
|
||||||
|
if [ -n "$current_builder" ]; then
|
||||||
|
docker buildx inspect --bootstrap >/dev/null
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if docker buildx inspect remoteterm-multiarch >/dev/null 2>&1; then
|
||||||
|
docker buildx use remoteterm-multiarch >/dev/null
|
||||||
|
else
|
||||||
|
docker buildx create --name remoteterm-multiarch --use >/dev/null
|
||||||
|
fi
|
||||||
|
docker buildx inspect --bootstrap >/dev/null
|
||||||
|
}
|
||||||
|
|
||||||
echo -e "${YELLOW}=== RemoteTerm for MeshCore Publish Script ===${NC}"
|
echo -e "${YELLOW}=== RemoteTerm for MeshCore Publish Script ===${NC}"
|
||||||
echo
|
echo
|
||||||
|
|
||||||
@@ -199,21 +223,18 @@ rm -f "$SCRIPT_DIR/$RELEASE_ASSET"
|
|||||||
echo -e "${GREEN}Packaged release artifact created: $RELEASE_ASSET${NC}"
|
echo -e "${GREEN}Packaged release artifact created: $RELEASE_ASSET${NC}"
|
||||||
echo
|
echo
|
||||||
|
|
||||||
# Build docker image
|
# Build and push multi-arch docker image
|
||||||
echo -e "${YELLOW}Building Docker image...${NC}"
|
echo -e "${YELLOW}Building and pushing multi-arch Docker image...${NC}"
|
||||||
docker build --build-arg COMMIT_HASH=$GIT_HASH \
|
ensure_buildx_builder
|
||||||
-t jkingsman/remoteterm-meshcore:latest \
|
docker buildx build \
|
||||||
-t jkingsman/remoteterm-meshcore:$VERSION \
|
--platform "$DOCKER_PLATFORMS" \
|
||||||
-t jkingsman/remoteterm-meshcore:$GIT_HASH .
|
--build-arg COMMIT_HASH="$GIT_HASH" \
|
||||||
echo -e "${GREEN}Docker build complete!${NC}"
|
-t "$DOCKER_IMAGE:latest" \
|
||||||
echo
|
-t "$DOCKER_IMAGE:$VERSION" \
|
||||||
|
-t "$DOCKER_IMAGE:$GIT_HASH" \
|
||||||
# Push docker images
|
--push \
|
||||||
echo -e "${YELLOW}Pushing Docker images...${NC}"
|
.
|
||||||
docker push jkingsman/remoteterm-meshcore:latest
|
echo -e "${GREEN}Multi-arch Docker build + push complete!${NC}"
|
||||||
docker push jkingsman/remoteterm-meshcore:$VERSION
|
|
||||||
docker push jkingsman/remoteterm-meshcore:$GIT_HASH
|
|
||||||
echo -e "${GREEN}Docker push complete!${NC}"
|
|
||||||
echo
|
echo
|
||||||
|
|
||||||
# Create GitHub release using the changelog notes for this version.
|
# Create GitHub release using the changelog notes for this version.
|
||||||
@@ -254,9 +275,12 @@ echo -e "${GREEN}=== Publish complete! ===${NC}"
|
|||||||
echo -e "Version: ${YELLOW}$VERSION${NC}"
|
echo -e "Version: ${YELLOW}$VERSION${NC}"
|
||||||
echo -e "Git hash: ${YELLOW}$GIT_HASH${NC}"
|
echo -e "Git hash: ${YELLOW}$GIT_HASH${NC}"
|
||||||
echo -e "Docker tags pushed:"
|
echo -e "Docker tags pushed:"
|
||||||
echo -e " - jkingsman/remoteterm-meshcore:latest"
|
echo -e " - $DOCKER_IMAGE:latest"
|
||||||
echo -e " - jkingsman/remoteterm-meshcore:$VERSION"
|
echo -e " - $DOCKER_IMAGE:$VERSION"
|
||||||
echo -e " - jkingsman/remoteterm-meshcore:$GIT_HASH"
|
echo -e " - $DOCKER_IMAGE:$GIT_HASH"
|
||||||
|
echo -e "Platforms:"
|
||||||
|
echo -e " - linux/amd64"
|
||||||
|
echo -e " - linux/arm64"
|
||||||
echo -e "GitHub release:"
|
echo -e "GitHub release:"
|
||||||
echo -e " - $VERSION"
|
echo -e " - $VERSION"
|
||||||
echo -e "Release artifact:"
|
echo -e "Release artifact:"
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ from app.fanout.community_mqtt import (
|
|||||||
_build_status_topic,
|
_build_status_topic,
|
||||||
_calculate_packet_hash,
|
_calculate_packet_hash,
|
||||||
_decode_packet_fields,
|
_decode_packet_fields,
|
||||||
_ed25519_sign_expanded,
|
|
||||||
_format_raw_packet,
|
_format_raw_packet,
|
||||||
_generate_jwt_token,
|
_generate_jwt_token,
|
||||||
_get_client_version,
|
_get_client_version,
|
||||||
@@ -29,6 +28,7 @@ from app.fanout.mqtt_community import (
|
|||||||
_publish_community_packet,
|
_publish_community_packet,
|
||||||
_render_packet_topic,
|
_render_packet_topic,
|
||||||
)
|
)
|
||||||
|
from app.keystore import ed25519_sign_expanded
|
||||||
|
|
||||||
|
|
||||||
def _make_test_keys() -> tuple[bytes, bytes]:
|
def _make_test_keys() -> tuple[bytes, bytes]:
|
||||||
@@ -173,13 +173,13 @@ class TestEddsaSignExpanded:
|
|||||||
def test_produces_64_byte_signature(self):
|
def test_produces_64_byte_signature(self):
|
||||||
private_key, public_key = _make_test_keys()
|
private_key, public_key = _make_test_keys()
|
||||||
message = b"test message"
|
message = b"test message"
|
||||||
sig = _ed25519_sign_expanded(message, private_key[:32], private_key[32:], public_key)
|
sig = ed25519_sign_expanded(message, private_key[:32], private_key[32:], public_key)
|
||||||
assert len(sig) == 64
|
assert len(sig) == 64
|
||||||
|
|
||||||
def test_signature_verifies_with_nacl(self):
|
def test_signature_verifies_with_nacl(self):
|
||||||
private_key, public_key = _make_test_keys()
|
private_key, public_key = _make_test_keys()
|
||||||
message = b"hello world"
|
message = b"hello world"
|
||||||
sig = _ed25519_sign_expanded(message, private_key[:32], private_key[32:], public_key)
|
sig = ed25519_sign_expanded(message, private_key[:32], private_key[32:], public_key)
|
||||||
|
|
||||||
signed_message = sig + message
|
signed_message = sig + message
|
||||||
verified = nacl.bindings.crypto_sign_open(signed_message, public_key)
|
verified = nacl.bindings.crypto_sign_open(signed_message, public_key)
|
||||||
@@ -187,8 +187,8 @@ class TestEddsaSignExpanded:
|
|||||||
|
|
||||||
def test_different_messages_produce_different_signatures(self):
|
def test_different_messages_produce_different_signatures(self):
|
||||||
private_key, public_key = _make_test_keys()
|
private_key, public_key = _make_test_keys()
|
||||||
sig1 = _ed25519_sign_expanded(b"msg1", private_key[:32], private_key[32:], public_key)
|
sig1 = ed25519_sign_expanded(b"msg1", private_key[:32], private_key[32:], public_key)
|
||||||
sig2 = _ed25519_sign_expanded(b"msg2", private_key[:32], private_key[32:], public_key)
|
sig2 = ed25519_sign_expanded(b"msg2", private_key[:32], private_key[32:], public_key)
|
||||||
assert sig1 != sig2
|
assert sig1 != sig2
|
||||||
|
|
||||||
|
|
||||||
@@ -210,8 +210,8 @@ class TestPacketFormatConversion:
|
|||||||
assert result["origin"] == "TestNode"
|
assert result["origin"] == "TestNode"
|
||||||
assert result["origin_id"] == "AABBCCDD" * 8
|
assert result["origin_id"] == "AABBCCDD" * 8
|
||||||
assert result["raw"] == "0A1B2C3D"
|
assert result["raw"] == "0A1B2C3D"
|
||||||
assert result["SNR"] == "5.5"
|
assert result["SNR"] == 5.5
|
||||||
assert result["RSSI"] == "-90"
|
assert result["RSSI"] == -90
|
||||||
assert result["type"] == "PACKET"
|
assert result["type"] == "PACKET"
|
||||||
assert result["direction"] == "rx"
|
assert result["direction"] == "rx"
|
||||||
assert result["len"] == "4"
|
assert result["len"] == "4"
|
||||||
|
|||||||
@@ -271,6 +271,35 @@ class TestFanoutManagerDispatch:
|
|||||||
assert statuses["test-id"]["name"] == "Test"
|
assert statuses["test-id"]["name"] == "Test"
|
||||||
assert statuses["test-id"]["type"] == "mqtt_private"
|
assert statuses["test-id"]["type"] == "mqtt_private"
|
||||||
|
|
||||||
|
def test_get_statuses_includes_last_error(self):
|
||||||
|
manager = FanoutManager()
|
||||||
|
mod = StubModule()
|
||||||
|
mod._status = "error"
|
||||||
|
mod._last_error = "HTTP 500"
|
||||||
|
manager._modules["test-id"] = (mod, {})
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"app.repository.fanout._configs_cache",
|
||||||
|
{"test-id": {"name": "Test", "type": "webhook", "enabled": True}},
|
||||||
|
):
|
||||||
|
statuses = manager.get_statuses()
|
||||||
|
|
||||||
|
assert statuses["test-id"]["status"] == "error"
|
||||||
|
assert statuses["test-id"]["last_error"] == "HTTP 500"
|
||||||
|
|
||||||
|
def test_get_statuses_includes_start_failure_error(self):
|
||||||
|
manager = FanoutManager()
|
||||||
|
manager._module_errors["test-id"] = "ConnectionError: broker down"
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"app.repository.fanout._configs_cache",
|
||||||
|
{"test-id": {"name": "Test", "type": "mqtt_private", "enabled": True}},
|
||||||
|
):
|
||||||
|
statuses = manager.get_statuses()
|
||||||
|
|
||||||
|
assert statuses["test-id"]["status"] == "error"
|
||||||
|
assert statuses["test-id"]["last_error"] == "ConnectionError: broker down"
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Repository tests
|
# Repository tests
|
||||||
@@ -707,6 +736,98 @@ class TestSqsValidation:
|
|||||||
{"queue_url": "https://sqs.us-east-1.amazonaws.com/123456789012/mesh-events"}
|
{"queue_url": "https://sqs.us-east-1.amazonaws.com/123456789012/mesh-events"}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestMapUploadValidation:
|
||||||
|
def test_rejects_bad_api_url_scheme(self):
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
with pytest.raises(HTTPException) as exc_info:
|
||||||
|
_validate_map_upload_config({"api_url": "ftp://example.com"})
|
||||||
|
assert exc_info.value.status_code == 400
|
||||||
|
assert "api_url" in exc_info.value.detail
|
||||||
|
|
||||||
|
def test_accepts_empty_api_url(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"api_url": ""}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["api_url"] == ""
|
||||||
|
|
||||||
|
def test_accepts_valid_api_url(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"api_url": "https://custom.example.com/upload"}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["api_url"] == "https://custom.example.com/upload"
|
||||||
|
|
||||||
|
def test_normalizes_dry_run_to_bool(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"dry_run": 1}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["dry_run"] is True
|
||||||
|
|
||||||
|
def test_normalizes_geofence_enabled_to_bool(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"geofence_enabled": 1}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["geofence_enabled"] is True
|
||||||
|
|
||||||
|
def test_normalizes_geofence_radius_to_float(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"geofence_radius_km": 100}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["geofence_radius_km"] == 100.0
|
||||||
|
assert isinstance(config["geofence_radius_km"], float)
|
||||||
|
|
||||||
|
def test_rejects_negative_geofence_radius(self):
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
with pytest.raises(HTTPException) as exc_info:
|
||||||
|
_validate_map_upload_config({"geofence_radius_km": -1})
|
||||||
|
assert exc_info.value.status_code == 400
|
||||||
|
assert "geofence_radius_km" in exc_info.value.detail
|
||||||
|
|
||||||
|
def test_rejects_non_numeric_geofence_radius(self):
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
with pytest.raises(HTTPException) as exc_info:
|
||||||
|
_validate_map_upload_config({"geofence_radius_km": "bad"})
|
||||||
|
assert exc_info.value.status_code == 400
|
||||||
|
assert "geofence_radius_km" in exc_info.value.detail
|
||||||
|
|
||||||
|
def test_accepts_zero_geofence_radius(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {"geofence_radius_km": 0}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["geofence_radius_km"] == 0.0
|
||||||
|
|
||||||
|
def test_defaults_applied_when_keys_absent(self):
|
||||||
|
from app.routers.fanout import _validate_map_upload_config
|
||||||
|
|
||||||
|
config = {}
|
||||||
|
_validate_map_upload_config(config)
|
||||||
|
assert config["api_url"] == ""
|
||||||
|
assert config["dry_run"] is True
|
||||||
|
assert config["geofence_enabled"] is False
|
||||||
|
assert config["geofence_radius_km"] == 0.0
|
||||||
|
|
||||||
|
def test_enforce_scope_map_upload_forces_raw_only(self):
|
||||||
|
"""map_upload scope is always fixed regardless of what the caller passes."""
|
||||||
|
from app.routers.fanout import _enforce_scope
|
||||||
|
|
||||||
|
scope = _enforce_scope("map_upload", {"messages": "all", "raw_packets": "none"})
|
||||||
|
assert scope == {"messages": "none", "raw_packets": "all"}
|
||||||
|
|
||||||
def test_enforce_scope_sqs_preserves_raw_packets_setting(self):
|
def test_enforce_scope_sqs_preserves_raw_packets_setting(self):
|
||||||
from app.routers.fanout import _enforce_scope
|
from app.routers.fanout import _enforce_scope
|
||||||
|
|
||||||
|
|||||||
@@ -1790,3 +1790,100 @@ class TestManagerRestartFailure:
|
|||||||
|
|
||||||
assert len(healthy.messages_received) == 1
|
assert len(healthy.messages_received) == 1
|
||||||
assert len(dead.messages_received) == 0
|
assert len(dead.messages_received) == 0
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# MapUploadModule integration tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestMapUploadIntegration:
|
||||||
|
"""Integration tests: FanoutManager loads and dispatches to MapUploadModule."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_map_upload_module_loaded_and_receives_raw(self, integration_db):
|
||||||
|
"""Enabled map_upload config is loaded by the manager and its on_raw is called."""
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
cfg = await FanoutConfigRepository.create(
|
||||||
|
config_type="map_upload",
|
||||||
|
name="Map",
|
||||||
|
config={"dry_run": True, "api_url": ""},
|
||||||
|
scope={"messages": "none", "raw_packets": "all"},
|
||||||
|
enabled=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
manager = FanoutManager()
|
||||||
|
await manager.load_from_db()
|
||||||
|
|
||||||
|
assert cfg["id"] in manager._modules
|
||||||
|
module, scope = manager._modules[cfg["id"]]
|
||||||
|
assert scope == {"messages": "none", "raw_packets": "all"}
|
||||||
|
|
||||||
|
# Raw ADVERT event should be dispatched to on_raw
|
||||||
|
advert_data = {
|
||||||
|
"payload_type": "ADVERT",
|
||||||
|
"data": "aabbccdd",
|
||||||
|
"timestamp": 1000,
|
||||||
|
"id": 1,
|
||||||
|
"observation_id": 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(module, "_upload", new_callable=AsyncMock):
|
||||||
|
# Provide a parseable but minimal packet so on_raw gets past hex decode;
|
||||||
|
# parse_packet/parse_advertisement returning None is fine — on_raw silently exits
|
||||||
|
await manager.broadcast_raw(advert_data)
|
||||||
|
# Give the asyncio task a chance to run
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
# _upload may or may not be called depending on parse result, but no exception
|
||||||
|
|
||||||
|
await manager.stop_all()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_map_upload_disabled_not_loaded(self, integration_db):
|
||||||
|
"""Disabled map_upload config is not loaded by the manager."""
|
||||||
|
await FanoutConfigRepository.create(
|
||||||
|
config_type="map_upload",
|
||||||
|
name="Map Disabled",
|
||||||
|
config={"dry_run": True, "api_url": ""},
|
||||||
|
scope={"messages": "none", "raw_packets": "all"},
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
manager = FanoutManager()
|
||||||
|
await manager.load_from_db()
|
||||||
|
|
||||||
|
assert len(manager._modules) == 0
|
||||||
|
await manager.stop_all()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_map_upload_does_not_receive_messages(self, integration_db):
|
||||||
|
"""map_upload scope forces raw_packets only — message events must not reach it."""
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
cfg = await FanoutConfigRepository.create(
|
||||||
|
config_type="map_upload",
|
||||||
|
name="Map",
|
||||||
|
config={"dry_run": True, "api_url": ""},
|
||||||
|
scope={"messages": "none", "raw_packets": "all"},
|
||||||
|
enabled=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
manager = FanoutManager()
|
||||||
|
await manager.load_from_db()
|
||||||
|
|
||||||
|
assert cfg["id"] in manager._modules
|
||||||
|
module, _ = manager._modules[cfg["id"]]
|
||||||
|
|
||||||
|
with patch.object(module, "on_message", new_callable=AsyncMock) as mock_msg:
|
||||||
|
await manager.broadcast_message(
|
||||||
|
{"type": "CHAN", "conversation_key": "k1", "text": "hi"}
|
||||||
|
)
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
mock_msg.assert_not_called()
|
||||||
|
|
||||||
|
await manager.stop_all()
|
||||||
|
|||||||
@@ -28,11 +28,17 @@ class TestHealthFanoutStatus:
|
|||||||
async def test_fanout_statuses_reflect_manager(self, test_db):
|
async def test_fanout_statuses_reflect_manager(self, test_db):
|
||||||
"""fanout_statuses should return whatever the manager reports."""
|
"""fanout_statuses should return whatever the manager reports."""
|
||||||
mock_statuses = {
|
mock_statuses = {
|
||||||
"uuid-1": {"name": "Private MQTT", "type": "mqtt_private", "status": "connected"},
|
"uuid-1": {
|
||||||
|
"name": "Private MQTT",
|
||||||
|
"type": "mqtt_private",
|
||||||
|
"status": "connected",
|
||||||
|
"last_error": None,
|
||||||
|
},
|
||||||
"uuid-2": {
|
"uuid-2": {
|
||||||
"name": "Community MQTT",
|
"name": "Community MQTT",
|
||||||
"type": "mqtt_community",
|
"type": "mqtt_community",
|
||||||
"status": "disconnected",
|
"status": "error",
|
||||||
|
"last_error": "auth failed",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
with patch("app.fanout.manager.fanout_manager") as mock_fm:
|
with patch("app.fanout.manager.fanout_manager") as mock_fm:
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -146,6 +146,7 @@ class TestMqttPublisher:
|
|||||||
# After a publish failure, connected should be cleared to stop
|
# After a publish failure, connected should be cleared to stop
|
||||||
# further attempts and reflect accurate status
|
# further attempts and reflect accurate status
|
||||||
assert pub.connected is False
|
assert pub.connected is False
|
||||||
|
assert pub.last_error == "Network error"
|
||||||
assert "Primary MQTT" in caplog.text
|
assert "Primary MQTT" in caplog.text
|
||||||
assert "usually transient network noise" in caplog.text
|
assert "usually transient network noise" in caplog.text
|
||||||
|
|
||||||
|
|||||||
+130
-7
@@ -5,12 +5,14 @@ contact/channel sync operations, and default channel management.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
from unittest.mock import AsyncMock, MagicMock, call, patch
|
from unittest.mock import AsyncMock, MagicMock, call, patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from meshcore import EventType
|
from meshcore import EventType
|
||||||
from meshcore.events import Event
|
from meshcore.events import Event
|
||||||
|
|
||||||
|
import app.radio_sync as radio_sync
|
||||||
from app.models import Favorite
|
from app.models import Favorite
|
||||||
from app.radio import RadioManager, radio_manager
|
from app.radio import RadioManager, radio_manager
|
||||||
from app.radio_sync import (
|
from app.radio_sync import (
|
||||||
@@ -36,8 +38,6 @@ from app.repository import (
|
|||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def reset_sync_state():
|
def reset_sync_state():
|
||||||
"""Reset polling pause state, sync timestamp, and radio_manager before/after each test."""
|
"""Reset polling pause state, sync timestamp, and radio_manager before/after each test."""
|
||||||
import app.radio_sync as radio_sync
|
|
||||||
|
|
||||||
prev_mc = radio_manager._meshcore
|
prev_mc = radio_manager._meshcore
|
||||||
prev_lock = radio_manager._operation_lock
|
prev_lock = radio_manager._operation_lock
|
||||||
prev_max_channels = radio_manager.max_channels
|
prev_max_channels = radio_manager.max_channels
|
||||||
@@ -45,12 +45,20 @@ def reset_sync_state():
|
|||||||
prev_slot_by_key = radio_manager._channel_slot_by_key.copy()
|
prev_slot_by_key = radio_manager._channel_slot_by_key.copy()
|
||||||
prev_key_by_slot = radio_manager._channel_key_by_slot.copy()
|
prev_key_by_slot = radio_manager._channel_key_by_slot.copy()
|
||||||
prev_pending_channel_key_by_slot = radio_manager._pending_message_channel_key_by_slot.copy()
|
prev_pending_channel_key_by_slot = radio_manager._pending_message_channel_key_by_slot.copy()
|
||||||
|
prev_contact_reconcile_task = radio_sync._contact_reconcile_task
|
||||||
|
|
||||||
radio_sync._polling_pause_count = 0
|
radio_sync._polling_pause_count = 0
|
||||||
radio_sync._last_contact_sync = 0.0
|
radio_sync._last_contact_sync = 0.0
|
||||||
yield
|
yield
|
||||||
|
if (
|
||||||
|
radio_sync._contact_reconcile_task is not None
|
||||||
|
and radio_sync._contact_reconcile_task is not prev_contact_reconcile_task
|
||||||
|
and not radio_sync._contact_reconcile_task.done()
|
||||||
|
):
|
||||||
|
radio_sync._contact_reconcile_task.cancel()
|
||||||
radio_sync._polling_pause_count = 0
|
radio_sync._polling_pause_count = 0
|
||||||
radio_sync._last_contact_sync = 0.0
|
radio_sync._last_contact_sync = 0.0
|
||||||
|
radio_sync._contact_reconcile_task = prev_contact_reconcile_task
|
||||||
radio_manager._meshcore = prev_mc
|
radio_manager._meshcore = prev_mc
|
||||||
radio_manager._operation_lock = prev_lock
|
radio_manager._operation_lock = prev_lock
|
||||||
radio_manager.max_channels = prev_max_channels
|
radio_manager.max_channels = prev_max_channels
|
||||||
@@ -433,7 +441,7 @@ class TestSyncAndOffloadAll:
|
|||||||
"""Test session-local contact radio residency reset behavior."""
|
"""Test session-local contact radio residency reset behavior."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_clears_stale_contact_on_radio_flags_before_reload(self, test_db):
|
async def test_clears_stale_contact_on_radio_flags_before_background_reconcile(self, test_db):
|
||||||
await _insert_contact(KEY_A, "Alice", on_radio=True)
|
await _insert_contact(KEY_A, "Alice", on_radio=True)
|
||||||
await _insert_contact(KEY_B, "Bob", on_radio=True)
|
await _insert_contact(KEY_B, "Bob", on_radio=True)
|
||||||
|
|
||||||
@@ -441,8 +449,8 @@ class TestSyncAndOffloadAll:
|
|||||||
|
|
||||||
with (
|
with (
|
||||||
patch(
|
patch(
|
||||||
"app.radio_sync.sync_and_offload_contacts",
|
"app.radio_sync.sync_contacts_from_radio",
|
||||||
new=AsyncMock(return_value={"synced": 0, "removed": 0}),
|
new=AsyncMock(return_value={"synced": 0, "radio_contacts": {}}),
|
||||||
),
|
),
|
||||||
patch(
|
patch(
|
||||||
"app.radio_sync.sync_and_offload_channels",
|
"app.radio_sync.sync_and_offload_channels",
|
||||||
@@ -450,8 +458,7 @@ class TestSyncAndOffloadAll:
|
|||||||
),
|
),
|
||||||
patch("app.radio_sync.ensure_default_channels", new=AsyncMock()),
|
patch("app.radio_sync.ensure_default_channels", new=AsyncMock()),
|
||||||
patch(
|
patch(
|
||||||
"app.radio_sync.sync_recent_contacts_to_radio",
|
"app.radio_sync.start_background_contact_reconciliation",
|
||||||
new=AsyncMock(return_value={"loaded": 0, "already_on_radio": 0, "failed": 0}),
|
|
||||||
),
|
),
|
||||||
):
|
):
|
||||||
await sync_and_offload_all(mock_mc)
|
await sync_and_offload_all(mock_mc)
|
||||||
@@ -461,6 +468,30 @@ class TestSyncAndOffloadAll:
|
|||||||
assert alice is not None and alice.on_radio is False
|
assert alice is not None and alice.on_radio is False
|
||||||
assert bob is not None and bob.on_radio is False
|
assert bob is not None and bob.on_radio is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_starts_background_contact_reconcile_with_radio_snapshot(self, test_db):
|
||||||
|
mock_mc = MagicMock()
|
||||||
|
radio_contacts = {KEY_A: {"public_key": KEY_A}}
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.sync_contacts_from_radio",
|
||||||
|
new=AsyncMock(return_value={"synced": 1, "radio_contacts": radio_contacts}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.sync_and_offload_channels",
|
||||||
|
new=AsyncMock(return_value={"synced": 0, "cleared": 0}),
|
||||||
|
),
|
||||||
|
patch("app.radio_sync.ensure_default_channels", new=AsyncMock()),
|
||||||
|
patch("app.radio_sync.start_background_contact_reconciliation") as mock_start,
|
||||||
|
):
|
||||||
|
result = await sync_and_offload_all(mock_mc)
|
||||||
|
|
||||||
|
mock_start.assert_called_once_with(
|
||||||
|
initial_radio_contacts=radio_contacts, expected_mc=mock_mc
|
||||||
|
)
|
||||||
|
assert result["contact_reconcile_started"] is True
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_advert_fill_skips_repeaters(self, test_db):
|
async def test_advert_fill_skips_repeaters(self, test_db):
|
||||||
"""Recent advert fallback only considers non-repeaters."""
|
"""Recent advert fallback only considers non-repeaters."""
|
||||||
@@ -1036,6 +1067,98 @@ class TestSyncAndOffloadContacts:
|
|||||||
assert KEY_A in mock_mc._contacts
|
assert KEY_A in mock_mc._contacts
|
||||||
|
|
||||||
|
|
||||||
|
class TestBackgroundContactReconcile:
|
||||||
|
"""Test the yielding background contact reconcile loop."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_rechecks_desired_set_before_deleting_contact(self, test_db):
|
||||||
|
await _insert_contact(KEY_A, "Alice", last_contacted=2000)
|
||||||
|
await _insert_contact(KEY_B, "Bob", last_contacted=1000)
|
||||||
|
alice = await ContactRepository.get_by_key(KEY_A)
|
||||||
|
bob = await ContactRepository.get_by_key(KEY_B)
|
||||||
|
assert alice is not None
|
||||||
|
assert bob is not None
|
||||||
|
|
||||||
|
mock_mc = MagicMock()
|
||||||
|
mock_mc.is_connected = True
|
||||||
|
mock_mc.get_contact_by_key_prefix = MagicMock(return_value=None)
|
||||||
|
mock_mc.commands.remove_contact = AsyncMock(return_value=MagicMock(type=EventType.OK))
|
||||||
|
mock_mc.commands.add_contact = AsyncMock(return_value=MagicMock(type=EventType.OK))
|
||||||
|
radio_manager._meshcore = mock_mc
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def _radio_operation(*args, **kwargs):
|
||||||
|
del args, kwargs
|
||||||
|
yield mock_mc
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch.object(
|
||||||
|
radio_sync.radio_manager,
|
||||||
|
"radio_operation",
|
||||||
|
side_effect=lambda *args, **kwargs: _radio_operation(*args, **kwargs),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"app.radio_sync.get_contacts_selected_for_radio_sync",
|
||||||
|
side_effect=[[bob], [alice, bob], [alice, bob]],
|
||||||
|
),
|
||||||
|
patch("app.radio_sync.asyncio.sleep", new=AsyncMock()),
|
||||||
|
):
|
||||||
|
await radio_sync._reconcile_radio_contacts_in_background(
|
||||||
|
initial_radio_contacts={KEY_A: {"public_key": KEY_A}},
|
||||||
|
expected_mc=mock_mc,
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_mc.commands.remove_contact.assert_not_called()
|
||||||
|
mock_mc.commands.add_contact.assert_awaited_once()
|
||||||
|
payload = mock_mc.commands.add_contact.call_args.args[0]
|
||||||
|
assert payload["public_key"] == KEY_B
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_yields_radio_lock_every_two_contact_operations(self, test_db):
|
||||||
|
await _insert_contact(KEY_A, "Alice", last_contacted=3000)
|
||||||
|
await _insert_contact(KEY_B, "Bob", last_contacted=2000)
|
||||||
|
extra_key = "cc" * 32
|
||||||
|
await _insert_contact(extra_key, "Carol", last_contacted=1000)
|
||||||
|
|
||||||
|
mock_mc = MagicMock()
|
||||||
|
mock_mc.is_connected = True
|
||||||
|
mock_mc.get_contact_by_key_prefix = MagicMock(return_value=None)
|
||||||
|
mock_mc.commands.remove_contact = AsyncMock(return_value=MagicMock(type=EventType.OK))
|
||||||
|
mock_mc.commands.add_contact = AsyncMock()
|
||||||
|
radio_manager._meshcore = mock_mc
|
||||||
|
|
||||||
|
acquire_count = 0
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def _radio_operation(*args, **kwargs):
|
||||||
|
del args, kwargs
|
||||||
|
nonlocal acquire_count
|
||||||
|
acquire_count += 1
|
||||||
|
yield mock_mc
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch.object(
|
||||||
|
radio_sync.radio_manager,
|
||||||
|
"radio_operation",
|
||||||
|
side_effect=lambda *args, **kwargs: _radio_operation(*args, **kwargs),
|
||||||
|
),
|
||||||
|
patch("app.radio_sync.get_contacts_selected_for_radio_sync", return_value=[]),
|
||||||
|
patch("app.radio_sync.asyncio.sleep", new=AsyncMock()),
|
||||||
|
):
|
||||||
|
await radio_sync._reconcile_radio_contacts_in_background(
|
||||||
|
initial_radio_contacts={
|
||||||
|
KEY_A: {"public_key": KEY_A},
|
||||||
|
KEY_B: {"public_key": KEY_B},
|
||||||
|
extra_key: {"public_key": extra_key},
|
||||||
|
},
|
||||||
|
expected_mc=mock_mc,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert acquire_count == 2
|
||||||
|
assert mock_mc.commands.remove_contact.await_count == 3
|
||||||
|
mock_mc.commands.add_contact.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
class TestSyncAndOffloadChannels:
|
class TestSyncAndOffloadChannels:
|
||||||
"""Test sync_and_offload_channels: pull channels from radio, save to DB, clear from radio."""
|
"""Test sync_and_offload_channels: pull channels from radio, save to DB, clear from radio."""
|
||||||
|
|
||||||
|
|||||||
@@ -1098,7 +1098,7 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "remoteterm-meshcore"
|
name = "remoteterm-meshcore"
|
||||||
version = "3.6.0"
|
version = "3.6.2"
|
||||||
source = { virtual = "." }
|
source = { virtual = "." }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "aiomqtt" },
|
{ name = "aiomqtt" },
|
||||||
|
|||||||
Reference in New Issue
Block a user