mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-12 12:26:21 +02:00
Compare commits
50 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| d6e1218888 | |||
| ad0e398704 | |||
| 39f5bb2b51 | |||
| 5257cb0b1b | |||
| b1547773c5 | |||
| 71da6841c1 | |||
| 6f00e857c2 | |||
| 303becf4b8 | |||
| 9ab4e7a9b0 | |||
| b1020e6e34 | |||
| 87a892fc6e | |||
| af76546287 | |||
| 31bd4a0744 | |||
| 1db724073b | |||
| 4783da8f3e | |||
| 4b69ec4519 | |||
| 8efbbd97bd | |||
| 1437e8e48a | |||
| 5cd8f7e80f | |||
| e8c50d0b2a | |||
| 7f3bb89323 | |||
| 5bfdd0880e | |||
| 0e9bd59b44 | |||
| b1cd6e1aa9 | |||
| 56fc589e0b | |||
| 64502c4ca2 | |||
| d1f657342a | |||
| 86a0ac7beb | |||
| 3b7e2737ee | |||
| 01158ac69f | |||
| 485df05372 | |||
| e5e9eab935 | |||
| 33b2d3c260 | |||
| eccbd0bac5 | |||
| 4f54ec2c93 | |||
| eed38337c8 | |||
| e1ee7fcd24 | |||
| 2756b1ae8d | |||
| ef1d6a5a1a | |||
| 14f42c59fe | |||
| b9414e84ee | |||
| 95a17ca8ee | |||
| e6cedfbd0b | |||
| c3d0af1473 | |||
| c24e291017 | |||
| d2d009ae79 | |||
| d09166df84 | |||
| f2762ab495 | |||
| a411562ca7 | |||
| cde4d1744e |
@@ -30,3 +30,6 @@ references/
|
|||||||
docker-compose.yml
|
docker-compose.yml
|
||||||
docker-compose.yaml
|
docker-compose.yaml
|
||||||
.docker-certs/
|
.docker-certs/
|
||||||
|
|
||||||
|
# HA test environment (created by scripts/setup/start_ha_test_env.sh)
|
||||||
|
ha_test_config/
|
||||||
|
|||||||
@@ -179,7 +179,9 @@ Outgoing DMs send once immediately, then may retry up to 2 more times in the bac
|
|||||||
|
|
||||||
ACKs are not a contact-route source. They drive message delivery state and may appear in analytics/detail surfaces, but they do not update `direct_path*` or otherwise influence route selection for future sends.
|
ACKs are not a contact-route source. They drive message delivery state and may appear in analytics/detail surfaces, but they do not update `direct_path*` or otherwise influence route selection for future sends.
|
||||||
|
|
||||||
**Channel messages**: Flood messages echo back through repeaters. Repeats are identified by the database UNIQUE constraint on `(type, conversation_key, text, sender_timestamp)` — when an INSERT hits a duplicate, `_handle_duplicate_message()` in `packet_processor.py` adds the new path and, for outgoing messages only, increments the ack count. Incoming repeats add path data but do not change the ack count. There is no timestamp-windowed matching; deduplication is exact-match only.
|
**Channel messages**: Flood messages echo back through repeaters. Repeats are identified by the database UNIQUE constraint `idx_messages_dedup_null_safe` on `(type, conversation_key, text, COALESCE(sender_timestamp, 0))` where `type = 'CHAN'` — when an INSERT hits a duplicate, `_handle_duplicate_message()` in `packet_processor.py` adds the new path and, for outgoing messages only, increments the ack count. Incoming repeats add path data but do not change the ack count. There is no timestamp-windowed matching; deduplication is exact-match only.
|
||||||
|
|
||||||
|
**Incoming direct messages**: A separate unique index `idx_messages_incoming_priv_dedup` on `(type, conversation_key, text, COALESCE(sender_timestamp, 0), COALESCE(sender_key, ''))` where `type = 'PRIV' AND outgoing = 0` deduplicates incoming DMs. The additional `sender_key` term (added in migration 056) distinguishes room-server posts from different senders that arrive in the same second with identical text.
|
||||||
|
|
||||||
This message-layer echo/path handling is independent of raw-packet storage deduplication.
|
This message-layer echo/path handling is independent of raw-packet storage deduplication.
|
||||||
|
|
||||||
@@ -197,6 +199,7 @@ This message-layer echo/path handling is independent of raw-packet storage dedup
|
|||||||
│ ├── event_handlers.py # Radio events
|
│ ├── event_handlers.py # Radio events
|
||||||
│ ├── decoder.py # Packet decryption
|
│ ├── decoder.py # Packet decryption
|
||||||
│ ├── websocket.py # Real-time broadcasts
|
│ ├── websocket.py # Real-time broadcasts
|
||||||
|
│ ├── push/ # Web Push notification subsystem (VAPID keys, dispatch, send)
|
||||||
│ └── fanout/ # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
|
│ └── fanout/ # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
|
||||||
├── frontend/ # React frontend
|
├── frontend/ # React frontend
|
||||||
│ ├── AGENTS.md # Frontend documentation
|
│ ├── AGENTS.md # Frontend documentation
|
||||||
@@ -345,6 +348,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
|||||||
| POST | `/api/contacts/{public_key}/repeater/radio-settings` | Fetch repeater radio config via CLI |
|
| POST | `/api/contacts/{public_key}/repeater/radio-settings` | Fetch repeater radio config via CLI |
|
||||||
| POST | `/api/contacts/{public_key}/repeater/advert-intervals` | Fetch advert intervals |
|
| POST | `/api/contacts/{public_key}/repeater/advert-intervals` | Fetch advert intervals |
|
||||||
| POST | `/api/contacts/{public_key}/repeater/owner-info` | Fetch owner info |
|
| POST | `/api/contacts/{public_key}/repeater/owner-info` | Fetch owner info |
|
||||||
|
| GET | `/api/contacts/{public_key}/repeater/telemetry-history` | Stored telemetry history for a repeater (read-only, no radio access) |
|
||||||
| POST | `/api/contacts/{public_key}/room/login` | Log in to a room server |
|
| POST | `/api/contacts/{public_key}/room/login` | Log in to a room server |
|
||||||
| POST | `/api/contacts/{public_key}/room/status` | Fetch room-server status telemetry |
|
| POST | `/api/contacts/{public_key}/room/status` | Fetch room-server status telemetry |
|
||||||
| POST | `/api/contacts/{public_key}/room/lpp-telemetry` | Fetch room-server CayenneLPP sensor data |
|
| POST | `/api/contacts/{public_key}/room/lpp-telemetry` | Fetch room-server CayenneLPP sensor data |
|
||||||
@@ -374,12 +378,21 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
|||||||
| POST | `/api/settings/blocked-keys/toggle` | Toggle blocked key |
|
| POST | `/api/settings/blocked-keys/toggle` | Toggle blocked key |
|
||||||
| POST | `/api/settings/blocked-names/toggle` | Toggle blocked name |
|
| POST | `/api/settings/blocked-names/toggle` | Toggle blocked name |
|
||||||
| POST | `/api/settings/tracked-telemetry/toggle` | Toggle tracked telemetry repeater |
|
| POST | `/api/settings/tracked-telemetry/toggle` | Toggle tracked telemetry repeater |
|
||||||
|
| GET | `/api/settings/tracked-telemetry/schedule` | Current telemetry scheduling derivation and next-run-at timestamp |
|
||||||
| GET | `/api/fanout` | List all fanout configs |
|
| GET | `/api/fanout` | List all fanout configs |
|
||||||
| POST | `/api/fanout` | Create new fanout config |
|
| POST | `/api/fanout` | Create new fanout config |
|
||||||
| PATCH | `/api/fanout/{id}` | Update fanout config (triggers module reload) |
|
| PATCH | `/api/fanout/{id}` | Update fanout config (triggers module reload) |
|
||||||
| DELETE | `/api/fanout/{id}` | Delete fanout config (stops module) |
|
| DELETE | `/api/fanout/{id}` | Delete fanout config (stops module) |
|
||||||
| POST | `/api/fanout/bots/disable-until-restart` | Stop bot fanout modules and keep bots disabled until the process restarts |
|
| POST | `/api/fanout/bots/disable-until-restart` | Stop bot fanout modules and keep bots disabled until the process restarts |
|
||||||
| GET | `/api/statistics` | Aggregated mesh network statistics |
|
| GET | `/api/statistics` | Aggregated mesh network statistics |
|
||||||
|
| GET | `/api/push/vapid-public-key` | VAPID public key for browser push subscription |
|
||||||
|
| POST | `/api/push/subscribe` | Register/upsert a push subscription |
|
||||||
|
| GET | `/api/push/subscriptions` | List all push subscriptions |
|
||||||
|
| PATCH | `/api/push/subscriptions/{id}` | Update subscription label or filter preferences |
|
||||||
|
| DELETE | `/api/push/subscriptions/{id}` | Delete a push subscription |
|
||||||
|
| POST | `/api/push/subscriptions/{id}/test` | Send a test push notification |
|
||||||
|
| GET | `/api/push/conversations` | Global list of push-enabled conversation state keys |
|
||||||
|
| POST | `/api/push/conversations/toggle` | Add or remove a conversation from the global push list |
|
||||||
| WS | `/api/ws` | Real-time updates |
|
| WS | `/api/ws` | Real-time updates |
|
||||||
|
|
||||||
## Key Concepts
|
## Key Concepts
|
||||||
@@ -434,6 +447,17 @@ All external integrations are managed through the fanout bus (`app/fanout/`). Ea
|
|||||||
|
|
||||||
Community MQTT forwards raw packets only. Its derived `path` field, when present on direct packets, is a comma-separated list of hop identifiers as reported by the packet format. Token width therefore varies with the packet's path hash mode; it is intentionally not a flat per-byte rendering.
|
Community MQTT forwards raw packets only. Its derived `path` field, when present on direct packets, is a comma-separated list of hop identifiers as reported by the packet format. Token width therefore varies with the packet's path hash mode; it is intentionally not a flat per-byte rendering.
|
||||||
|
|
||||||
|
### Web Push Notifications
|
||||||
|
|
||||||
|
Web Push is a standalone subsystem (`app/push/`) that sends browser push notifications for incoming messages even when the browser tab is closed. It is **not** a fanout module — it manages its own per-browser subscriptions, while the set of push-enabled conversations is stored once per server instance.
|
||||||
|
|
||||||
|
- **Requires HTTPS** (self-signed certificates work) and outbound internet from the server to reach browser push services (Google FCM, Mozilla autopush).
|
||||||
|
- VAPID key pair is auto-generated on first startup and stored in `app_settings`.
|
||||||
|
- Each browser subscription is stored in `push_subscriptions` with device identity and delivery state. The set of push-enabled conversations is stored globally in `app_settings.push_conversations`, so all subscribed browsers receive the same configured rooms/DMs.
|
||||||
|
- `broadcast_event()` in `websocket.py` dispatches to `push_manager.dispatch_message()` alongside fanout for `message` events.
|
||||||
|
- Expired subscriptions (HTTP 404/410 from push service) are auto-deleted.
|
||||||
|
- Frontend: service worker (`sw.js`) handles push display and notification click navigation. The `BellRing` icon in `ChatHeader` toggles per-conversation push. Device management lives in Settings > Local.
|
||||||
|
|
||||||
### Server-Side Decryption
|
### Server-Side Decryption
|
||||||
|
|
||||||
The server can decrypt packets using stored keys, both in real-time and for historical packets.
|
The server can decrypt packets using stored keys, both in real-time and for historical packets.
|
||||||
@@ -480,7 +504,7 @@ mc.subscribe(EventType.ACK, handler)
|
|||||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
|
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
|
||||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
|
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
|
||||||
|
|
||||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `advert_interval`, `last_advert_time`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, `discovery_blocked_types`, `tracked_telemetry_repeaters`, and `auto_resend_channel`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `advert_interval`, `last_advert_time`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, `discovery_blocked_types`, `tracked_telemetry_repeaters`, `auto_resend_channel`, and `telemetry_interval_hours`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||||
|
|
||||||
Byte-perfect channel retries are user-triggered via `POST /api/messages/channel/{message_id}/resend` and are allowed for 30 seconds after the original send.
|
Byte-perfect channel retries are user-triggered via `POST /api/messages/channel/{message_id}/resend` and are allowed for 30 seconds after the original send.
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,43 @@
|
|||||||
|
## [3.12.0] - 2026-04-17
|
||||||
|
|
||||||
|
* Feature: Web Push -- get your mesh notifications on a locked phone or when your browser is closed!
|
||||||
|
* Feature: Add link to node from map display
|
||||||
|
* Feature: Map layers
|
||||||
|
* Feature: Better contact/channel selection for fanout
|
||||||
|
* Feature: Add glittering status dot option
|
||||||
|
* Feature: Add airtime math and average packets/min for repeater info displays
|
||||||
|
* Feature: Offer multiple timing intervals for repeater telemetry autofetch
|
||||||
|
* Feature: Add ability to follow OS light/dark mode
|
||||||
|
* Bugfix: Clear 100% of messages from radio in fallback mode; don't stop at 100
|
||||||
|
* Bugfix: Don't stop DM retry just because the radio did not provide a radio ack on the wire
|
||||||
|
* Bugfix: Don't strip outgoing colons on DMs or room servers
|
||||||
|
* Bugfix: Patch statusbar overlap on PWA
|
||||||
|
* Bugfix: Patch default map upload URL
|
||||||
|
* Bugfix: Show learned path in routing override
|
||||||
|
* Bugfix: Centralize on "only means RF heard" for first_seen/last_seen
|
||||||
|
* Misc: Reduce frequency of time set failure chirping
|
||||||
|
* Misc: QoL improvements for Home Assistant integration
|
||||||
|
* Misc: Overhaul settings styling
|
||||||
|
* Misc: Documentation + tests updates
|
||||||
|
|
||||||
|
## [3.11.3] - 2026-04-12
|
||||||
|
|
||||||
|
* Bugfix: Add icons and screenshots for webmanifest
|
||||||
|
* Bugfix: Use incoming DMs, not just outgoing, for recency ranking for preferential radio contact load
|
||||||
|
|
||||||
|
## [3.11.2] - 2026-04-12
|
||||||
|
|
||||||
|
* Feature: Unread DMs are always at the top of the DM list no matter what
|
||||||
|
* Bugfix: Webmanifest needs withCredentials
|
||||||
|
|
||||||
|
## [3.11.1] - 2026-04-12
|
||||||
|
|
||||||
|
* Feature: Home Assistant MQTT fanout
|
||||||
|
* Feature: Add dummy service worker to enable PWA
|
||||||
|
* Bugfix: DB connection plurality issues
|
||||||
|
* Misc: Migration improvements
|
||||||
|
* Misc: Search keys from beginning
|
||||||
|
|
||||||
## [3.11.0] - 2026-04-10
|
## [3.11.0] - 2026-04-10
|
||||||
|
|
||||||
* Feature: Radio health and contact data accessible on fanout bus
|
* Feature: Radio health and contact data accessible on fanout bus
|
||||||
|
|||||||
+2
-2
@@ -13,7 +13,7 @@ RUN VITE_COMMIT_HASH=${COMMIT_HASH} npm run build
|
|||||||
|
|
||||||
|
|
||||||
# Stage 2: Python runtime
|
# Stage 2: Python runtime
|
||||||
FROM python:3.12-slim
|
FROM python:3.13-slim
|
||||||
|
|
||||||
ARG COMMIT_HASH=unknown
|
ARG COMMIT_HASH=unknown
|
||||||
|
|
||||||
@@ -22,7 +22,7 @@ WORKDIR /app
|
|||||||
ENV COMMIT_HASH=${COMMIT_HASH}
|
ENV COMMIT_HASH=${COMMIT_HASH}
|
||||||
|
|
||||||
# Install uv
|
# Install uv
|
||||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
|
COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /usr/local/bin/uv
|
||||||
|
|
||||||
# Copy dependency files first for layer caching
|
# Copy dependency files first for layer caching
|
||||||
COPY pyproject.toml uv.lock ./
|
COPY pyproject.toml uv.lock ./
|
||||||
|
|||||||
+383
@@ -647,6 +647,389 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|||||||
|
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
|
### pywebpush (2.3.0) — MPL-2.0
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Full license text</summary>
|
||||||
|
|
||||||
|
```
|
||||||
|
Mozilla Public License Version 2.0
|
||||||
|
==================================
|
||||||
|
|
||||||
|
1. Definitions
|
||||||
|
--------------
|
||||||
|
|
||||||
|
1.1. "Contributor"
|
||||||
|
means each individual or legal entity that creates, contributes to
|
||||||
|
the creation of, or owns Covered Software.
|
||||||
|
|
||||||
|
1.2. "Contributor Version"
|
||||||
|
means the combination of the Contributions of others (if any) used
|
||||||
|
by a Contributor and that particular Contributor's Contribution.
|
||||||
|
|
||||||
|
1.3. "Contribution"
|
||||||
|
means Covered Software of a particular Contributor.
|
||||||
|
|
||||||
|
1.4. "Covered Software"
|
||||||
|
means Source Code Form to which the initial Contributor has attached
|
||||||
|
the notice in Exhibit A, the Executable Form of such Source Code
|
||||||
|
Form, and Modifications of such Source Code Form, in each case
|
||||||
|
including portions thereof.
|
||||||
|
|
||||||
|
1.5. "Incompatible With Secondary Licenses"
|
||||||
|
means
|
||||||
|
|
||||||
|
(a) that the initial Contributor has attached the notice described
|
||||||
|
in Exhibit B to the Covered Software; or
|
||||||
|
|
||||||
|
(b) that the Covered Software was made available under the terms of
|
||||||
|
version 1.1 or earlier of the License, but not also under the
|
||||||
|
terms of a Secondary License.
|
||||||
|
|
||||||
|
1.6. "Executable Form"
|
||||||
|
means any form of the work other than Source Code Form.
|
||||||
|
|
||||||
|
1.7. "Larger Work"
|
||||||
|
means a work that combines Covered Software with other material, in
|
||||||
|
a separate file or files, that is not Covered Software.
|
||||||
|
|
||||||
|
1.8. "License"
|
||||||
|
means this document.
|
||||||
|
|
||||||
|
1.9. "Licensable"
|
||||||
|
means having the right to grant, to the maximum extent possible,
|
||||||
|
whether at the time of the initial grant or subsequently, any and
|
||||||
|
all of the rights conveyed by this License.
|
||||||
|
|
||||||
|
1.10. "Modifications"
|
||||||
|
means any of the following:
|
||||||
|
|
||||||
|
(a) any file in Source Code Form that results from an addition to,
|
||||||
|
deletion from, or modification of the contents of Covered
|
||||||
|
Software; or
|
||||||
|
|
||||||
|
(b) any new file in Source Code Form that contains any Covered
|
||||||
|
Software.
|
||||||
|
|
||||||
|
1.11. "Patent Claims" of a Contributor
|
||||||
|
means any patent claim(s), including without limitation, method,
|
||||||
|
process, and apparatus claims, in any patent Licensable by such
|
||||||
|
Contributor that would be infringed, but for the grant of the
|
||||||
|
License, by the making, using, selling, offering for sale, having
|
||||||
|
made, import, or transfer of either its Contributions or its
|
||||||
|
Contributor Version.
|
||||||
|
|
||||||
|
1.12. "Secondary License"
|
||||||
|
means either the GNU General Public License, Version 2.0, the GNU
|
||||||
|
Lesser General Public License, Version 2.1, the GNU Affero General
|
||||||
|
Public License, Version 3.0, or any later versions of those
|
||||||
|
licenses.
|
||||||
|
|
||||||
|
1.13. "Source Code Form"
|
||||||
|
means the form of the work preferred for making modifications.
|
||||||
|
|
||||||
|
1.14. "You" (or "Your")
|
||||||
|
means an individual or a legal entity exercising rights under this
|
||||||
|
License. For legal entities, "You" includes any entity that
|
||||||
|
controls, is controlled by, or is under common control with You. For
|
||||||
|
purposes of this definition, "control" means (a) the power, direct
|
||||||
|
or indirect, to cause the direction or management of such entity,
|
||||||
|
whether by contract or otherwise, or (b) ownership of more than
|
||||||
|
fifty percent (50%) of the outstanding shares or beneficial
|
||||||
|
ownership of such entity.
|
||||||
|
|
||||||
|
2. License Grants and Conditions
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
2.1. Grants
|
||||||
|
|
||||||
|
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||||
|
non-exclusive license:
|
||||||
|
|
||||||
|
(a) under intellectual property rights (other than patent or trademark)
|
||||||
|
Licensable by such Contributor to use, reproduce, make available,
|
||||||
|
modify, display, perform, distribute, and otherwise exploit its
|
||||||
|
Contributions, either on an unmodified basis, with Modifications, or
|
||||||
|
as part of a Larger Work; and
|
||||||
|
|
||||||
|
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||||
|
for sale, have made, import, and otherwise transfer either its
|
||||||
|
Contributions or its Contributor Version.
|
||||||
|
|
||||||
|
2.2. Effective Date
|
||||||
|
|
||||||
|
The licenses granted in Section 2.1 with respect to any Contribution
|
||||||
|
become effective for each Contribution on the date the Contributor first
|
||||||
|
distributes such Contribution.
|
||||||
|
|
||||||
|
2.3. Limitations on Grant Scope
|
||||||
|
|
||||||
|
The licenses granted in this Section 2 are the only rights granted under
|
||||||
|
this License. No additional rights or licenses will be implied from the
|
||||||
|
distribution or licensing of Covered Software under this License.
|
||||||
|
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||||
|
Contributor:
|
||||||
|
|
||||||
|
(a) for any code that a Contributor has removed from Covered Software;
|
||||||
|
or
|
||||||
|
|
||||||
|
(b) for infringements caused by: (i) Your and any other third party's
|
||||||
|
modifications of Covered Software, or (ii) the combination of its
|
||||||
|
Contributions with other software (except as part of its Contributor
|
||||||
|
Version); or
|
||||||
|
|
||||||
|
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||||
|
its Contributions.
|
||||||
|
|
||||||
|
This License does not grant any rights in the trademarks, service marks,
|
||||||
|
or logos of any Contributor (except as may be necessary to comply with
|
||||||
|
the notice requirements in Section 3.4).
|
||||||
|
|
||||||
|
2.4. Subsequent Licenses
|
||||||
|
|
||||||
|
No Contributor makes additional grants as a result of Your choice to
|
||||||
|
distribute the Covered Software under a subsequent version of this
|
||||||
|
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||||
|
permitted under the terms of Section 3.3).
|
||||||
|
|
||||||
|
2.5. Representation
|
||||||
|
|
||||||
|
Each Contributor represents that the Contributor believes its
|
||||||
|
Contributions are its original creation(s) or it has sufficient rights
|
||||||
|
to grant the rights to its Contributions conveyed by this License.
|
||||||
|
|
||||||
|
2.6. Fair Use
|
||||||
|
|
||||||
|
This License is not intended to limit any rights You have under
|
||||||
|
applicable copyright doctrines of fair use, fair dealing, or other
|
||||||
|
equivalents.
|
||||||
|
|
||||||
|
2.7. Conditions
|
||||||
|
|
||||||
|
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||||
|
in Section 2.1.
|
||||||
|
|
||||||
|
3. Responsibilities
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
3.1. Distribution of Source Form
|
||||||
|
|
||||||
|
All distribution of Covered Software in Source Code Form, including any
|
||||||
|
Modifications that You create or to which You contribute, must be under
|
||||||
|
the terms of this License. You must inform recipients that the Source
|
||||||
|
Code Form of the Covered Software is governed by the terms of this
|
||||||
|
License, and how they can obtain a copy of this License. You may not
|
||||||
|
attempt to alter or restrict the recipients' rights in the Source Code
|
||||||
|
Form.
|
||||||
|
|
||||||
|
3.2. Distribution of Executable Form
|
||||||
|
|
||||||
|
If You distribute Covered Software in Executable Form then:
|
||||||
|
|
||||||
|
(a) such Covered Software must also be made available in Source Code
|
||||||
|
Form, as described in Section 3.1, and You must inform recipients of
|
||||||
|
the Executable Form how they can obtain a copy of such Source Code
|
||||||
|
Form by reasonable means in a timely manner, at a charge no more
|
||||||
|
than the cost of distribution to the recipient; and
|
||||||
|
|
||||||
|
(b) You may distribute such Executable Form under the terms of this
|
||||||
|
License, or sublicense it under different terms, provided that the
|
||||||
|
license for the Executable Form does not attempt to limit or alter
|
||||||
|
the recipients' rights in the Source Code Form under this License.
|
||||||
|
|
||||||
|
3.3. Distribution of a Larger Work
|
||||||
|
|
||||||
|
You may create and distribute a Larger Work under terms of Your choice,
|
||||||
|
provided that You also comply with the requirements of this License for
|
||||||
|
the Covered Software. If the Larger Work is a combination of Covered
|
||||||
|
Software with a work governed by one or more Secondary Licenses, and the
|
||||||
|
Covered Software is not Incompatible With Secondary Licenses, this
|
||||||
|
License permits You to additionally distribute such Covered Software
|
||||||
|
under the terms of such Secondary License(s), so that the recipient of
|
||||||
|
the Larger Work may, at their option, further distribute the Covered
|
||||||
|
Software under the terms of either this License or such Secondary
|
||||||
|
License(s).
|
||||||
|
|
||||||
|
3.4. Notices
|
||||||
|
|
||||||
|
You may not remove or alter the substance of any license notices
|
||||||
|
(including copyright notices, patent notices, disclaimers of warranty,
|
||||||
|
or limitations of liability) contained within the Source Code Form of
|
||||||
|
the Covered Software, except that You may alter any license notices to
|
||||||
|
the extent required to remedy known factual inaccuracies.
|
||||||
|
|
||||||
|
3.5. Application of Additional Terms
|
||||||
|
|
||||||
|
You may choose to offer, and to charge a fee for, warranty, support,
|
||||||
|
indemnity or liability obligations to one or more recipients of Covered
|
||||||
|
Software. However, You may do so only on Your own behalf, and not on
|
||||||
|
behalf of any Contributor. You must make it absolutely clear that any
|
||||||
|
such warranty, support, indemnity, or liability obligation is offered by
|
||||||
|
You alone, and You hereby agree to indemnify every Contributor for any
|
||||||
|
liability incurred by such Contributor as a result of warranty, support,
|
||||||
|
indemnity or liability terms You offer. You may include additional
|
||||||
|
disclaimers of warranty and limitations of liability specific to any
|
||||||
|
jurisdiction.
|
||||||
|
|
||||||
|
4. Inability to Comply Due to Statute or Regulation
|
||||||
|
---------------------------------------------------
|
||||||
|
|
||||||
|
If it is impossible for You to comply with any of the terms of this
|
||||||
|
License with respect to some or all of the Covered Software due to
|
||||||
|
statute, judicial order, or regulation then You must: (a) comply with
|
||||||
|
the terms of this License to the maximum extent possible; and (b)
|
||||||
|
describe the limitations and the code they affect. Such description must
|
||||||
|
be placed in a text file included with all distributions of the Covered
|
||||||
|
Software under this License. Except to the extent prohibited by statute
|
||||||
|
or regulation, such description must be sufficiently detailed for a
|
||||||
|
recipient of ordinary skill to be able to understand it.
|
||||||
|
|
||||||
|
5. Termination
|
||||||
|
--------------
|
||||||
|
|
||||||
|
5.1. The rights granted under this License will terminate automatically
|
||||||
|
if You fail to comply with any of its terms. However, if You become
|
||||||
|
compliant, then the rights granted under this License from a particular
|
||||||
|
Contributor are reinstated (a) provisionally, unless and until such
|
||||||
|
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||||
|
ongoing basis, if such Contributor fails to notify You of the
|
||||||
|
non-compliance by some reasonable means prior to 60 days after You have
|
||||||
|
come back into compliance. Moreover, Your grants from a particular
|
||||||
|
Contributor are reinstated on an ongoing basis if such Contributor
|
||||||
|
notifies You of the non-compliance by some reasonable means, this is the
|
||||||
|
first time You have received notice of non-compliance with this License
|
||||||
|
from such Contributor, and You become compliant prior to 30 days after
|
||||||
|
Your receipt of the notice.
|
||||||
|
|
||||||
|
5.2. If You initiate litigation against any entity by asserting a patent
|
||||||
|
infringement claim (excluding declaratory judgment actions,
|
||||||
|
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||||
|
directly or indirectly infringes any patent, then the rights granted to
|
||||||
|
You by any and all Contributors for the Covered Software under Section
|
||||||
|
2.1 of this License shall terminate.
|
||||||
|
|
||||||
|
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||||
|
end user license agreements (excluding distributors and resellers) which
|
||||||
|
have been validly granted by You or Your distributors under this License
|
||||||
|
prior to termination shall survive termination.
|
||||||
|
|
||||||
|
************************************************************************
|
||||||
|
* *
|
||||||
|
* 6. Disclaimer of Warranty *
|
||||||
|
* ------------------------- *
|
||||||
|
* *
|
||||||
|
* Covered Software is provided under this License on an "as is" *
|
||||||
|
* basis, without warranty of any kind, either expressed, implied, or *
|
||||||
|
* statutory, including, without limitation, warranties that the *
|
||||||
|
* Covered Software is free of defects, merchantable, fit for a *
|
||||||
|
* particular purpose or non-infringing. The entire risk as to the *
|
||||||
|
* quality and performance of the Covered Software is with You. *
|
||||||
|
* Should any Covered Software prove defective in any respect, You *
|
||||||
|
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||||
|
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||||
|
* essential part of this License. No use of any Covered Software is *
|
||||||
|
* authorized under this License except under this disclaimer. *
|
||||||
|
* *
|
||||||
|
************************************************************************
|
||||||
|
|
||||||
|
************************************************************************
|
||||||
|
* *
|
||||||
|
* 7. Limitation of Liability *
|
||||||
|
* -------------------------- *
|
||||||
|
* *
|
||||||
|
* Under no circumstances and under no legal theory, whether tort *
|
||||||
|
* (including negligence), contract, or otherwise, shall any *
|
||||||
|
* Contributor, or anyone who distributes Covered Software as *
|
||||||
|
* permitted above, be liable to You for any direct, indirect, *
|
||||||
|
* special, incidental, or consequential damages of any character *
|
||||||
|
* including, without limitation, damages for lost profits, loss of *
|
||||||
|
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||||
|
* and all other commercial damages or losses, even if such party *
|
||||||
|
* shall have been informed of the possibility of such damages. This *
|
||||||
|
* limitation of liability shall not apply to liability for death or *
|
||||||
|
* personal injury resulting from such party's negligence to the *
|
||||||
|
* extent applicable law prohibits such limitation. Some *
|
||||||
|
* jurisdictions do not allow the exclusion or limitation of *
|
||||||
|
* incidental or consequential damages, so this exclusion and *
|
||||||
|
* limitation may not apply to You. *
|
||||||
|
* *
|
||||||
|
************************************************************************
|
||||||
|
|
||||||
|
8. Litigation
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Any litigation relating to this License may be brought only in the
|
||||||
|
courts of a jurisdiction where the defendant maintains its principal
|
||||||
|
place of business and such litigation shall be governed by laws of that
|
||||||
|
jurisdiction, without reference to its conflict-of-law provisions.
|
||||||
|
Nothing in this Section shall prevent a party's ability to bring
|
||||||
|
cross-claims or counter-claims.
|
||||||
|
|
||||||
|
9. Miscellaneous
|
||||||
|
----------------
|
||||||
|
|
||||||
|
This License represents the complete agreement concerning the subject
|
||||||
|
matter hereof. If any provision of this License is held to be
|
||||||
|
unenforceable, such provision shall be reformed only to the extent
|
||||||
|
necessary to make it enforceable. Any law or regulation which provides
|
||||||
|
that the language of a contract shall be construed against the drafter
|
||||||
|
shall not be used to construe this License against a Contributor.
|
||||||
|
|
||||||
|
10. Versions of the License
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
10.1. New Versions
|
||||||
|
|
||||||
|
Mozilla Foundation is the license steward. Except as provided in Section
|
||||||
|
10.3, no one other than the license steward has the right to modify or
|
||||||
|
publish new versions of this License. Each version will be given a
|
||||||
|
distinguishing version number.
|
||||||
|
|
||||||
|
10.2. Effect of New Versions
|
||||||
|
|
||||||
|
You may distribute the Covered Software under the terms of the version
|
||||||
|
of the License under which You originally received the Covered Software,
|
||||||
|
or under the terms of any subsequent version published by the license
|
||||||
|
steward.
|
||||||
|
|
||||||
|
10.3. Modified Versions
|
||||||
|
|
||||||
|
If you create software not governed by this License, and you want to
|
||||||
|
create a new license for such software, you may create and use a
|
||||||
|
modified version of this License if you rename the license and remove
|
||||||
|
any references to the name of the license steward (except to note that
|
||||||
|
such modified license differs from this License).
|
||||||
|
|
||||||
|
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||||
|
Licenses
|
||||||
|
|
||||||
|
If You choose to distribute Source Code Form that is Incompatible With
|
||||||
|
Secondary Licenses under the terms of this version of the License, the
|
||||||
|
notice described in Exhibit B of this License must be attached.
|
||||||
|
|
||||||
|
Exhibit A - Source Code Form License Notice
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||||
|
|
||||||
|
If it is not possible or desirable to put the notice in a particular
|
||||||
|
file, then You may include the notice in a location (such as a LICENSE
|
||||||
|
file in a relevant directory) where a recipient would be likely to look
|
||||||
|
for such a notice.
|
||||||
|
|
||||||
|
You may add additional accurate notices of copyright ownership.
|
||||||
|
|
||||||
|
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||||
|
---------------------------------------------------------
|
||||||
|
|
||||||
|
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||||
|
defined by the Mozilla Public License, v. 2.0.
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
### uvicorn (0.40.0) — BSD-3-Clause
|
### uvicorn (0.40.0) — BSD-3-Clause
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
|
|||||||
+371
@@ -0,0 +1,371 @@
|
|||||||
|
# Home Assistant Integration
|
||||||
|
|
||||||
|
RemoteTerm can publish mesh network data to Home Assistant via MQTT Discovery. Devices and entities appear automatically in HA -- no custom component or HACS install needed.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Home Assistant with the [MQTT integration](https://www.home-assistant.io/integrations/mqtt/) configured
|
||||||
|
- An MQTT broker (e.g. Mosquitto) accessible to both HA and RemoteTerm
|
||||||
|
- RemoteTerm running and connected to a radio
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. In RemoteTerm, go to **Settings > Integrations > Add > Home Assistant MQTT Discovery**
|
||||||
|
2. Enter your MQTT broker host and port (same broker HA is connected to)
|
||||||
|
3. Optionally enter broker username/password and TLS settings
|
||||||
|
4. Select contacts for GPS tracking and repeaters for telemetry (see below)
|
||||||
|
5. Configure which messages should fire events (scope selector at the bottom)
|
||||||
|
6. Save and enable
|
||||||
|
|
||||||
|
Devices will appear in HA under **Settings > Devices & Services > MQTT** within a few seconds.
|
||||||
|
|
||||||
|
## How MeshCore IDs Map Into Home Assistant
|
||||||
|
|
||||||
|
RemoteTerm uses each node's public key to derive a stable short identifier:
|
||||||
|
|
||||||
|
- Full public key: `ae92577bae6c4f1d...`
|
||||||
|
- Node ID: `ae92577bae6c` (the first 12 hex characters, lowercased)
|
||||||
|
- Example entity ID: `device_tracker.meshcore_ae92577bae6c`
|
||||||
|
- Example runtime topic: `meshcore/ae92577bae6c/gps`
|
||||||
|
|
||||||
|
When this README shows `<node_id>`, it always means that 12-character value.
|
||||||
|
|
||||||
|
The same node ID appears in:
|
||||||
|
|
||||||
|
- Home Assistant entity IDs
|
||||||
|
- Home Assistant discovery topics under `homeassistant/...`
|
||||||
|
- Runtime MQTT state topics under your configured prefix, usually `meshcore/...`
|
||||||
|
|
||||||
|
You can also see these IDs in RemoteTerm's Home Assistant integration UI:
|
||||||
|
|
||||||
|
- `What gets created in Home Assistant`
|
||||||
|
- `Published Topic Summary`
|
||||||
|
|
||||||
|
## What Gets Created
|
||||||
|
|
||||||
|
### Local Radio Device
|
||||||
|
|
||||||
|
Always created. Updates every 60 seconds.
|
||||||
|
|
||||||
|
| Entity | Type | Description |
|
||||||
|
|--------|------|-------------|
|
||||||
|
| `binary_sensor.meshcore_<radio_node_id>_connected` | Connectivity | Radio online/offline |
|
||||||
|
| `sensor.meshcore_<radio_node_id>_noise_floor` | Signal strength | Radio noise floor (dBm) |
|
||||||
|
|
||||||
|
### Repeater Devices
|
||||||
|
|
||||||
|
One device per tracked repeater selected in the HA integration. Updates when telemetry is collected — either during the auto-collect cycle (~8 hours by default, configurable in settings) or when you manually fetch from the repeater dashboard.
|
||||||
|
|
||||||
|
Repeaters must first be added to the auto-telemetry tracking list in RemoteTerm's Radio settings section. Only auto-tracked repeaters appear in the HA integration's repeater picker.
|
||||||
|
|
||||||
|
| Entity | Type | Unit | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_battery_voltage` | Voltage | V | Battery level |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_noise_floor` | Signal strength | dBm | Local noise floor |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_last_rssi` | Signal strength | dBm | Last received signal strength |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_last_snr` | -- | dB | Last signal-to-noise ratio |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_packets_received` | -- | count | Total packets received |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_packets_sent` | -- | count | Total packets sent |
|
||||||
|
| `sensor.meshcore_<repeater_node_id>_uptime` | Duration | s | Uptime since last reboot |
|
||||||
|
|
||||||
|
If RemoteTerm already has a cached telemetry snapshot for that repeater, it republishes it on startup so HA can populate the sensors immediately instead of waiting for the next collection cycle.
|
||||||
|
|
||||||
|
### Contact Device Trackers
|
||||||
|
|
||||||
|
One `device_tracker` per tracked contact. Updates passively whenever RemoteTerm hears an advertisement with GPS coordinates from that contact. No radio commands are sent -- it piggybacks on normal mesh traffic.
|
||||||
|
|
||||||
|
| Entity | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `device_tracker.meshcore_<contact_node_id>` | GPS position (latitude/longitude) |
|
||||||
|
|
||||||
|
### Message Event Entity
|
||||||
|
|
||||||
|
A single radio-scoped event entity, `event.meshcore_<radio_node_id>_messages`, fires for each message matching your configured scope. Each event carries these attributes:
|
||||||
|
|
||||||
|
| Attribute | Example | Description |
|
||||||
|
|-----------|---------|-------------|
|
||||||
|
| `event_type` | `message_received` | Always `message_received` |
|
||||||
|
| `sender_name` | `Alice` | Display name of the sender |
|
||||||
|
| `sender_key` | `aabbccdd...` | Sender's public key |
|
||||||
|
| `text` | `hello` | Message body |
|
||||||
|
| `message_type` | `PRIV` or `CHAN` | Direct message or channel |
|
||||||
|
| `channel_name` | `#general` | Channel name (null for DMs) |
|
||||||
|
| `conversation_key` | `aabbccdd...` | Contact key (DM) or channel key |
|
||||||
|
| `outgoing` | `false` | Whether you sent this message |
|
||||||
|
|
||||||
|
## Entity Naming
|
||||||
|
|
||||||
|
Entity IDs use the first 12 characters of the node's public key as an identifier. For example, a contact with public key `ae92577bae6c...` gets entity ID `device_tracker.meshcore_ae92577bae6c`. You can rename entities in HA's UI without affecting the integration.
|
||||||
|
|
||||||
|
That same 12-character node ID is also used in the MQTT topic paths. For example:
|
||||||
|
|
||||||
|
- Local radio health: `meshcore/<radio_node_id>/health`
|
||||||
|
- Repeater telemetry: `meshcore/<repeater_node_id>/telemetry`
|
||||||
|
- Contact GPS: `meshcore/<contact_node_id>/gps`
|
||||||
|
- Message events: `meshcore/<radio_node_id>/events/message`
|
||||||
|
|
||||||
|
## What Appears When
|
||||||
|
|
||||||
|
- Always created: the local radio device and its entities
|
||||||
|
- Created when selected in the HA integration: tracked repeater devices and tracked contact device trackers
|
||||||
|
- Populated only after data exists: contact GPS trackers need an advert with GPS; repeater sensors need telemetry, although cached repeater telemetry is replayed on startup when available
|
||||||
|
- Message event entity: always created once the HA integration is enabled for a connected radio
|
||||||
|
|
||||||
|
## Common Automations
|
||||||
|
|
||||||
|
### Low repeater battery alert
|
||||||
|
|
||||||
|
Notify when a tracked repeater's battery drops below a threshold.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > Numeric state trigger on `sensor.meshcore_<repeater_node_id>_battery_voltage`, below `3.8`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Repeater battery low"
|
||||||
|
trigger:
|
||||||
|
- platform: numeric_state
|
||||||
|
entity_id: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
below: 3.8
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Repeater Battery Low"
|
||||||
|
message: >-
|
||||||
|
{{ state_attr('sensor.meshcore_aabbccddeeff_battery_voltage', 'friendly_name') }}
|
||||||
|
is at {{ states('sensor.meshcore_aabbccddeeff_battery_voltage') }}V
|
||||||
|
```
|
||||||
|
|
||||||
|
### Radio offline alert
|
||||||
|
|
||||||
|
Notify if the radio has been disconnected for more than 5 minutes.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `binary_sensor.meshcore_<radio_node_id>_connected`, to `off`, for `00:05:00`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Radio offline"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: binary_sensor.meshcore_aabbccddeeff_connected
|
||||||
|
to: "off"
|
||||||
|
for: "00:05:00"
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "MeshCore Radio Offline"
|
||||||
|
message: "Radio has been disconnected for 5 minutes"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on any message from a specific room
|
||||||
|
|
||||||
|
Trigger when a message arrives in a specific channel. Two approaches:
|
||||||
|
|
||||||
|
#### Option A: Scope filtering (fully GUI, no template)
|
||||||
|
|
||||||
|
If you only care about one room, configure the HA integration's message scope to "Only listed channels" and select that room. Then every event that fires is from that room.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `event.meshcore_<radio_node_id>_messages`, action: notification.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Emergency channel alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_aabbccddeeff_messages
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Message in #emergency"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Option B: Template condition (multiple rooms, one integration)
|
||||||
|
|
||||||
|
Keep scope as "All messages" and filter in the automation. The trigger can be configured in the GUI, but the condition uses a one-line template.
|
||||||
|
|
||||||
|
**GUI:** Settings > Automations > Create > State trigger on `event.meshcore_<radio_node_id>_messages` > Add condition > Template > enter the template below.
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Emergency channel alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_aabbccddeeff_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ trigger.to_state.attributes.channel_name == '#emergency' }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Message in #emergency"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on DM from a specific contact
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "DM from Alice"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_aabbccddeeff_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ trigger.to_state.attributes.message_type == 'PRIV'
|
||||||
|
and trigger.to_state.attributes.sender_name == 'Alice' }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "DM from Alice"
|
||||||
|
message: "{{ trigger.to_state.attributes.text }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert on messages containing a keyword
|
||||||
|
|
||||||
|
**YAML:**
|
||||||
|
```yaml
|
||||||
|
automation:
|
||||||
|
- alias: "Keyword alert"
|
||||||
|
trigger:
|
||||||
|
- platform: state
|
||||||
|
entity_id: event.meshcore_aabbccddeeff_messages
|
||||||
|
condition:
|
||||||
|
- condition: template
|
||||||
|
value_template: >-
|
||||||
|
{{ 'emergency' in trigger.to_state.attributes.text | lower }}
|
||||||
|
action:
|
||||||
|
- service: notify.mobile_app_your_phone
|
||||||
|
data:
|
||||||
|
title: "Emergency keyword detected"
|
||||||
|
message: >-
|
||||||
|
{{ trigger.to_state.attributes.sender_name }} in
|
||||||
|
{{ trigger.to_state.attributes.channel_name or 'DM' }}:
|
||||||
|
{{ trigger.to_state.attributes.text }}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Track a contact on the HA map
|
||||||
|
|
||||||
|
No automation needed. Once a contact is selected for GPS tracking, their `device_tracker` entity automatically appears on the HA map. Go to **Settings > Dashboards > Map** (or add a Map card to any dashboard) and the tracked contact will show up when they advertise their GPS position.
|
||||||
|
|
||||||
|
### Dashboard card showing repeater battery
|
||||||
|
|
||||||
|
Add a sensor card to any dashboard:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
type: sensor
|
||||||
|
entity: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
name: "Hilltop Repeater Battery"
|
||||||
|
```
|
||||||
|
|
||||||
|
Or an entities card for multiple repeaters:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
type: entities
|
||||||
|
title: "Repeater Status"
|
||||||
|
entities:
|
||||||
|
- entity: sensor.meshcore_aabbccddeeff_battery_voltage
|
||||||
|
name: "Hilltop"
|
||||||
|
- entity: sensor.meshcore_ccdd11223344_battery_voltage
|
||||||
|
name: "Valley"
|
||||||
|
- entity: sensor.meshcore_eeff55667788_battery_voltage
|
||||||
|
name: "Ridge"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Devices don't appear in HA
|
||||||
|
|
||||||
|
- Verify the MQTT integration is configured in HA (**Settings > Devices & Services > MQTT**) and shows "Connected"
|
||||||
|
- Verify RemoteTerm's HA integration shows "Connected" (green dot)
|
||||||
|
- Check that both HA and RemoteTerm are using the same MQTT broker
|
||||||
|
- Subscribe to discovery topics to verify messages are flowing:
|
||||||
|
```
|
||||||
|
mosquitto_sub -h <broker> -t 'homeassistant/#' -v
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stale or duplicate devices
|
||||||
|
|
||||||
|
If you see unexpected devices (e.g. a generic "MeshCore Radio" alongside your named radio), clear the stale retained messages:
|
||||||
|
```
|
||||||
|
mosquitto_pub -h <broker> -t 'homeassistant/binary_sensor/meshcore_unknown/connected/config' -r -n
|
||||||
|
mosquitto_pub -h <broker> -t 'homeassistant/sensor/meshcore_unknown/noise_floor/config' -r -n
|
||||||
|
```
|
||||||
|
|
||||||
|
### Repeater sensors show "Unknown" or "Unavailable"
|
||||||
|
|
||||||
|
Repeater telemetry only updates when collected. Trigger a manual fetch by opening the repeater's dashboard in RemoteTerm and clicking "Status", or wait for the next auto-collect cycle (~8 hours).
|
||||||
|
|
||||||
|
If RemoteTerm already has cached telemetry for that repeater, it republishes the last known values on startup. If the sensors are still unknown or unavailable, it usually means no telemetry has ever been collected for that repeater yet.
|
||||||
|
|
||||||
|
### Contact device tracker shows "Unknown"
|
||||||
|
|
||||||
|
The contact's GPS position only updates when RemoteTerm hears an advertisement from that node that includes GPS coordinates. If the contact's device doesn't broadcast GPS or hasn't advertised recently, the tracker will show as unknown.
|
||||||
|
|
||||||
|
### Entity is "Unavailable"
|
||||||
|
|
||||||
|
Radio health entities have a 120-second expiry. If RemoteTerm stops sending health updates (e.g. it's shut down or loses connection to the broker), HA marks the entities as unavailable after 2 minutes. Restart RemoteTerm or check the broker connection.
|
||||||
|
|
||||||
|
## Removing the Integration
|
||||||
|
|
||||||
|
Disabling or deleting the HA integration in RemoteTerm's settings publishes empty retained messages to all discovery topics, which removes the devices and entities from HA automatically.
|
||||||
|
|
||||||
|
## Local Test Environment
|
||||||
|
|
||||||
|
For local development, RemoteTerm includes a helper that starts Mosquitto and Home Assistant with MQTT preconfigured:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./scripts/setup/start_ha_test_env.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
That gives you:
|
||||||
|
|
||||||
|
- Home Assistant at `http://localhost:8123`
|
||||||
|
- Mosquitto at `localhost:1883`
|
||||||
|
- A pre-created HA MQTT integration using that broker
|
||||||
|
|
||||||
|
To watch all MQTT traffic during testing:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker exec ha-test-mosquitto mosquitto_sub -h 127.0.0.1 -t '#' -v
|
||||||
|
```
|
||||||
|
|
||||||
|
To stop and clean up:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./scripts/setup/stop_ha_test_env.sh --clean
|
||||||
|
```
|
||||||
|
|
||||||
|
## MQTT Topics Reference
|
||||||
|
|
||||||
|
Runtime/state topics (where data is published):
|
||||||
|
|
||||||
|
| Topic | Content | Update frequency |
|
||||||
|
|-------|---------|-----------------|
|
||||||
|
| `meshcore/{node_id}/health` | `{"connected": bool, "noise_floor_dbm": int}` | Every 60s |
|
||||||
|
| `meshcore/{node_id}/telemetry` | `{"battery_volts": float, ...}` | ~8h or manual |
|
||||||
|
| `meshcore/{node_id}/gps` | `{"latitude": float, "longitude": float, ...}` | On advert |
|
||||||
|
| `meshcore/{node_id}/events/message` | `{"event_type": "message_received", ...}` | On message |
|
||||||
|
|
||||||
|
Discovery topics (entity registration, under `homeassistant/`):
|
||||||
|
|
||||||
|
| Pattern | Entity type |
|
||||||
|
|---------|------------|
|
||||||
|
| `homeassistant/binary_sensor/meshcore_<node_id>/connected/config` | Radio connectivity |
|
||||||
|
| `homeassistant/sensor/meshcore_<node_id>/noise_floor/config` | Noise floor sensor |
|
||||||
|
| `homeassistant/sensor/meshcore_<node_id>/battery_voltage/config` | Repeater battery |
|
||||||
|
| `homeassistant/sensor/meshcore_<node_id>/*/config` | Other repeater sensors |
|
||||||
|
| `homeassistant/device_tracker/meshcore_<node_id>/config` | Contact GPS tracker |
|
||||||
|
| `homeassistant/event/meshcore_<node_id>/messages/config` | Message event entity |
|
||||||
|
|
||||||
|
The `{node_id}` is always the first 12 characters of the node's public key, lowercased.
|
||||||
+38
-7
@@ -27,10 +27,10 @@ app/
|
|||||||
├── config.py # Env-driven runtime settings
|
├── config.py # Env-driven runtime settings
|
||||||
├── channel_constants.py # Public/default channel constants shared across sync/send logic
|
├── channel_constants.py # Public/default channel constants shared across sync/send logic
|
||||||
├── database.py # SQLite connection + base schema + migration runner
|
├── database.py # SQLite connection + base schema + migration runner
|
||||||
├── migrations.py # Schema migrations (SQLite user_version)
|
├── migrations/ # Schema migrations (SQLite user_version, per-version modules)
|
||||||
├── models.py # Pydantic request/response models and typed write contracts (for example ContactUpsert)
|
├── models.py # Pydantic request/response models and typed write contracts (for example ContactUpsert)
|
||||||
├── version_info.py # Unified version/build metadata resolution for debug + startup surfaces
|
├── version_info.py # Unified version/build metadata resolution for debug + startup surfaces
|
||||||
├── repository/ # Data access layer (contacts, channels, messages, raw_packets, settings, fanout)
|
├── repository/ # Data access layer (contacts, channels, messages, raw_packets, settings, fanout, push_subscriptions, repeater_telemetry)
|
||||||
├── services/ # Shared orchestration/domain services
|
├── services/ # Shared orchestration/domain services
|
||||||
│ ├── messages.py # Shared message creation, dedup, ACK application
|
│ ├── messages.py # Shared message creation, dedup, ACK application
|
||||||
│ ├── message_send.py # Direct send, channel send, resend workflows
|
│ ├── message_send.py # Direct send, channel send, resend workflows
|
||||||
@@ -50,8 +50,12 @@ app/
|
|||||||
├── events.py # Typed WS event payload serialization
|
├── events.py # Typed WS event payload serialization
|
||||||
├── websocket.py # WS manager + broadcast helpers
|
├── websocket.py # WS manager + broadcast helpers
|
||||||
├── security.py # Optional app-wide HTTP Basic auth middleware for HTTP + WS
|
├── security.py # Optional app-wide HTTP Basic auth middleware for HTTP + WS
|
||||||
|
├── push/ # Web Push notification subsystem
|
||||||
|
│ ├── vapid.py # VAPID key generation, storage, caching
|
||||||
|
│ ├── send.py # pywebpush wrapper (async via thread executor)
|
||||||
|
│ └── manager.py # Push dispatch: filter, build payload, concurrent send
|
||||||
├── fanout/ # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
|
├── fanout/ # Fanout bus: MQTT, bots, webhooks, Apprise, SQS (see fanout/AGENTS_fanout.md)
|
||||||
├── dependencies.py # Shared FastAPI dependency providers
|
├── telemetry_interval.py # Shared telemetry interval math for tracked-repeater scheduler
|
||||||
├── path_utils.py # Path hex rendering and hop-width helpers
|
├── path_utils.py # Path hex rendering and hop-width helpers
|
||||||
├── region_scope.py # Normalize/validate regional flood-scope values
|
├── region_scope.py # Normalize/validate regional flood-scope values
|
||||||
├── keystore.py # Ephemeral private/public key storage for DM decryption
|
├── keystore.py # Ephemeral private/public key storage for DM decryption
|
||||||
@@ -66,11 +70,12 @@ app/
|
|||||||
├── packets.py
|
├── packets.py
|
||||||
├── read_state.py
|
├── read_state.py
|
||||||
├── rooms.py
|
├── rooms.py
|
||||||
├── server_control.py
|
├── server_control.py # Shared helpers for repeater/room CLI flows (not an APIRouter)
|
||||||
├── settings.py
|
├── settings.py
|
||||||
├── fanout.py
|
├── fanout.py
|
||||||
├── repeaters.py
|
├── repeaters.py
|
||||||
├── statistics.py
|
├── statistics.py
|
||||||
|
├── push.py
|
||||||
└── ws.py
|
└── ws.py
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -135,8 +140,9 @@ app/
|
|||||||
|
|
||||||
### Echo/repeat dedup
|
### Echo/repeat dedup
|
||||||
|
|
||||||
- Message uniqueness: `(type, conversation_key, text, sender_timestamp)`.
|
- Channel message uniqueness (`idx_messages_dedup_null_safe`): `(type, conversation_key, text, COALESCE(sender_timestamp, 0))` where `type = 'CHAN'`.
|
||||||
- Duplicate insert is treated as an echo/repeat: the new path (if any) is appended, and the ACK count is incremented only for outgoing channel messages. Incoming direct messages with the same conversation/text/sender timestamp also collapse onto one stored row, with later observations merging path data instead of creating a second DM.
|
- Incoming PRIV message uniqueness (`idx_messages_incoming_priv_dedup`): `(type, conversation_key, text, COALESCE(sender_timestamp, 0), COALESCE(sender_key, ''))` where `type = 'PRIV' AND outgoing = 0` — `sender_key` was added in migration 056 to distinguish room-server posts from different senders in the same second.
|
||||||
|
- Duplicate insert is treated as an echo/repeat: the new path (if any) is appended, and the ACK count is incremented only for outgoing channel messages. Incoming direct messages with the same dedup identity also collapse onto one stored row, with later observations merging path data instead of creating a second DM.
|
||||||
|
|
||||||
### Raw packet dedup policy
|
### Raw packet dedup policy
|
||||||
|
|
||||||
@@ -168,6 +174,17 @@ app/
|
|||||||
- Community MQTT publishes raw packets only, but its derived `path` field for direct packets is emitted as comma-separated hop identifiers, not flat path bytes.
|
- Community MQTT publishes raw packets only, but its derived `path` field for direct packets is emitted as comma-separated hop identifiers, not flat path bytes.
|
||||||
- See `app/fanout/AGENTS_fanout.md` for full architecture details and event payload shapes.
|
- See `app/fanout/AGENTS_fanout.md` for full architecture details and event payload shapes.
|
||||||
|
|
||||||
|
### Web Push notifications
|
||||||
|
|
||||||
|
Web Push is a standalone subsystem in `app/push/`, separate from the fanout module system. It sends browser push notifications for incoming messages even when the tab is closed.
|
||||||
|
|
||||||
|
- **Not a fanout module** — Web Push manages per-browser subscriptions (N browsers, each with its own endpoint and delivery state), unlike fanout which is one-config-to-one-destination.
|
||||||
|
- **VAPID keys**: auto-generated P-256 key pair on first startup, stored in `app_settings.vapid_private_key` / `vapid_public_key`. Cached in-module by `app/push/vapid.py`.
|
||||||
|
- **Dispatch**: `broadcast_event()` in `websocket.py` fires `push_manager.dispatch_message(data)` alongside fanout for `message` events. The manager checks the global `app_settings.push_conversations` list, then sends to all currently registered subscriptions via `pywebpush` (run in a thread executor).
|
||||||
|
- **Stale cleanup**: HTTP 404/410 from the push service triggers immediate subscription deletion.
|
||||||
|
- **Subscriptions stored** in `push_subscriptions` table with `UNIQUE(endpoint)` for upsert semantics.
|
||||||
|
- Requires HTTPS (self-signed OK) and outbound internet to reach browser push services.
|
||||||
|
|
||||||
## API Surface (all under `/api`)
|
## API Surface (all under `/api`)
|
||||||
|
|
||||||
### Health
|
### Health
|
||||||
@@ -208,6 +225,7 @@ app/
|
|||||||
- `POST /contacts/{public_key}/repeater/radio-settings`
|
- `POST /contacts/{public_key}/repeater/radio-settings`
|
||||||
- `POST /contacts/{public_key}/repeater/advert-intervals`
|
- `POST /contacts/{public_key}/repeater/advert-intervals`
|
||||||
- `POST /contacts/{public_key}/repeater/owner-info`
|
- `POST /contacts/{public_key}/repeater/owner-info`
|
||||||
|
- `GET /contacts/{public_key}/repeater/telemetry-history` — stored telemetry history for a repeater (read-only, no radio access)
|
||||||
- `POST /contacts/{public_key}/room/login`
|
- `POST /contacts/{public_key}/room/login`
|
||||||
- `POST /contacts/{public_key}/room/status`
|
- `POST /contacts/{public_key}/room/status`
|
||||||
- `POST /contacts/{public_key}/room/lpp-telemetry`
|
- `POST /contacts/{public_key}/room/lpp-telemetry`
|
||||||
@@ -247,6 +265,7 @@ app/
|
|||||||
- `POST /settings/blocked-keys/toggle`
|
- `POST /settings/blocked-keys/toggle`
|
||||||
- `POST /settings/blocked-names/toggle`
|
- `POST /settings/blocked-names/toggle`
|
||||||
- `POST /settings/tracked-telemetry/toggle`
|
- `POST /settings/tracked-telemetry/toggle`
|
||||||
|
- `GET /settings/tracked-telemetry/schedule` — current telemetry scheduling derivation, interval options, and next-run-at timestamp
|
||||||
|
|
||||||
### Fanout
|
### Fanout
|
||||||
- `GET /fanout` — list all fanout configs
|
- `GET /fanout` — list all fanout configs
|
||||||
@@ -258,6 +277,16 @@ app/
|
|||||||
### Statistics
|
### Statistics
|
||||||
- `GET /statistics` — aggregated mesh network stats (entity counts, message/packet splits, activity windows, busiest channels)
|
- `GET /statistics` — aggregated mesh network stats (entity counts, message/packet splits, activity windows, busiest channels)
|
||||||
|
|
||||||
|
### Push
|
||||||
|
- `GET /push/vapid-public-key` — VAPID public key for browser `PushManager.subscribe()`
|
||||||
|
- `POST /push/subscribe` — register/upsert push subscription (keyed by endpoint URL)
|
||||||
|
- `GET /push/subscriptions` — list all push subscriptions
|
||||||
|
- `PATCH /push/subscriptions/{id}` — update label or filter preferences
|
||||||
|
- `DELETE /push/subscriptions/{id}` — delete subscription
|
||||||
|
- `POST /push/subscriptions/{id}/test` — send test notification
|
||||||
|
- `GET /push/conversations` — global list of push-enabled conversation state keys
|
||||||
|
- `POST /push/conversations/toggle` — add or remove a conversation from the global push list
|
||||||
|
|
||||||
### WebSocket
|
### WebSocket
|
||||||
- `WS /ws`
|
- `WS /ws`
|
||||||
|
|
||||||
@@ -290,7 +319,8 @@ Main tables:
|
|||||||
- `contact_name_history` (tracks name changes over time)
|
- `contact_name_history` (tracks name changes over time)
|
||||||
- `repeater_telemetry_history` (time-series telemetry snapshots for tracked repeaters)
|
- `repeater_telemetry_history` (time-series telemetry snapshots for tracked repeaters)
|
||||||
- `fanout_configs` (MQTT, bot, webhook, Apprise, SQS integration configs)
|
- `fanout_configs` (MQTT, bot, webhook, Apprise, SQS integration configs)
|
||||||
- `app_settings`
|
- `push_subscriptions` (Web Push browser subscriptions with delivery metadata; UNIQUE on endpoint)
|
||||||
|
- `app_settings` (includes `vapid_private_key` and `vapid_public_key` for Web Push VAPID signing)
|
||||||
|
|
||||||
Contact route state is canonicalized on the backend:
|
Contact route state is canonicalized on the backend:
|
||||||
- stored route inputs: `direct_path`, `direct_path_len`, `direct_path_hash_mode`, `direct_path_updated_at`, plus optional `route_override_*`
|
- stored route inputs: `direct_path`, `direct_path_len`, `direct_path_hash_mode`, `direct_path_updated_at`, plus optional `route_override_*`
|
||||||
@@ -313,6 +343,7 @@ Repository writes should prefer typed models such as `ContactUpsert` over ad hoc
|
|||||||
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
||||||
- `tracked_telemetry_repeaters`
|
- `tracked_telemetry_repeaters`
|
||||||
- `auto_resend_channel`
|
- `auto_resend_channel`
|
||||||
|
- `telemetry_interval_hours`
|
||||||
|
|
||||||
Note: MQTT, community MQTT, and bot configs were migrated to the `fanout_configs` table (migrations 36-38).
|
Note: MQTT, community MQTT, and bot configs were migrated to the `fanout_configs` table (migrations 36-38).
|
||||||
|
|
||||||
|
|||||||
+86
-1
@@ -1,4 +1,7 @@
|
|||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
from collections.abc import AsyncIterator
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import aiosqlite
|
import aiosqlite
|
||||||
@@ -108,7 +111,8 @@ CREATE TABLE IF NOT EXISTS app_settings (
|
|||||||
blocked_names TEXT DEFAULT '[]',
|
blocked_names TEXT DEFAULT '[]',
|
||||||
discovery_blocked_types TEXT DEFAULT '[]',
|
discovery_blocked_types TEXT DEFAULT '[]',
|
||||||
tracked_telemetry_repeaters TEXT DEFAULT '[]',
|
tracked_telemetry_repeaters TEXT DEFAULT '[]',
|
||||||
auto_resend_channel INTEGER DEFAULT 0
|
auto_resend_channel INTEGER DEFAULT 0,
|
||||||
|
telemetry_interval_hours INTEGER DEFAULT 8
|
||||||
);
|
);
|
||||||
INSERT OR IGNORE INTO app_settings (id) VALUES (1);
|
INSERT OR IGNORE INTO app_settings (id) VALUES (1);
|
||||||
|
|
||||||
@@ -164,9 +168,74 @@ CREATE INDEX IF NOT EXISTS idx_repeater_telemetry_pk_ts
|
|||||||
|
|
||||||
|
|
||||||
class Database:
|
class Database:
|
||||||
|
"""Single-connection aiosqlite wrapper with coroutine-level serialization.
|
||||||
|
|
||||||
|
Why the lock: aiosqlite runs one ``sqlite3.Connection`` on a background
|
||||||
|
worker thread and serializes statement execution there. But SQLite's
|
||||||
|
``COMMIT`` fails with ``OperationalError: cannot commit transaction -
|
||||||
|
SQL statements in progress`` whenever *any* cursor on the connection has
|
||||||
|
a live prepared statement (a ``SELECT`` that returned ``SQLITE_ROW`` but
|
||||||
|
hasn't been fully consumed or closed). Under concurrent coroutines, one
|
||||||
|
task's in-flight ``fetchone()`` can still be in ``SQLITE_ROW`` state when
|
||||||
|
another task's ``commit()`` runs on the worker — triggering the error.
|
||||||
|
|
||||||
|
Fix: all DB work goes through ``tx()`` (writes) or ``readonly()`` (reads),
|
||||||
|
both of which acquire ``self._lock``. The lock is non-reentrant (asyncio
|
||||||
|
default) by design — nested ``tx()`` calls are a bug. Repository methods
|
||||||
|
that compose multiple operations factor the raw SQL into private helpers
|
||||||
|
that take a ``conn`` and don't lock; the public method acquires the lock
|
||||||
|
once and calls those helpers.
|
||||||
|
|
||||||
|
Why reads are also locked: reads must also hold the lock, because a read
|
||||||
|
in ``SQLITE_ROW`` state is precisely the live statement that breaks a
|
||||||
|
concurrent writer's commit. Single-connection aiosqlite cannot safely
|
||||||
|
overlap reads and writes. If we ever split reader/writer connections in
|
||||||
|
the future, ``readonly()`` becomes the seam to point at the reader pool.
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, db_path: str):
|
def __init__(self, db_path: str):
|
||||||
self.db_path = db_path
|
self.db_path = db_path
|
||||||
self._connection: aiosqlite.Connection | None = None
|
self._connection: aiosqlite.Connection | None = None
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def tx(self) -> AsyncIterator[aiosqlite.Connection]:
|
||||||
|
"""Acquire the connection for a write transaction.
|
||||||
|
|
||||||
|
Commits on clean exit, rolls back on exception. Callers MUST close
|
||||||
|
every cursor opened inside the block (use ``async with conn.execute(...)
|
||||||
|
as cursor:``) so no prepared statement is alive when commit runs.
|
||||||
|
|
||||||
|
The lock serializes concurrent writers AND ensures no reader's cursor
|
||||||
|
is alive during the commit. Nested calls will deadlock — factor shared
|
||||||
|
SQL into helpers that accept ``conn`` and do not re-enter ``tx()``.
|
||||||
|
"""
|
||||||
|
async with self._lock:
|
||||||
|
if self._connection is None:
|
||||||
|
raise RuntimeError("Database not connected")
|
||||||
|
conn = self._connection
|
||||||
|
try:
|
||||||
|
yield conn
|
||||||
|
except BaseException:
|
||||||
|
await conn.rollback()
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
await conn.commit()
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def readonly(self) -> AsyncIterator[aiosqlite.Connection]:
|
||||||
|
"""Acquire the connection for a read. No commit, no rollback.
|
||||||
|
|
||||||
|
Locked for the same reason writes are: on a single connection, an
|
||||||
|
active read statement blocks a concurrent writer's commit. Callers
|
||||||
|
MUST fully consume or close cursors before the block exits (use
|
||||||
|
``async with conn.execute(...) as cursor:`` + ``fetchall`` /
|
||||||
|
``fetchone``; avoid holding a cursor across ``await`` on other IO).
|
||||||
|
"""
|
||||||
|
async with self._lock:
|
||||||
|
if self._connection is None:
|
||||||
|
raise RuntimeError("Database not connected")
|
||||||
|
yield self._connection
|
||||||
|
|
||||||
async def connect(self) -> None:
|
async def connect(self) -> None:
|
||||||
logger.info("Connecting to database at %s", self.db_path)
|
logger.info("Connecting to database at %s", self.db_path)
|
||||||
@@ -178,6 +247,22 @@ class Database:
|
|||||||
# Persists in the DB file but we set it explicitly on every connection.
|
# Persists in the DB file but we set it explicitly on every connection.
|
||||||
await self._connection.execute("PRAGMA journal_mode = WAL")
|
await self._connection.execute("PRAGMA journal_mode = WAL")
|
||||||
|
|
||||||
|
# synchronous = NORMAL is safe with WAL — only the most recent
|
||||||
|
# transaction can be lost on an OS crash (no corruption risk).
|
||||||
|
# Reduces fsync overhead vs. the default FULL.
|
||||||
|
await self._connection.execute("PRAGMA synchronous = NORMAL")
|
||||||
|
|
||||||
|
# Retry for up to 5s on lock contention instead of failing instantly.
|
||||||
|
# Matters when a second connection (e.g. VACUUM) touches the DB.
|
||||||
|
await self._connection.execute("PRAGMA busy_timeout = 5000")
|
||||||
|
|
||||||
|
# Bump page cache to ~64 MB (negative value = KB). Keeps hot pages
|
||||||
|
# in memory for read-heavy queries (unreads, pagination, search).
|
||||||
|
await self._connection.execute("PRAGMA cache_size = -64000")
|
||||||
|
|
||||||
|
# Keep temp tables and sort spills in memory instead of on disk.
|
||||||
|
await self._connection.execute("PRAGMA temp_store = MEMORY")
|
||||||
|
|
||||||
# Incremental auto-vacuum: freed pages are reclaimable via
|
# Incremental auto-vacuum: freed pages are reclaimable via
|
||||||
# PRAGMA incremental_vacuum without a full VACUUM. Must be set before
|
# PRAGMA incremental_vacuum without a full VACUUM. Must be set before
|
||||||
# the first table is created (for new databases); for existing databases
|
# the first table is created (for new databases); for existing databases
|
||||||
|
|||||||
@@ -237,7 +237,13 @@ async def on_new_contact(event: "Event") -> None:
|
|||||||
logger.debug("New contact: %s", public_key[:12])
|
logger.debug("New contact: %s", public_key[:12])
|
||||||
|
|
||||||
contact_upsert = ContactUpsert.from_radio_dict(public_key.lower(), payload, on_radio=False)
|
contact_upsert = ContactUpsert.from_radio_dict(public_key.lower(), payload, on_radio=False)
|
||||||
contact_upsert.last_seen = int(time.time())
|
# Intentionally do not set first_seen or last_seen here: NEW_CONTACT
|
||||||
|
# fires from the radio's stored contact DB, not an RF observation.
|
||||||
|
# Both first_seen and last_seen are RF-only timestamps — they track
|
||||||
|
# the first and most recent time we actually heard this pubkey over
|
||||||
|
# the air (adverts, messages, path updates). Contacts synced from the
|
||||||
|
# radio's internal DB without any RF activity stay NULL until a real
|
||||||
|
# RF observation fills them in.
|
||||||
await ContactRepository.upsert(contact_upsert)
|
await ContactRepository.upsert(contact_upsert)
|
||||||
promoted_keys = await promote_prefix_contacts_for_contact(
|
promoted_keys = await promote_prefix_contacts_for_contact(
|
||||||
public_key=public_key,
|
public_key=public_key,
|
||||||
|
|||||||
@@ -144,8 +144,8 @@ Amazon SQS delivery. Config blob:
|
|||||||
- Supports both decoded messages and raw packets via normal scope selection
|
- Supports both decoded messages and raw packets via normal scope selection
|
||||||
|
|
||||||
### map_upload (map_upload.py)
|
### map_upload (map_upload.py)
|
||||||
Uploads heard repeater and room-server advertisements to map.meshcore.dev. Config blob:
|
Uploads heard repeater and room-server advertisements to map.meshcore.io. Config blob:
|
||||||
- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.dev API
|
- `api_url` (optional, default `""`) — upload endpoint; empty falls back to the public map.meshcore.io API
|
||||||
- `dry_run` (bool, default `true`) — when true, logs the payload at INFO level without sending
|
- `dry_run` (bool, default `true`) — when true, logs the payload at INFO level without sending
|
||||||
- `geofence_enabled` (bool, default `false`) — when true, only uploads nodes within `geofence_radius_km` of the radio's own configured lat/lon
|
- `geofence_enabled` (bool, default `false`) — when true, only uploads nodes within `geofence_radius_km` of the radio's own configured lat/lon
|
||||||
- `geofence_radius_km` (float, default `0`) — filter radius in kilometres
|
- `geofence_radius_km` (float, default `0`) — filter radius in kilometres
|
||||||
|
|||||||
@@ -31,12 +31,14 @@ def _register_module_types() -> None:
|
|||||||
from app.fanout.bot import BotModule
|
from app.fanout.bot import BotModule
|
||||||
from app.fanout.map_upload import MapUploadModule
|
from app.fanout.map_upload import MapUploadModule
|
||||||
from app.fanout.mqtt_community import MqttCommunityModule
|
from app.fanout.mqtt_community import MqttCommunityModule
|
||||||
|
from app.fanout.mqtt_ha import MqttHaModule
|
||||||
from app.fanout.mqtt_private import MqttPrivateModule
|
from app.fanout.mqtt_private import MqttPrivateModule
|
||||||
from app.fanout.sqs import SqsModule
|
from app.fanout.sqs import SqsModule
|
||||||
from app.fanout.webhook import WebhookModule
|
from app.fanout.webhook import WebhookModule
|
||||||
|
|
||||||
_MODULE_TYPES["mqtt_private"] = MqttPrivateModule
|
_MODULE_TYPES["mqtt_private"] = MqttPrivateModule
|
||||||
_MODULE_TYPES["mqtt_community"] = MqttCommunityModule
|
_MODULE_TYPES["mqtt_community"] = MqttCommunityModule
|
||||||
|
_MODULE_TYPES["mqtt_ha"] = MqttHaModule
|
||||||
_MODULE_TYPES["bot"] = BotModule
|
_MODULE_TYPES["bot"] = BotModule
|
||||||
_MODULE_TYPES["webhook"] = WebhookModule
|
_MODULE_TYPES["webhook"] = WebhookModule
|
||||||
_MODULE_TYPES["apprise"] = AppriseModule
|
_MODULE_TYPES["apprise"] = AppriseModule
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
"""Fanout module for uploading heard advert packets to map.meshcore.dev.
|
"""Fanout module for uploading heard advert packets to map.meshcore.io.
|
||||||
|
|
||||||
Mirrors the logic of the standalone map.meshcore.dev-uploader project:
|
Mirrors the logic of the standalone map.meshcore.dev-uploader project
|
||||||
|
(historical name; the live service is now hosted at map.meshcore.io):
|
||||||
- Listens on raw RF packets via on_raw
|
- Listens on raw RF packets via on_raw
|
||||||
- Filters for ADVERT packets, only processes repeaters (role 2) and rooms (role 3)
|
- Filters for ADVERT packets, only processes repeaters (role 2) and rooms (role 3)
|
||||||
- Skips nodes with no valid location (lat/lon None)
|
- Skips nodes with no valid location (lat/lon None)
|
||||||
@@ -16,7 +17,7 @@ the raw hex link.
|
|||||||
Config keys
|
Config keys
|
||||||
-----------
|
-----------
|
||||||
api_url : str, default ""
|
api_url : str, default ""
|
||||||
Upload endpoint. Empty string falls back to the public map.meshcore.dev API.
|
Upload endpoint. Empty string falls back to the public map.meshcore.io API.
|
||||||
dry_run : bool, default True
|
dry_run : bool, default True
|
||||||
When True, log the payload at INFO level instead of sending it.
|
When True, log the payload at INFO level instead of sending it.
|
||||||
geofence_enabled : bool, default False
|
geofence_enabled : bool, default False
|
||||||
@@ -46,7 +47,7 @@ from app.services.radio_runtime import radio_runtime
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
_DEFAULT_API_URL = "https://map.meshcore.dev/api/v1/uploader/node"
|
_DEFAULT_API_URL = "https://map.meshcore.io/api/v1/uploader/node"
|
||||||
|
|
||||||
# Re-upload guard: skip re-uploading a pubkey seen within this window (AU parity)
|
# Re-upload guard: skip re-uploading a pubkey seen within this window (AU parity)
|
||||||
_REUPLOAD_SECONDS = 3600
|
_REUPLOAD_SECONDS = 3600
|
||||||
|
|||||||
@@ -0,0 +1,780 @@
|
|||||||
|
"""Home Assistant MQTT Discovery fanout module.
|
||||||
|
|
||||||
|
Publishes HA-compatible discovery configs and state updates so that mesh
|
||||||
|
network devices appear natively in Home Assistant via its built-in MQTT
|
||||||
|
integration. No custom HA component is needed.
|
||||||
|
|
||||||
|
Entity types created:
|
||||||
|
- Local radio: binary_sensor (connectivity) + sensors (noise floor, battery,
|
||||||
|
uptime, RSSI, SNR, airtime, packet counts)
|
||||||
|
- Per tracked repeater: sensor entities for telemetry fields
|
||||||
|
- Per tracked contact: device_tracker for GPS position
|
||||||
|
- Messages: event entity for scope-matched messages
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import ssl
|
||||||
|
from types import SimpleNamespace
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.fanout.base import FanoutModule, get_fanout_message_text
|
||||||
|
from app.fanout.mqtt_base import BaseMqttPublisher
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# ── Repeater telemetry sensor definitions ─────────────────────────────────
|
||||||
|
|
||||||
|
_REPEATER_SENSORS: list[dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"field": "battery_volts",
|
||||||
|
"name": "Battery Voltage",
|
||||||
|
"object_id": "battery_voltage",
|
||||||
|
"device_class": "voltage",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "V",
|
||||||
|
"precision": 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "noise_floor_dbm",
|
||||||
|
"name": "Noise Floor",
|
||||||
|
"object_id": "noise_floor",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_rssi_dbm",
|
||||||
|
"name": "Last RSSI",
|
||||||
|
"object_id": "last_rssi",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_snr_db",
|
||||||
|
"name": "Last SNR",
|
||||||
|
"object_id": "last_snr",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dB",
|
||||||
|
"precision": 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_received",
|
||||||
|
"name": "Packets Received",
|
||||||
|
"object_id": "packets_received",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_sent",
|
||||||
|
"name": "Packets Sent",
|
||||||
|
"object_id": "packets_sent",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "uptime_seconds",
|
||||||
|
"name": "Uptime",
|
||||||
|
"object_id": "uptime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": None,
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# ── LPP sensor metadata ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
_LPP_HA_META: dict[str, dict[str, Any]] = {
|
||||||
|
"temperature": {"device_class": "temperature", "unit": "°C", "precision": 1},
|
||||||
|
"humidity": {"device_class": "humidity", "unit": "%", "precision": 1},
|
||||||
|
"barometer": {"device_class": "atmospheric_pressure", "unit": "hPa", "precision": 1},
|
||||||
|
"voltage": {"device_class": "voltage", "unit": "V", "precision": 2},
|
||||||
|
"current": {"device_class": "current", "unit": "mA", "precision": 1},
|
||||||
|
"luminosity": {"device_class": "illuminance", "unit": "lux", "precision": 0},
|
||||||
|
"power": {"device_class": "power", "unit": "W", "precision": 1},
|
||||||
|
"energy": {"device_class": "energy", "unit": "kWh", "precision": 2},
|
||||||
|
"distance": {"device_class": "distance", "unit": "mm", "precision": 0},
|
||||||
|
"concentration": {"device_class": None, "unit": "ppm", "precision": 0},
|
||||||
|
"direction": {"device_class": None, "unit": "°", "precision": 0},
|
||||||
|
"altitude": {"device_class": None, "unit": "m", "precision": 1},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _lpp_sensor_key(type_name: str, channel: int) -> str:
|
||||||
|
"""Build the flat telemetry-payload key for an LPP sensor."""
|
||||||
|
return f"lpp_{type_name}_ch{channel}"
|
||||||
|
|
||||||
|
|
||||||
|
def _repeater_telemetry_payload(data: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
"""Build the flat HA state payload for a repeater telemetry snapshot."""
|
||||||
|
payload: dict[str, Any] = {}
|
||||||
|
for sensor in _REPEATER_SENSORS:
|
||||||
|
field = sensor["field"]
|
||||||
|
if field is not None:
|
||||||
|
payload[field] = data.get(field)
|
||||||
|
|
||||||
|
for sensor in data.get("lpp_sensors", []) or []:
|
||||||
|
key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
|
||||||
|
payload[key] = sensor.get("value")
|
||||||
|
|
||||||
|
return payload
|
||||||
|
|
||||||
|
|
||||||
|
def _lpp_discovery_configs(
|
||||||
|
prefix: str,
|
||||||
|
pub_key: str,
|
||||||
|
device: dict,
|
||||||
|
lpp_sensors: list[dict],
|
||||||
|
state_topic: str,
|
||||||
|
) -> list[tuple[str, dict]]:
|
||||||
|
"""Build HA discovery configs for a repeater's LPP sensors."""
|
||||||
|
configs: list[tuple[str, dict]] = []
|
||||||
|
for sensor in lpp_sensors:
|
||||||
|
type_name = sensor.get("type_name", "unknown")
|
||||||
|
channel = sensor.get("channel", 0)
|
||||||
|
field = _lpp_sensor_key(type_name, channel)
|
||||||
|
meta = _LPP_HA_META.get(type_name, {})
|
||||||
|
|
||||||
|
nid = _node_id(pub_key)
|
||||||
|
object_id = field
|
||||||
|
display = type_name.replace("_", " ").title()
|
||||||
|
name = f"{display} (Ch {channel})"
|
||||||
|
|
||||||
|
cfg: dict[str, Any] = {
|
||||||
|
"name": name,
|
||||||
|
"unique_id": f"meshcore_{nid}_{object_id}",
|
||||||
|
"device": device,
|
||||||
|
"state_topic": state_topic,
|
||||||
|
"value_template": "{{ value_json." + field + " }}",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"expire_after": 36000,
|
||||||
|
}
|
||||||
|
if meta.get("device_class"):
|
||||||
|
cfg["device_class"] = meta["device_class"]
|
||||||
|
if meta.get("unit"):
|
||||||
|
cfg["unit_of_measurement"] = meta["unit"]
|
||||||
|
if meta.get("precision") is not None:
|
||||||
|
cfg["suggested_display_precision"] = meta["precision"]
|
||||||
|
|
||||||
|
topic = f"homeassistant/sensor/meshcore_{nid}/{object_id}/config"
|
||||||
|
configs.append((topic, cfg))
|
||||||
|
|
||||||
|
return configs
|
||||||
|
|
||||||
|
|
||||||
|
# ── Local radio sensor definitions ────────────────────────────────────────
|
||||||
|
|
||||||
|
_RADIO_SENSORS: list[dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"field": "noise_floor_dbm",
|
||||||
|
"name": "Noise Floor",
|
||||||
|
"object_id": "noise_floor",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "battery_volts",
|
||||||
|
"name": "Battery",
|
||||||
|
"object_id": "battery",
|
||||||
|
"device_class": "voltage",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "V",
|
||||||
|
"precision": 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "uptime_secs",
|
||||||
|
"name": "Uptime",
|
||||||
|
"object_id": "uptime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": None,
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_rssi",
|
||||||
|
"name": "Last RSSI",
|
||||||
|
"object_id": "last_rssi",
|
||||||
|
"device_class": "signal_strength",
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dBm",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "last_snr",
|
||||||
|
"name": "Last SNR",
|
||||||
|
"object_id": "last_snr",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "measurement",
|
||||||
|
"unit": "dB",
|
||||||
|
"precision": 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "tx_air_secs",
|
||||||
|
"name": "TX Airtime",
|
||||||
|
"object_id": "tx_airtime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "rx_air_secs",
|
||||||
|
"name": "RX Airtime",
|
||||||
|
"object_id": "rx_airtime",
|
||||||
|
"device_class": "duration",
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": "s",
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_recv",
|
||||||
|
"name": "Packets Received",
|
||||||
|
"object_id": "packets_received",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field": "packets_sent",
|
||||||
|
"name": "Packets Sent",
|
||||||
|
"object_id": "packets_sent",
|
||||||
|
"device_class": None,
|
||||||
|
"state_class": "total_increasing",
|
||||||
|
"unit": None,
|
||||||
|
"precision": 0,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _node_id(public_key: str) -> str:
|
||||||
|
"""Derive a stable, MQTT-safe node identifier from a public key."""
|
||||||
|
return public_key[:12].lower()
|
||||||
|
|
||||||
|
|
||||||
|
def _device_payload(
|
||||||
|
public_key: str,
|
||||||
|
name: str,
|
||||||
|
model: str,
|
||||||
|
*,
|
||||||
|
via_device_key: str | None = None,
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""Build an HA device registry fragment."""
|
||||||
|
dev: dict[str, Any] = {
|
||||||
|
"identifiers": [f"meshcore_{_node_id(public_key)}"],
|
||||||
|
"name": name or public_key[:12],
|
||||||
|
"manufacturer": "MeshCore",
|
||||||
|
"model": model,
|
||||||
|
}
|
||||||
|
if via_device_key:
|
||||||
|
dev["via_device"] = f"meshcore_{_node_id(via_device_key)}"
|
||||||
|
return dev
|
||||||
|
|
||||||
|
|
||||||
|
# ── MQTT publisher subclass ───────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
class _HaMqttPublisher(BaseMqttPublisher):
|
||||||
|
"""Thin MQTT lifecycle wrapper for the HA discovery module."""
|
||||||
|
|
||||||
|
_backoff_max = 30
|
||||||
|
_log_prefix = "HA-MQTT"
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._on_connected_callback: Any = None
|
||||||
|
|
||||||
|
def _is_configured(self) -> bool:
|
||||||
|
s = self._settings
|
||||||
|
return bool(s and s.broker_host)
|
||||||
|
|
||||||
|
def _build_client_kwargs(self, settings: object) -> dict[str, Any]:
|
||||||
|
s: Any = settings
|
||||||
|
kw: dict[str, Any] = {
|
||||||
|
"hostname": s.broker_host,
|
||||||
|
"port": s.broker_port,
|
||||||
|
"username": s.username or None,
|
||||||
|
"password": s.password or None,
|
||||||
|
}
|
||||||
|
if s.use_tls:
|
||||||
|
ctx = ssl.create_default_context()
|
||||||
|
if s.tls_insecure:
|
||||||
|
ctx.check_hostname = False
|
||||||
|
ctx.verify_mode = ssl.CERT_NONE
|
||||||
|
kw["tls_context"] = ctx
|
||||||
|
return kw
|
||||||
|
|
||||||
|
def _on_connected(self, settings: object) -> tuple[str, str]:
|
||||||
|
s: Any = settings
|
||||||
|
return ("HA MQTT connected", f"{s.broker_host}:{s.broker_port}")
|
||||||
|
|
||||||
|
def _on_error(self) -> tuple[str, str]:
|
||||||
|
return ("HA MQTT connection failure", "Please correct the settings or disable.")
|
||||||
|
|
||||||
|
async def _on_connected_async(self, settings: object) -> None:
|
||||||
|
if self._on_connected_callback:
|
||||||
|
await self._on_connected_callback()
|
||||||
|
|
||||||
|
|
||||||
|
# ── Discovery config builders ─────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
def _radio_discovery_configs(
|
||||||
|
prefix: str,
|
||||||
|
radio_key: str,
|
||||||
|
radio_name: str,
|
||||||
|
) -> list[tuple[str, dict]]:
|
||||||
|
"""Build HA discovery config payloads for the local radio device."""
|
||||||
|
nid = _node_id(radio_key)
|
||||||
|
device = _device_payload(radio_key, radio_name, "Radio")
|
||||||
|
state_topic = f"{prefix}/{nid}/health"
|
||||||
|
configs: list[tuple[str, dict]] = []
|
||||||
|
|
||||||
|
# binary_sensor: connected
|
||||||
|
configs.append(
|
||||||
|
(
|
||||||
|
f"homeassistant/binary_sensor/meshcore_{nid}/connected/config",
|
||||||
|
{
|
||||||
|
"name": "Connected",
|
||||||
|
"unique_id": f"meshcore_{nid}_connected",
|
||||||
|
"device": device,
|
||||||
|
"state_topic": state_topic,
|
||||||
|
"value_template": "{{ 'ON' if value_json.connected else 'OFF' }}",
|
||||||
|
"device_class": "connectivity",
|
||||||
|
"payload_on": "ON",
|
||||||
|
"payload_off": "OFF",
|
||||||
|
"expire_after": 120,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# sensors from _RADIO_SENSORS (noise floor, battery, uptime, RSSI, etc.)
|
||||||
|
for sensor in _RADIO_SENSORS:
|
||||||
|
cfg: dict[str, Any] = {
|
||||||
|
"name": sensor["name"],
|
||||||
|
"unique_id": f"meshcore_{nid}_{sensor['object_id']}",
|
||||||
|
"device": device,
|
||||||
|
"state_topic": state_topic,
|
||||||
|
"value_template": "{{ value_json." + sensor["field"] + " }}", # type: ignore[operator]
|
||||||
|
"expire_after": 120,
|
||||||
|
}
|
||||||
|
if sensor["device_class"]:
|
||||||
|
cfg["device_class"] = sensor["device_class"]
|
||||||
|
if sensor["state_class"]:
|
||||||
|
cfg["state_class"] = sensor["state_class"]
|
||||||
|
if sensor["unit"]:
|
||||||
|
cfg["unit_of_measurement"] = sensor["unit"]
|
||||||
|
if sensor.get("precision") is not None:
|
||||||
|
cfg["suggested_display_precision"] = sensor["precision"]
|
||||||
|
|
||||||
|
topic = f"homeassistant/sensor/meshcore_{nid}/{sensor['object_id']}/config"
|
||||||
|
configs.append((topic, cfg))
|
||||||
|
|
||||||
|
return configs
|
||||||
|
|
||||||
|
|
||||||
|
def _repeater_discovery_configs(
    prefix: str,
    pub_key: str,
    name: str,
    radio_key: str | None,
) -> list[tuple[str, dict]]:
    """Build HA discovery config payloads for a tracked repeater.

    Returns a list of ``(discovery_topic, config_payload)`` pairs, one per
    entry in ``_REPEATER_SENSORS``, all pointing at the repeater's shared
    telemetry state topic.
    """
    node = _node_id(pub_key)
    dev = _device_payload(pub_key, name, "Repeater", via_device_key=radio_key)
    telemetry_topic = f"{prefix}/{node}/telemetry"

    result: list[tuple[str, dict]] = []
    for spec in _REPEATER_SENSORS:
        payload: dict[str, Any] = {
            "name": spec["name"],
            "unique_id": f"meshcore_{node}_{spec['object_id']}",
            "device": dev,
            "state_topic": telemetry_topic,
            "value_template": "{{ value_json." + spec["field"] + " }}",  # type: ignore[operator]
        }
        # Optional HA attributes — only emitted when the sensor spec sets them.
        if spec["device_class"]:
            payload["device_class"] = spec["device_class"]
        if spec["state_class"]:
            payload["state_class"] = spec["state_class"]
        if spec["unit"]:
            payload["unit_of_measurement"] = spec["unit"]
        if spec.get("precision") is not None:
            payload["suggested_display_precision"] = spec["precision"]
        # 10 hours — margin over the 8-hour auto-collect cycle
        payload["expire_after"] = 36000

        result.append(
            (
                f"homeassistant/sensor/meshcore_{node}/{spec['object_id']}/config",
                payload,
            )
        )

    return result
|
||||||
|
|
||||||
|
|
||||||
|
def _contact_tracker_discovery_config(
    prefix: str,
    pub_key: str,
    name: str,
    radio_key: str | None,
) -> tuple[str, dict]:
    """Build HA discovery config for a tracked contact's device_tracker.

    The tracker reads GPS attributes from ``{prefix}/{node}/gps`` and falls
    back to the first 12 characters of the public key when no name is known.
    """
    node = _node_id(pub_key)
    payload: dict[str, Any] = {
        "name": name or pub_key[:12],
        "unique_id": f"meshcore_{node}_tracker",
        "device": _device_payload(pub_key, name, "Node", via_device_key=radio_key),
        "json_attributes_topic": f"{prefix}/{node}/gps",
        "source_type": "gps",
    }
    return f"homeassistant/device_tracker/meshcore_{node}/config", payload
|
||||||
|
|
||||||
|
|
||||||
|
def _message_event_discovery_config(
    prefix: str, radio_key: str, radio_name: str
) -> tuple[str, dict]:
    """Build HA discovery config for the message event entity.

    Exposes incoming mesh messages as an HA ``event`` entity scoped to the
    local radio device; state arrives on ``{prefix}/{node}/events/message``.
    """
    node = _node_id(radio_key)
    payload: dict[str, Any] = {
        "name": "MeshCore Messages",
        "unique_id": f"meshcore_{node}_messages",
        "device": _device_payload(radio_key, radio_name, "Radio"),
        "state_topic": f"{prefix}/{node}/events/message",
        "event_types": ["message_received"],
    }
    return f"homeassistant/event/meshcore_{node}/messages/config", payload
|
||||||
|
|
||||||
|
|
||||||
|
# ── Module class ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
def _config_to_settings(config: dict) -> SimpleNamespace:
|
||||||
|
return SimpleNamespace(
|
||||||
|
broker_host=config.get("broker_host", ""),
|
||||||
|
broker_port=config.get("broker_port", 1883),
|
||||||
|
username=config.get("username", ""),
|
||||||
|
password=config.get("password", ""),
|
||||||
|
use_tls=config.get("use_tls", False),
|
||||||
|
tls_insecure=config.get("tls_insecure", False),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MqttHaModule(FanoutModule):
    """Home Assistant MQTT Discovery fanout module.

    Publishes retained HA discovery configs for the local radio, tracked
    repeaters, and tracked contacts, then streams state (health, telemetry,
    GPS, message events) to per-node topics under the configured prefix.
    Discovery is (re-)published whenever the broker connects or the radio
    identity changes.
    """

    def __init__(self, config_id: str, config: dict, *, name: str = "") -> None:
        super().__init__(config_id, config, name=name)
        self._publisher = _HaMqttPublisher()
        self._publisher.set_integration_name(name or config_id)
        # Re-publish discovery every time the broker (re)connects.
        self._publisher._on_connected_callback = self._publish_discovery
        # Retained discovery topics we have published, so stop()/identity
        # changes can clear them from the broker.
        self._discovery_topics: list[str] = []
        # Radio identity cache — None until seeded from runtime or health.
        self._radio_key: str | None = None
        self._radio_name: str | None = None

    @property
    def _prefix(self) -> str:
        # State-topic prefix; defaults to "meshcore" when unconfigured.
        return self.config.get("topic_prefix", "meshcore")

    @property
    def _tracked_contacts(self) -> list[str]:
        # Public keys of contacts exposed as HA device_trackers.
        return self.config.get("tracked_contacts") or []

    @property
    def _tracked_repeaters(self) -> list[str]:
        # Public keys of repeaters exposed as HA sensor devices.
        return self.config.get("tracked_repeaters") or []

    # ── Lifecycle ──────────────────────────────────────────────────────

    async def start(self) -> None:
        """Seed radio identity (best-effort) and connect to the broker."""
        self._seed_radio_identity_from_runtime()
        settings = _config_to_settings(self.config)
        await self._publisher.start(settings)

    async def stop(self) -> None:
        """Remove published discovery entries, then disconnect."""
        await self._remove_discovery()
        await self._publisher.stop()
        self._discovery_topics.clear()

    # ── Discovery publishing ──────────────────────────────────────────

    async def _publish_discovery(self) -> None:
        """Publish HA discovery configs and one-shot cached repeater state."""
        if not self._radio_key:
            # Don't publish discovery until we know the radio identity —
            # the first health heartbeat will provide it and trigger this.
            return

        configs: list[tuple[str, dict]] = []
        cached_repeater_states: list[tuple[str, dict[str, Any]]] = []

        radio_name = self._radio_name or "MeshCore Radio"
        configs.extend(_radio_discovery_configs(self._prefix, self._radio_key, radio_name))

        # Tracked repeaters — resolve names and LPP sensors from DB best-effort
        for pub_key in self._tracked_repeaters:
            rname = await self._resolve_contact_name(pub_key)
            configs.extend(
                _repeater_discovery_configs(self._prefix, pub_key, rname, self._radio_key)
            )
            latest = await self._resolve_latest_telemetry(pub_key)
            latest_data = latest.get("data", {}) if latest else {}
            # Dynamic LPP sensor entities from last known telemetry snapshot
            lpp_sensors = latest_data.get("lpp_sensors", [])
            if lpp_sensors:
                nid = _node_id(pub_key)
                device = _device_payload(pub_key, rname, "Repeater", via_device_key=self._radio_key)
                state_topic = f"{self._prefix}/{nid}/telemetry"
                configs.extend(
                    _lpp_discovery_configs(self._prefix, pub_key, device, lpp_sensors, state_topic)
                )
            if latest_data:
                cached_repeater_states.append(
                    (
                        f"{self._prefix}/{_node_id(pub_key)}/telemetry",
                        _repeater_telemetry_payload(latest_data),
                    )
                )

        # Tracked contacts — resolve names from DB best-effort
        for pub_key in self._tracked_contacts:
            cname = await self._resolve_contact_name(pub_key)
            configs.append(
                _contact_tracker_discovery_config(self._prefix, pub_key, cname, self._radio_key)
            )

        # Message event entity (namespaced to this radio)
        configs.append(_message_event_discovery_config(self._prefix, self._radio_key, radio_name))

        self._discovery_topics = [topic for topic, _ in configs]

        for topic, payload in configs:
            await self._publisher.publish(topic, payload, retain=True)

        for topic, payload in cached_repeater_states:
            # Replay cached state after discovery so newly created HA entities
            # populate immediately, but do not retain it or HA will treat a
            # broker reconnect as fresh telemetry and reset expire_after.
            await self._publisher.publish(topic, payload)

        logger.info(
            "HA MQTT: published %d discovery configs (%d repeaters, %d contacts, %d cached telemetry states)",
            len(configs),
            len(self._tracked_repeaters),
            len(self._tracked_contacts),
            len(cached_repeater_states),
        )

    async def _clear_retained_topics(self, topics: list[str]) -> None:
        """Publish empty retained payloads to remove entries from broker."""
        for topic in topics:
            try:
                # NOTE(review): reaches into the publisher's private client to
                # send a raw empty retained message — confirm the publisher
                # wrapper has no public API for this.
                if self._publisher._client:
                    await self._publisher._client.publish(topic, b"", retain=True)
            except Exception:
                pass  # best-effort cleanup

    async def _remove_discovery(self) -> None:
        """Publish empty retained payloads to remove all HA entities."""
        if not self._publisher.connected or not self._discovery_topics:
            return
        await self._clear_retained_topics(self._discovery_topics)

    @staticmethod
    async def _resolve_contact_name(pub_key: str) -> str:
        """Look up a contact's display name, falling back to 12-char prefix."""
        try:
            # Imported lazily to avoid a hard dependency at module load time.
            from app.repository.contacts import ContactRepository

            contact = await ContactRepository.get_by_key(pub_key)
            if contact and contact.name:
                return contact.name
        except Exception:
            pass
        return pub_key[:12]

    @staticmethod
    async def _resolve_latest_telemetry(pub_key: str) -> dict | None:
        """Return the most recent telemetry row for a repeater, or None."""
        try:
            from app.repository.repeater_telemetry import RepeaterTelemetryRepository

            return await RepeaterTelemetryRepository.get_latest(pub_key)
        except Exception:
            pass
        return None

    def _seed_radio_identity_from_runtime(self) -> None:
        """Best-effort bootstrap from the currently connected radio session."""
        try:
            from app.services.radio_runtime import radio_runtime

            if not radio_runtime.is_connected:
                return

            mc = radio_runtime.meshcore
            self_info = mc.self_info if mc is not None else None
            if not isinstance(self_info, dict):
                return

            pub_key = self_info.get("public_key")
            if isinstance(pub_key, str) and pub_key.strip():
                # Keys are normalized to lowercase for topic/unique-id use.
                self._radio_key = pub_key.strip().lower()

            name = self_info.get("name")
            if isinstance(name, str) and name.strip():
                self._radio_name = name.strip()
        except Exception:
            logger.debug("HA MQTT: failed to seed radio identity from runtime", exc_info=True)

    # ── Event handlers ────────────────────────────────────────────────

    async def on_health(self, data: dict) -> None:
        """Track radio identity changes and publish the health state topic."""
        if not self._publisher.connected:
            return

        # Cache radio identity for discovery config generation
        pub_key = data.get("public_key")
        if pub_key:
            new_name = data.get("name")
            key_changed = pub_key != self._radio_key
            name_changed = new_name and new_name != self._radio_name

            if key_changed:
                old_key = self._radio_key
                old_topics = list(self._discovery_topics)
                if old_topics:
                    await self._clear_retained_topics(old_topics)
                    self._discovery_topics.clear()
                self._radio_key = pub_key
                self._radio_name = new_name
                # Remove stale discovery entries from the old identity (e.g.
                # "unknown" placeholder from before the radio key was known),
                # then re-publish with the real identity.
                if old_key is not None and not old_topics:
                    await self._clear_retained_topics(
                        [t for t, _ in _radio_discovery_configs(self._prefix, old_key, "")]
                    )
                await self._publish_discovery()
            elif name_changed:
                self._radio_name = new_name
                await self._publish_discovery()

        # Don't publish health state until we know the radio identity —
        # otherwise we create a stale "unknown" device in HA.
        if not self._radio_key:
            return

        nid = _node_id(self._radio_key)
        payload: dict[str, Any] = {"connected": data.get("connected", False)}
        for sensor in _RADIO_SENSORS:
            field = sensor["field"]
            if field is not None:
                payload[field] = data.get(field)

        # Normalize battery from millivolts to volts for consistency with
        # repeater battery and the discovery config (unit: V, precision: 2).
        battery_mv = data.get("battery_mv")
        if battery_mv is not None:
            payload["battery_volts"] = battery_mv / 1000.0

        await self._publisher.publish(f"{self._prefix}/{nid}/health", payload)

    async def on_contact(self, data: dict) -> None:
        """Publish a GPS attributes payload for tracked contacts."""
        if not self._publisher.connected:
            return

        pub_key = data.get("public_key", "")
        if pub_key not in self._tracked_contacts:
            return

        lat = data.get("lat")
        lon = data.get("lon")
        # (0.0, 0.0) is skipped — presumably the "no GPS fix" placeholder;
        # confirm against the contact event producer.
        if lat is None or lon is None or (lat == 0.0 and lon == 0.0):
            return

        nid = _node_id(pub_key)
        await self._publisher.publish(
            f"{self._prefix}/{nid}/gps",
            {
                "latitude": lat,
                "longitude": lon,
                "gps_accuracy": 0,
                "source_type": "gps",
            },
        )

    async def on_telemetry(self, data: dict) -> None:
        """Publish repeater telemetry, re-publishing discovery for new LPP sensors."""
        if not self._publisher.connected:
            return

        pub_key = data.get("public_key", "")
        if pub_key not in self._tracked_repeaters:
            return

        nid = _node_id(pub_key)
        # Publish the full telemetry dict — HA sensors use value_template
        # to extract individual fields
        payload = _repeater_telemetry_payload(data)
        lpp_sensors: list[dict] = data.get("lpp_sensors", [])
        rediscover = False
        for sensor in lpp_sensors:
            # Check if discovery for this sensor has been published yet
            key = _lpp_sensor_key(sensor.get("type_name", "unknown"), sensor.get("channel", 0))
            expected_topic = f"homeassistant/sensor/meshcore_{nid}/{key}/config"
            if expected_topic not in self._discovery_topics:
                rediscover = True

        # If new LPP sensor types appeared, re-publish discovery *before*
        # the state payload so HA already knows the entity when the value arrives.
        if rediscover:
            await self._publish_discovery()

        await self._publisher.publish(f"{self._prefix}/{nid}/telemetry", payload)

    async def on_message(self, data: dict) -> None:
        """Publish a message event payload for the HA event entity."""
        if not self._publisher.connected or not self._radio_key:
            return

        text = get_fanout_message_text(data)
        nid = _node_id(self._radio_key)
        await self._publisher.publish(
            f"{self._prefix}/{nid}/events/message",
            {
                "event_type": "message_received",
                "sender_name": data.get("sender_name", ""),
                "sender_key": data.get("sender_key", ""),
                "text": text,
                "conversation_key": data.get("conversation_key", ""),
                "message_type": data.get("type", ""),
                "channel_name": data.get("channel_name"),
                "outgoing": data.get("outgoing", False),
            },
        )

    # ── Status ────────────────────────────────────────────────────────

    @property
    def status(self) -> str:
        """Return "connected", "disconnected", or "error" for the UI."""
        if not self.config.get("broker_host"):
            return "disconnected"
        if self._publisher.last_error:
            return "error"
        return "connected" if self._publisher.connected else "disconnected"

    @property
    def last_error(self) -> str | None:
        # Surface the publisher's last connection/publish error, if any.
        return self._publisher.last_error
|
||||||
@@ -135,7 +135,34 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
|||||||
"display_override": ["window-controls-overlay", "standalone", "fullscreen"],
|
"display_override": ["window-controls-overlay", "standalone", "fullscreen"],
|
||||||
"theme_color": "#111419",
|
"theme_color": "#111419",
|
||||||
"background_color": "#111419",
|
"background_color": "#111419",
|
||||||
|
# Icons are PNG-only on purpose. iOS Safari's manifest parser has
|
||||||
|
# historically been unreliable with SVG icons, and Android/Chrome
|
||||||
|
# PWA install flows prefer PNG for the install prompt.
|
||||||
|
#
|
||||||
|
# The "any" purpose entries are what iOS and desktop Chrome use
|
||||||
|
# for the home-screen / install icon. "maskable" entries are
|
||||||
|
# Android-only (adaptive icon with safe-zone crop); iOS does not
|
||||||
|
# apply the safe-zone mask, so a maskable-only icon set would
|
||||||
|
# render with excessive padding.
|
||||||
"icons": [
|
"icons": [
|
||||||
|
{
|
||||||
|
"src": f"{base}favicon-96x96.png",
|
||||||
|
"sizes": "96x96",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}apple-touch-icon.png",
|
||||||
|
"sizes": "180x180",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}favicon-256x256.png",
|
||||||
|
"sizes": "256x256",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any",
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"src": f"{base}web-app-manifest-192x192.png",
|
"src": f"{base}web-app-manifest-192x192.png",
|
||||||
"sizes": "192x192",
|
"sizes": "192x192",
|
||||||
@@ -149,6 +176,27 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
|||||||
"purpose": "maskable",
|
"purpose": "maskable",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
"screenshots": [
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-wide.png",
|
||||||
|
"sizes": "1367x909",
|
||||||
|
"type": "image/png",
|
||||||
|
"form_factor": "wide",
|
||||||
|
"label": "RemoteTerm desktop view",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-mobile.png",
|
||||||
|
"sizes": "1170x2532",
|
||||||
|
"type": "image/png",
|
||||||
|
"label": "RemoteTerm mobile view",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": f"{base}screenshot-mobile-2.png",
|
||||||
|
"sizes": "750x1334",
|
||||||
|
"type": "image/png",
|
||||||
|
"label": "RemoteTerm mobile conversation",
|
||||||
|
},
|
||||||
|
],
|
||||||
}
|
}
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
manifest,
|
manifest,
|
||||||
|
|||||||
+10
@@ -67,6 +67,7 @@ from app.routers import (
|
|||||||
health,
|
health,
|
||||||
messages,
|
messages,
|
||||||
packets,
|
packets,
|
||||||
|
push,
|
||||||
radio,
|
radio,
|
||||||
read_state,
|
read_state,
|
||||||
repeaters,
|
repeaters,
|
||||||
@@ -102,6 +103,14 @@ async def lifespan(app: FastAPI):
|
|||||||
await db.connect()
|
await db.connect()
|
||||||
logger.info("Database connected")
|
logger.info("Database connected")
|
||||||
|
|
||||||
|
# Initialize VAPID keys for Web Push (generates on first run)
|
||||||
|
from app.push.vapid import ensure_vapid_keys
|
||||||
|
|
||||||
|
try:
|
||||||
|
await ensure_vapid_keys()
|
||||||
|
except Exception:
|
||||||
|
logger.warning("Failed to initialize VAPID keys for Web Push", exc_info=True)
|
||||||
|
|
||||||
# Ensure default channels exist in the database even before the radio
|
# Ensure default channels exist in the database even before the radio
|
||||||
# connects. Without this, a fresh or disconnected instance would return
|
# connects. Without this, a fresh or disconnected instance would return
|
||||||
# zero channels from GET /channels until the first successful radio sync.
|
# zero channels from GET /channels until the first successful radio sync.
|
||||||
@@ -185,6 +194,7 @@ app.include_router(packets.router, prefix="/api")
|
|||||||
app.include_router(read_state.router, prefix="/api")
|
app.include_router(read_state.router, prefix="/api")
|
||||||
app.include_router(settings.router, prefix="/api")
|
app.include_router(settings.router, prefix="/api")
|
||||||
app.include_router(statistics.router, prefix="/api")
|
app.include_router(statistics.router, prefix="/api")
|
||||||
|
app.include_router(push.router, prefix="/api")
|
||||||
app.include_router(ws.router, prefix="/api")
|
app.include_router(ws.router, prefix="/api")
|
||||||
|
|
||||||
# Serve frontend static files in production
|
# Serve frontend static files in production
|
||||||
|
|||||||
@@ -0,0 +1,22 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import aiosqlite
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
async def migrate(conn: aiosqlite.Connection) -> None:
    """Add telemetry_interval_hours integer column to app_settings."""
    table_cursor = await conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
    existing_tables = {row[0] for row in await table_cursor.fetchall()}

    if "app_settings" in existing_tables:
        info_cursor = await conn.execute("PRAGMA table_info(app_settings)")
        existing_columns = {row[1] for row in await info_cursor.fetchall()}
        if "telemetry_interval_hours" not in existing_columns:
            # Default to 8 hours, matching the previous hard-coded interval
            # so existing users see no behavior change until they opt in.
            await conn.execute(
                "ALTER TABLE app_settings ADD COLUMN telemetry_interval_hours INTEGER DEFAULT 8"
            )

    await conn.commit()
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import aiosqlite
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
async def migrate(conn: aiosqlite.Connection) -> None:
    """Add Web Push support: VAPID keys, push subscriptions table, and global conversation list."""

    # VAPID key pair + global push conversation list in app_settings
    table_check = await conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='app_settings'"
    )
    if await table_check.fetchone():
        info_cursor = await conn.execute("PRAGMA table_info(app_settings)")
        present = {row[1] for row in await info_cursor.fetchall()}

        # Column name → DDL suffix; each added only when missing so the
        # migration is safe to re-run.
        wanted = (
            ("vapid_private_key", "TEXT DEFAULT ''"),
            ("vapid_public_key", "TEXT DEFAULT ''"),
            ("push_conversations", "TEXT DEFAULT '[]'"),
        )
        for column, ddl in wanted:
            if column not in present:
                await conn.execute(
                    f"ALTER TABLE app_settings ADD COLUMN {column} {ddl}"
                )

    # Push subscriptions — one row per browser/device
    await conn.execute(
        """
        CREATE TABLE IF NOT EXISTS push_subscriptions (
            id TEXT PRIMARY KEY,
            endpoint TEXT NOT NULL,
            p256dh TEXT NOT NULL,
            auth TEXT NOT NULL,
            label TEXT NOT NULL DEFAULT '',
            created_at INTEGER NOT NULL,
            last_success_at INTEGER,
            failure_count INTEGER DEFAULT 0,
            UNIQUE(endpoint)
        )
        """
    )

    await conn.commit()
|
||||||
@@ -842,6 +842,14 @@ class AppSettings(BaseModel):
|
|||||||
default_factory=list,
|
default_factory=list,
|
||||||
description="Public keys of repeaters opted into periodic telemetry collection (max 8)",
|
description="Public keys of repeaters opted into periodic telemetry collection (max 8)",
|
||||||
)
|
)
|
||||||
|
telemetry_interval_hours: int = Field(
|
||||||
|
default=8,
|
||||||
|
description=(
|
||||||
|
"User-preferred telemetry collection interval in hours. The backend "
|
||||||
|
"clamps this up to the shortest legal interval given the number of "
|
||||||
|
"tracked repeaters so daily checks stay under a 24/day ceiling."
|
||||||
|
),
|
||||||
|
)
|
||||||
auto_resend_channel: bool = Field(
|
auto_resend_channel: bool = Field(
|
||||||
default=False,
|
default=False,
|
||||||
description=(
|
description=(
|
||||||
|
|||||||
+13
-16
@@ -9,6 +9,7 @@ The path_len wire byte is packed as [hash_mode:2][hop_count:6]:
|
|||||||
Mode 3 (hash_size=4) is reserved and rejected.
|
Mode 3 (hash_size=4) is reserved and rejected.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Iterable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
|
||||||
MAX_PATH_SIZE = 64
|
MAX_PATH_SIZE = 64
|
||||||
@@ -246,30 +247,26 @@ def parse_explicit_hop_route(route_text: str) -> tuple[str, int, int]:
|
|||||||
return "".join(hops), len(hops), hash_size - 1
|
return "".join(hops), len(hops), hash_size - 1
|
||||||
|
|
||||||
|
|
||||||
async def bucket_path_hash_widths(cursor, *, batch_size: int = 500) -> dict[str, int | float]:
|
def bucket_path_hash_widths(rows: Iterable) -> dict[str, int | float]:
|
||||||
"""Bucket raw packet rows by hop hash width and return counts + percentages.
|
"""Bucket raw packet rows by hop hash width and return counts + percentages.
|
||||||
|
|
||||||
*cursor* must be an already-executed async cursor whose rows have a ``data``
|
*rows* must be an already-fetched list whose elements have a ``data``
|
||||||
column containing raw packet bytes.
|
column containing raw packet bytes.
|
||||||
"""
|
"""
|
||||||
single_byte = 0
|
single_byte = 0
|
||||||
double_byte = 0
|
double_byte = 0
|
||||||
triple_byte = 0
|
triple_byte = 0
|
||||||
|
|
||||||
while True:
|
for row in rows:
|
||||||
rows = await cursor.fetchmany(batch_size)
|
envelope = parse_packet_envelope(bytes(row["data"]))
|
||||||
if not rows:
|
if envelope is None:
|
||||||
break
|
continue
|
||||||
for row in rows:
|
if envelope.hash_size == 1:
|
||||||
envelope = parse_packet_envelope(bytes(row["data"]))
|
single_byte += 1
|
||||||
if envelope is None:
|
elif envelope.hash_size == 2:
|
||||||
continue
|
double_byte += 1
|
||||||
if envelope.hash_size == 1:
|
elif envelope.hash_size == 3:
|
||||||
single_byte += 1
|
triple_byte += 1
|
||||||
elif envelope.hash_size == 2:
|
|
||||||
double_byte += 1
|
|
||||||
elif envelope.hash_size == 3:
|
|
||||||
triple_byte += 1
|
|
||||||
|
|
||||||
total = single_byte + double_byte + triple_byte
|
total = single_byte + double_byte + triple_byte
|
||||||
if total == 0:
|
if total == 0:
|
||||||
|
|||||||
@@ -0,0 +1,172 @@
|
|||||||
|
"""Web Push dispatch manager.
|
||||||
|
|
||||||
|
Checks the global push-enabled conversation list (stored in app_settings)
|
||||||
|
and sends push notifications to ALL registered devices when a matching
|
||||||
|
incoming message arrives.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from pywebpush import WebPushException
|
||||||
|
|
||||||
|
from app.push.send import send_push
|
||||||
|
from app.push.vapid import get_vapid_private_key
|
||||||
|
from app.repository.push_subscriptions import PushSubscriptionRepository
|
||||||
|
from app.repository.settings import AppSettingsRepository
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_SEND_TIMEOUT = 15 # seconds per push send
|
||||||
|
_VAPID_CLAIMS = {"sub": "mailto:noreply@meshcore.local"}
|
||||||
|
|
||||||
|
|
||||||
|
def _state_key_for_message(data: dict) -> str:
|
||||||
|
"""Derive the conversation state key from a message event payload."""
|
||||||
|
msg_type = data.get("type", "")
|
||||||
|
conversation_key = data.get("conversation_key", "")
|
||||||
|
if msg_type == "PRIV":
|
||||||
|
return f"contact-{conversation_key}"
|
||||||
|
return f"channel-{conversation_key}"
|
||||||
|
|
||||||
|
|
||||||
|
def _build_payload(data: dict) -> str:
    """Build the push notification JSON payload from a message event.

    Returns a JSON string with ``title``, ``body``, a per-conversation
    ``tag``, and a ``url_hash`` the service worker uses to deep-link into
    the right conversation.
    """
    is_dm = data.get("type", "") == "PRIV"
    body = data.get("text", "")
    sender_name = data.get("sender_name") or ""
    channel_name = data.get("channel_name") or ""
    conversation_key = data.get("conversation_key", "")

    if is_dm:
        title = f"Message from {sender_name}" if sender_name else "New direct message"
        url_hash = f"#contact/{conversation_key}"
    else:
        title = channel_name if channel_name else "Channel message"
        url_hash = f"#channel/{conversation_key}"

    return json.dumps(
        {
            "title": title,
            "body": body,
            # Tag per conversation so different conversations coexist in the
            # notification tray, while repeated messages in the same
            # conversation replace each other.
            "tag": f"meshcore-{_state_key_for_message(data)}",
            "url_hash": url_hash,
        }
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _subscription_info(sub: dict) -> dict:
|
||||||
|
"""Build the subscription_info dict that pywebpush expects."""
|
||||||
|
return {
|
||||||
|
"endpoint": sub["endpoint"],
|
||||||
|
"keys": {
|
||||||
|
"p256dh": sub["p256dh"],
|
||||||
|
"auth": sub["auth"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class _SendResult:
    """Outcome of a single push send attempt for one subscription."""

    # Primary key of the push_subscriptions row this result belongs to.
    sub_id: str
    # True when the push service accepted the notification.
    success: bool = False
    # True when the service reported the subscription is permanently gone
    # and the row should be removed.
    expired: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class PushManager:
    """Dispatches Web Push notifications for incoming messages.

    Filters by the global push-enabled conversation list, fans the payload
    out to every registered subscription concurrently, and batch-records
    delivery outcomes (success / failure / expired-remove) afterwards.
    """

    async def dispatch_message(self, data: dict) -> None:
        """Send push notifications for a message event to all devices."""
        # Don't notify for messages the operator just sent themselves
        if data.get("outgoing"):
            return

        # Check the global conversation list
        state_key = _state_key_for_message(data)
        try:
            push_conversations = await AppSettingsRepository.get_push_conversations()
        except Exception:
            logger.debug("Push dispatch: failed to load push_conversations", exc_info=True)
            return

        if state_key not in push_conversations:
            return

        try:
            subs = await PushSubscriptionRepository.get_all()
        except Exception:
            logger.debug("Push dispatch: failed to load subscriptions", exc_info=True)
            return

        if not subs:
            return

        payload = _build_payload(data)
        vapid_key = get_vapid_private_key()
        if not vapid_key:
            logger.debug("Push dispatch: no VAPID key configured, skipping")
            return

        # Fan out concurrently; return_exceptions keeps one bad endpoint
        # from aborting the rest of the sends.
        results = await asyncio.gather(
            *(self._send_one(sub, payload, vapid_key) for sub in subs),
            return_exceptions=True,
        )

        # Batch-update all delivery outcomes in one transaction.
        success_ids: list[str] = []
        failure_ids: list[str] = []
        remove_ids: list[str] = []
        for r in results:
            if isinstance(r, _SendResult):
                if r.expired:
                    remove_ids.append(r.sub_id)
                elif r.success:
                    success_ids.append(r.sub_id)
                else:
                    failure_ids.append(r.sub_id)
        if success_ids or failure_ids or remove_ids:
            try:
                await PushSubscriptionRepository.batch_record_outcomes(
                    success_ids, failure_ids, remove_ids
                )
            except Exception:
                logger.debug("Push dispatch: failed to record outcomes", exc_info=True)

    async def _send_one(self, sub: dict, payload: str, vapid_key: str) -> _SendResult:
        """Send one notification and classify the outcome.

        Never raises: every failure mode is folded into the returned
        _SendResult so the caller can batch-record outcomes.
        """
        sub_id = sub["id"]
        result = _SendResult(sub_id=sub_id)
        try:
            async with asyncio.timeout(_SEND_TIMEOUT):
                await send_push(
                    subscription_info=_subscription_info(sub),
                    payload=payload,
                    vapid_private_key=vapid_key,
                    vapid_claims=_VAPID_CLAIMS,
                )
            result.success = True
        except WebPushException as e:
            status = getattr(e, "response", None)
            status_code = getattr(status, "status_code", 0) if status else 0
            # 403/404/410 from the push service mean the subscription is
            # permanently gone — flag it for removal rather than retrying.
            if status_code in (403, 404, 410):
                logger.info("Push subscription expired (HTTP %d), removing %s", status_code, sub_id)
                result.expired = True
            else:
                logger.warning("Push send failed for %s: %s", sub_id, e)
        except TimeoutError:
            logger.warning("Push send timed out for %s", sub_id)
        except Exception:
            logger.debug("Push send error for %s", sub_id, exc_info=True)
        return result
|
||||||
|
|
||||||
|
|
||||||
|
push_manager = PushManager()
|
||||||
@@ -0,0 +1,231 @@
|
|||||||
|
"""Thin wrapper around pywebpush for sending push notifications.
|
||||||
|
|
||||||
|
Isolates the pywebpush dependency and runs the synchronous send in
|
||||||
|
a thread executor to avoid blocking the event loop.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import socket
|
||||||
|
from typing import Any, cast
|
||||||
|
|
||||||
|
import requests
|
||||||
|
import urllib3.connection
|
||||||
|
import urllib3.connectionpool
|
||||||
|
from pywebpush import webpush
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
from requests.exceptions import ConnectionError as RequestsConnectionError
|
||||||
|
from requests.exceptions import ConnectTimeout as RequestsConnectTimeout
|
||||||
|
from urllib3.exceptions import ConnectTimeoutError, NameResolutionError, NewConnectionError
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
DEFAULT_TIMEOUT = object()
|
||||||
|
DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS = 3
|
||||||
|
IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS = 10
|
||||||
|
DEFAULT_PUSH_READ_TIMEOUT_SECONDS = 10
|
||||||
|
|
||||||
|
|
||||||
|
def _create_ipv4_connection(
|
||||||
|
address: tuple[str, int],
|
||||||
|
timeout: float | None | object = DEFAULT_TIMEOUT,
|
||||||
|
source_address: tuple[str, int] | None = None,
|
||||||
|
socket_options=None,
|
||||||
|
) -> socket.socket:
|
||||||
|
"""Create a socket connection using IPv4 only."""
|
||||||
|
host, port = address
|
||||||
|
if host.startswith("["):
|
||||||
|
host = host.strip("[]")
|
||||||
|
|
||||||
|
err: OSError | None = None
|
||||||
|
for res in socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM):
|
||||||
|
af, socktype, proto, _, sa = res
|
||||||
|
sock = None
|
||||||
|
try:
|
||||||
|
sock = socket.socket(af, socktype, proto)
|
||||||
|
if socket_options:
|
||||||
|
for opt in socket_options:
|
||||||
|
sock.setsockopt(*opt)
|
||||||
|
if timeout is not DEFAULT_TIMEOUT:
|
||||||
|
sock.settimeout(cast(float | None, timeout))
|
||||||
|
if source_address:
|
||||||
|
sock.bind(source_address)
|
||||||
|
sock.connect(sa)
|
||||||
|
return sock
|
||||||
|
except OSError as exc:
|
||||||
|
err = exc
|
||||||
|
if sock is not None:
|
||||||
|
sock.close()
|
||||||
|
|
||||||
|
if err is not None:
|
||||||
|
raise err
|
||||||
|
raise OSError("getaddrinfo returns an empty list")
|
||||||
|
|
||||||
|
|
||||||
|
class IPv4HTTPConnection(urllib3.connection.HTTPConnection):
|
||||||
|
"""urllib3 HTTP connection that resolves and connects via IPv4 only."""
|
||||||
|
|
||||||
|
def _new_conn(self) -> socket.socket:
|
||||||
|
try:
|
||||||
|
return _create_ipv4_connection(
|
||||||
|
(self._dns_host, self.port),
|
||||||
|
self.timeout,
|
||||||
|
source_address=self.source_address,
|
||||||
|
socket_options=self.socket_options,
|
||||||
|
)
|
||||||
|
except socket.gaierror as exc:
|
||||||
|
raise NameResolutionError(self.host, self, exc) from exc
|
||||||
|
except TimeoutError as exc:
|
||||||
|
raise ConnectTimeoutError(
|
||||||
|
self,
|
||||||
|
f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
|
||||||
|
) from exc
|
||||||
|
except OSError as exc:
|
||||||
|
raise NewConnectionError(self, f"Failed to establish a new connection: {exc}") from exc
|
||||||
|
|
||||||
|
|
||||||
|
class IPv4HTTPSConnection(urllib3.connection.HTTPSConnection):
|
||||||
|
"""urllib3 HTTPS connection that resolves and connects via IPv4 only."""
|
||||||
|
|
||||||
|
def _new_conn(self) -> socket.socket:
|
||||||
|
try:
|
||||||
|
return _create_ipv4_connection(
|
||||||
|
(self._dns_host, self.port),
|
||||||
|
self.timeout,
|
||||||
|
source_address=self.source_address,
|
||||||
|
socket_options=self.socket_options,
|
||||||
|
)
|
||||||
|
except socket.gaierror as exc:
|
||||||
|
raise NameResolutionError(self.host, self, exc) from exc
|
||||||
|
except TimeoutError as exc:
|
||||||
|
raise ConnectTimeoutError(
|
||||||
|
self,
|
||||||
|
f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
|
||||||
|
) from exc
|
||||||
|
except OSError as exc:
|
||||||
|
raise NewConnectionError(self, f"Failed to establish a new connection: {exc}") from exc
|
||||||
|
|
||||||
|
|
||||||
|
class IPv4HTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
|
||||||
|
ConnectionCls = cast(Any, IPv4HTTPConnection)
|
||||||
|
|
||||||
|
|
||||||
|
class IPv4HTTPSConnectionPool(urllib3.connectionpool.HTTPSConnectionPool):
|
||||||
|
ConnectionCls = cast(Any, IPv4HTTPSConnection)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_pool_manager_for_ipv4(manager: Any) -> None:
|
||||||
|
manager.pool_classes_by_scheme = manager.pool_classes_by_scheme.copy()
|
||||||
|
manager.pool_classes_by_scheme["http"] = IPv4HTTPConnectionPool
|
||||||
|
manager.pool_classes_by_scheme["https"] = IPv4HTTPSConnectionPool
|
||||||
|
|
||||||
|
|
||||||
|
class IPv4HTTPAdapter(HTTPAdapter):
|
||||||
|
"""requests adapter that uses IPv4-only urllib3 connection pools."""
|
||||||
|
|
||||||
|
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
|
||||||
|
super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)
|
||||||
|
_configure_pool_manager_for_ipv4(self.poolmanager)
|
||||||
|
|
||||||
|
def proxy_manager_for(self, *args, **kwargs):
|
||||||
|
manager = super().proxy_manager_for(*args, **kwargs)
|
||||||
|
_configure_pool_manager_for_ipv4(manager)
|
||||||
|
return manager
|
||||||
|
|
||||||
|
|
||||||
|
def _build_default_requests_session() -> requests.Session:
|
||||||
|
return requests.Session()
|
||||||
|
|
||||||
|
|
||||||
|
def _build_ipv4_requests_session() -> requests.Session:
|
||||||
|
session = requests.Session()
|
||||||
|
adapter = IPv4HTTPAdapter()
|
||||||
|
session.mount("http://", adapter)
|
||||||
|
session.mount("https://", adapter)
|
||||||
|
return session
|
||||||
|
|
||||||
|
|
||||||
|
def _send_push_with_session(
|
||||||
|
*,
|
||||||
|
subscription_info: dict,
|
||||||
|
payload: str,
|
||||||
|
vapid_private_key: str,
|
||||||
|
vapid_claims: dict,
|
||||||
|
session: requests.Session,
|
||||||
|
connect_timeout_seconds: int,
|
||||||
|
) -> int:
|
||||||
|
response = webpush(
|
||||||
|
subscription_info=subscription_info,
|
||||||
|
data=payload,
|
||||||
|
vapid_private_key=vapid_private_key,
|
||||||
|
vapid_claims=vapid_claims,
|
||||||
|
content_encoding="aes128gcm",
|
||||||
|
timeout=cast(Any, (connect_timeout_seconds, DEFAULT_PUSH_READ_TIMEOUT_SECONDS)),
|
||||||
|
requests_session=session,
|
||||||
|
)
|
||||||
|
return response.status_code # type: ignore[union-attr]
|
||||||
|
|
||||||
|
|
||||||
|
def _send_push_with_fallback(
|
||||||
|
subscription_info: dict,
|
||||||
|
payload: str,
|
||||||
|
vapid_private_key: str,
|
||||||
|
vapid_claims: dict,
|
||||||
|
) -> int:
|
||||||
|
"""Send using normal dual-stack resolution, then retry with IPv4-only on connect failures."""
|
||||||
|
session = _build_default_requests_session()
|
||||||
|
try:
|
||||||
|
return _send_push_with_session(
|
||||||
|
subscription_info=subscription_info,
|
||||||
|
payload=payload,
|
||||||
|
vapid_private_key=vapid_private_key,
|
||||||
|
vapid_claims=vapid_claims,
|
||||||
|
session=session,
|
||||||
|
connect_timeout_seconds=DEFAULT_PUSH_CONNECT_TIMEOUT_SECONDS,
|
||||||
|
)
|
||||||
|
except (RequestsConnectTimeout, RequestsConnectionError) as exc:
|
||||||
|
logger.info("Push delivery retrying via IPv4 after initial network failure: %s", exc)
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
session = _build_ipv4_requests_session()
|
||||||
|
try:
|
||||||
|
return _send_push_with_session(
|
||||||
|
subscription_info=subscription_info,
|
||||||
|
payload=payload,
|
||||||
|
vapid_private_key=vapid_private_key,
|
||||||
|
vapid_claims=vapid_claims,
|
||||||
|
session=session,
|
||||||
|
connect_timeout_seconds=IPV4_FALLBACK_CONNECT_TIMEOUT_SECONDS,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
|
||||||
|
async def send_push(
|
||||||
|
subscription_info: dict,
|
||||||
|
payload: str,
|
||||||
|
vapid_private_key: str,
|
||||||
|
vapid_claims: dict,
|
||||||
|
) -> int:
|
||||||
|
"""Send an encrypted push notification.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
subscription_info: {"endpoint": ..., "keys": {"p256dh": ..., "auth": ...}}
|
||||||
|
payload: JSON string to encrypt and send
|
||||||
|
vapid_private_key: base64url-encoded raw EC private key scalar
|
||||||
|
vapid_claims: {"sub": "mailto:..."} or {"sub": "https://..."}
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
HTTP status code from the push service.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
WebPushException: on push service error (caller handles 404/410 cleanup).
|
||||||
|
"""
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
return await loop.run_in_executor(
|
||||||
|
None,
|
||||||
|
lambda: _send_push_with_fallback(
|
||||||
|
subscription_info, payload, vapid_private_key, vapid_claims
|
||||||
|
),
|
||||||
|
)
|
||||||
@@ -0,0 +1,60 @@
|
|||||||
|
"""VAPID key management for Web Push.
|
||||||
|
|
||||||
|
Generates a P-256 key pair on first use and caches it in app_settings
|
||||||
|
via ``AppSettingsRepository``. The public key is served to browsers
|
||||||
|
for ``PushManager.subscribe()``.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
|
||||||
|
from py_vapid import Vapid
|
||||||
|
|
||||||
|
from app.repository.settings import AppSettingsRepository
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_cached_private_key: str = ""
|
||||||
|
_cached_public_key: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
async def ensure_vapid_keys() -> tuple[str, str]:
|
||||||
|
"""Read or generate VAPID keys. Call once at startup after DB connect."""
|
||||||
|
global _cached_private_key, _cached_public_key
|
||||||
|
|
||||||
|
private, public = await AppSettingsRepository.get_vapid_keys()
|
||||||
|
if private and public:
|
||||||
|
_cached_private_key = private
|
||||||
|
_cached_public_key = public
|
||||||
|
logger.info("VAPID keys loaded from database")
|
||||||
|
return _cached_private_key, _cached_public_key
|
||||||
|
|
||||||
|
# Generate new key pair
|
||||||
|
vapid = Vapid()
|
||||||
|
vapid.generate_keys()
|
||||||
|
|
||||||
|
# Private key as base64url-encoded raw 32-byte EC scalar — the format
|
||||||
|
# that pywebpush passes to ``Vapid.from_string()``.
|
||||||
|
raw_priv = vapid.private_key.private_numbers().private_value.to_bytes(32, "big") # type: ignore[union-attr]
|
||||||
|
_cached_private_key = base64.urlsafe_b64encode(raw_priv).rstrip(b"=").decode("ascii")
|
||||||
|
|
||||||
|
# Public key as uncompressed P-256 point, base64url-encoded (no padding)
|
||||||
|
# for the browser Push API's applicationServerKey
|
||||||
|
raw_pub = vapid.public_key.public_bytes(Encoding.X962, PublicFormat.UncompressedPoint) # type: ignore[union-attr]
|
||||||
|
_cached_public_key = base64.urlsafe_b64encode(raw_pub).rstrip(b"=").decode("ascii")
|
||||||
|
|
||||||
|
await AppSettingsRepository.set_vapid_keys(_cached_private_key, _cached_public_key)
|
||||||
|
logger.info("Generated and stored new VAPID key pair")
|
||||||
|
|
||||||
|
return _cached_private_key, _cached_public_key
|
||||||
|
|
||||||
|
|
||||||
|
def get_vapid_public_key() -> str:
|
||||||
|
"""Return the cached VAPID public key (base64url). Must call ensure_vapid_keys() first."""
|
||||||
|
return _cached_public_key
|
||||||
|
|
||||||
|
|
||||||
|
def get_vapid_private_key() -> str:
|
||||||
|
"""Return the cached VAPID private key (base64url). Must call ensure_vapid_keys() first."""
|
||||||
|
return _cached_private_key
|
||||||
+171
-72
@@ -14,6 +14,7 @@ import logging
|
|||||||
import math
|
import math
|
||||||
import time
|
import time
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import asynccontextmanager
|
||||||
|
from datetime import UTC, datetime, timedelta
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
from meshcore import EventType, MeshCore
|
from meshcore import EventType, MeshCore
|
||||||
@@ -36,6 +37,7 @@ from app.services.contact_reconciliation import (
|
|||||||
)
|
)
|
||||||
from app.services.messages import create_fallback_channel_message
|
from app.services.messages import create_fallback_channel_message
|
||||||
from app.services.radio_runtime import radio_runtime as radio_manager
|
from app.services.radio_runtime import radio_runtime as radio_manager
|
||||||
|
from app.telemetry_interval import clamp_telemetry_interval
|
||||||
from app.websocket import broadcast_error, broadcast_event
|
from app.websocket import broadcast_error, broadcast_event
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -159,10 +161,10 @@ MIN_ADVERT_INTERVAL = 3600
|
|||||||
# Periodic telemetry collection task handle
|
# Periodic telemetry collection task handle
|
||||||
_telemetry_collect_task: asyncio.Task | None = None
|
_telemetry_collect_task: asyncio.Task | None = None
|
||||||
|
|
||||||
# Telemetry collection interval (8 hours)
|
# Initial delay before the scheduler starts (let radio settle). After this,
|
||||||
TELEMETRY_COLLECT_INTERVAL = 8 * 3600
|
# the loop wakes at each UTC top-of-hour and decides whether to run a cycle
|
||||||
|
# based on the user's telemetry_interval_hours preference, clamped up to
|
||||||
# Initial delay before the first telemetry collection cycle (let radio settle)
|
# the shortest-legal interval for the current tracked-repeater count.
|
||||||
TELEMETRY_COLLECT_INITIAL_DELAY = 60
|
TELEMETRY_COLLECT_INITIAL_DELAY = 60
|
||||||
|
|
||||||
# Counter to pause polling during repeater operations (supports nested pauses)
|
# Counter to pause polling during repeater operations (supports nested pauses)
|
||||||
@@ -459,9 +461,8 @@ async def drain_pending_messages(mc: MeshCore) -> int:
|
|||||||
Returns the count of messages retrieved.
|
Returns the count of messages retrieved.
|
||||||
"""
|
"""
|
||||||
count = 0
|
count = 0
|
||||||
max_iterations = 100 # Safety limit
|
|
||||||
|
|
||||||
for _ in range(max_iterations):
|
while True:
|
||||||
try:
|
try:
|
||||||
result = await mc.commands.get_msg(timeout=2.0)
|
result = await mc.commands.get_msg(timeout=2.0)
|
||||||
|
|
||||||
@@ -853,7 +854,7 @@ async def _attempt_clock_wraparound(mc: MeshCore, *, now: int, observed_radio_ti
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
async def sync_radio_time(mc: MeshCore) -> bool:
|
async def sync_radio_time(mc: MeshCore, *, warn_on_failure: bool = True) -> bool:
|
||||||
"""Sync the radio's clock with the system time.
|
"""Sync the radio's clock with the system time.
|
||||||
|
|
||||||
The firmware only accepts forward time adjustments (new >= current).
|
The firmware only accepts forward time adjustments (new >= current).
|
||||||
@@ -868,9 +869,15 @@ async def sync_radio_time(mc: MeshCore) -> bool:
|
|||||||
only once; if it doesn't help (hardware RTC persists the wrong time),
|
only once; if it doesn't help (hardware RTC persists the wrong time),
|
||||||
the skew is logged as a warning on subsequent syncs.
|
the skew is logged as a warning on subsequent syncs.
|
||||||
|
|
||||||
|
``warn_on_failure`` controls log severity for rejected/failed sync attempts.
|
||||||
|
Startup and reconnect setup should leave this enabled so operators see the
|
||||||
|
initial skew problem. Periodic maintenance syncs pass ``False`` to avoid
|
||||||
|
repeating the same warning every few minutes after startup.
|
||||||
|
|
||||||
Returns True if the radio accepted the new time, False otherwise.
|
Returns True if the radio accepted the new time, False otherwise.
|
||||||
"""
|
"""
|
||||||
global _clock_reboot_attempted # noqa: PLW0603
|
global _clock_reboot_attempted # noqa: PLW0603
|
||||||
|
log_failure = logger.warning if warn_on_failure else logger.debug
|
||||||
try:
|
try:
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
preflight_radio_time: int | None = None
|
preflight_radio_time: int | None = None
|
||||||
@@ -899,7 +906,7 @@ async def sync_radio_time(mc: MeshCore) -> bool:
|
|||||||
|
|
||||||
if radio_time is not None:
|
if radio_time is not None:
|
||||||
delta = radio_time - now
|
delta = radio_time - now
|
||||||
logger.warning(
|
log_failure(
|
||||||
"Radio rejected time sync: radio clock is %+d seconds "
|
"Radio rejected time sync: radio clock is %+d seconds "
|
||||||
"(%+.1f hours) from system time (radio=%d, system=%d).",
|
"(%+.1f hours) from system time (radio=%d, system=%d).",
|
||||||
delta,
|
delta,
|
||||||
@@ -909,7 +916,7 @@ async def sync_radio_time(mc: MeshCore) -> bool:
|
|||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
delta = None
|
delta = None
|
||||||
logger.warning(
|
log_failure(
|
||||||
"Radio rejected time sync (set_time returned %s) "
|
"Radio rejected time sync (set_time returned %s) "
|
||||||
"and get_time query failed; cannot determine clock skew.",
|
"and get_time query failed; cannot determine clock skew.",
|
||||||
result.type,
|
result.type,
|
||||||
@@ -934,14 +941,14 @@ async def sync_radio_time(mc: MeshCore) -> bool:
|
|||||||
# reboot, allowing the next post-connect sync to succeed.
|
# reboot, allowing the next post-connect sync to succeed.
|
||||||
if not _clock_reboot_attempted and (delta is None or delta > 30):
|
if not _clock_reboot_attempted and (delta is None or delta > 30):
|
||||||
_clock_reboot_attempted = True
|
_clock_reboot_attempted = True
|
||||||
logger.warning(
|
log_failure(
|
||||||
"Rebooting radio to reset clock skew. Boards with a "
|
"Rebooting radio to reset clock skew. Boards with a "
|
||||||
"volatile RTC will accept the correct time after restart."
|
"volatile RTC will accept the correct time after restart."
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
await mc.commands.reboot()
|
await mc.commands.reboot()
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.warning("Reboot command failed", exc_info=True)
|
log_failure("Reboot command failed", exc_info=True)
|
||||||
elif _clock_reboot_attempted:
|
elif _clock_reboot_attempted:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Clock skew persists after reboot (hardware RTC); ignoring until next session."
|
"Clock skew persists after reboot (hardware RTC); ignoring until next session."
|
||||||
@@ -949,7 +956,7 @@ async def sync_radio_time(mc: MeshCore) -> bool:
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning("Failed to sync radio time: %s", e, exc_info=True)
|
log_failure("Failed to sync radio time: %s", e, exc_info=True)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
@@ -969,7 +976,7 @@ async def _periodic_sync_loop():
|
|||||||
) as mc:
|
) as mc:
|
||||||
if await should_run_full_periodic_sync(mc):
|
if await should_run_full_periodic_sync(mc):
|
||||||
await sync_and_offload_all(mc)
|
await sync_and_offload_all(mc)
|
||||||
await sync_radio_time(mc)
|
await sync_radio_time(mc, warn_on_failure=False)
|
||||||
except RadioOperationBusyError:
|
except RadioOperationBusyError:
|
||||||
logger.debug("Skipping periodic sync: radio busy")
|
logger.debug("Skipping periodic sync: radio busy")
|
||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
@@ -1295,7 +1302,13 @@ async def stop_background_contact_reconciliation() -> None:
|
|||||||
|
|
||||||
|
|
||||||
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
||||||
"""Return the contacts that would be loaded onto the radio right now."""
|
"""Return the contacts that would be loaded onto the radio right now.
|
||||||
|
|
||||||
|
Fill order:
|
||||||
|
1. Favorites (up to full capacity)
|
||||||
|
2. Most recently DM-active non-repeaters (sent or received, up to 80% refill target)
|
||||||
|
3. Most recently advertised non-repeaters (up to 80% refill target)
|
||||||
|
"""
|
||||||
app_settings = await AppSettingsRepository.get()
|
app_settings = await AppSettingsRepository.get()
|
||||||
max_contacts = _effective_radio_capacity(app_settings.max_radio_contacts)
|
max_contacts = _effective_radio_capacity(app_settings.max_radio_contacts)
|
||||||
refill_target, _full_sync_trigger = _compute_radio_contact_limits(max_contacts)
|
refill_target, _full_sync_trigger = _compute_radio_contact_limits(max_contacts)
|
||||||
@@ -1315,7 +1328,7 @@ async def get_contacts_selected_for_radio_sync() -> list[Contact]:
|
|||||||
break
|
break
|
||||||
|
|
||||||
if len(selected_contacts) < refill_target:
|
if len(selected_contacts) < refill_target:
|
||||||
for contact in await ContactRepository.get_recently_contacted_non_repeaters(
|
for contact in await ContactRepository.get_recently_dm_active_non_repeaters(
|
||||||
limit=max_contacts
|
limit=max_contacts
|
||||||
):
|
):
|
||||||
key = contact.public_key.lower()
|
key = contact.public_key.lower()
|
||||||
@@ -1354,8 +1367,8 @@ async def _sync_contacts_to_radio_inner(mc: MeshCore) -> dict:
|
|||||||
|
|
||||||
Fill order is:
|
Fill order is:
|
||||||
1. Favorite contacts
|
1. Favorite contacts
|
||||||
2. Most recently interacted-with non-repeaters
|
2. Most recently DM-active non-repeaters (sent or received)
|
||||||
3. Most recently advert-heard non-repeaters without interaction history
|
3. Most recently advert-heard non-repeaters
|
||||||
|
|
||||||
Favorite contacts are always reloaded first, up to the configured capacity.
|
Favorite contacts are always reloaded first, up to the configured capacity.
|
||||||
Additional non-favorite fill stops at the refill target (80% of capacity).
|
Additional non-favorite fill stops at the refill target (80% of capacity).
|
||||||
@@ -1489,8 +1502,8 @@ async def sync_recent_contacts_to_radio(force: bool = False, mc: MeshCore | None
|
|||||||
"""
|
"""
|
||||||
Load contacts to the radio for DM ACK support.
|
Load contacts to the radio for DM ACK support.
|
||||||
|
|
||||||
Fill order is favorites, then recently contacted non-repeaters,
|
Fill order is favorites, then recently DM-active non-repeaters (sent or
|
||||||
then recently advert-heard non-repeaters. Favorites are always reloaded
|
received), then recently advert-heard non-repeaters. Favorites are always reloaded
|
||||||
up to the configured capacity; additional non-favorite fill stops at the
|
up to the configured capacity; additional non-favorite fill stops at the
|
||||||
80% refill target.
|
80% refill target.
|
||||||
Only runs at most once every CONTACT_SYNC_THROTTLE_SECONDS unless forced.
|
Only runs at most once every CONTACT_SYNC_THROTTLE_SECONDS unless forced.
|
||||||
@@ -1584,6 +1597,35 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
|||||||
"full_events": status.get("full_evts", 0),
|
"full_events": status.get("full_evts", 0),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Best-effort LPP sensor fetch — failure here does not fail the overall
|
||||||
|
# collection; status telemetry is still recorded without sensor data.
|
||||||
|
try:
|
||||||
|
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||||
|
contact.public_key, timeout=10, min_timeout=5
|
||||||
|
)
|
||||||
|
if lpp_raw:
|
||||||
|
lpp_sensors = []
|
||||||
|
for entry in lpp_raw:
|
||||||
|
value = entry.get("value", 0)
|
||||||
|
# Skip multi-value sensors (GPS, accelerometer, etc.)
|
||||||
|
if isinstance(value, dict):
|
||||||
|
continue
|
||||||
|
lpp_sensors.append(
|
||||||
|
{
|
||||||
|
"channel": entry.get("channel", 0),
|
||||||
|
"type_name": str(entry.get("type", "unknown")),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if lpp_sensors:
|
||||||
|
data["lpp_sensors"] = lpp_sensors
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(
|
||||||
|
"Telemetry collect: LPP sensor fetch failed for %s (non-fatal): %s",
|
||||||
|
contact.public_key[:12],
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
timestamp = int(time.time())
|
timestamp = int(time.time())
|
||||||
await RepeaterTelemetryRepository.record(
|
await RepeaterTelemetryRepository.record(
|
||||||
@@ -1621,62 +1663,122 @@ async def _collect_repeater_telemetry(mc: MeshCore, contact: Contact) -> bool:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
async def _run_telemetry_cycle() -> None:
|
||||||
|
"""Collect one telemetry sample from every tracked repeater."""
|
||||||
|
if not radio_manager.is_connected:
|
||||||
|
logger.debug("Telemetry collect: radio not connected, skipping cycle")
|
||||||
|
return
|
||||||
|
|
||||||
|
app_settings = await AppSettingsRepository.get()
|
||||||
|
tracked = app_settings.tracked_telemetry_repeaters
|
||||||
|
if not tracked:
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Telemetry collect: starting cycle for %d repeater(s)", len(tracked))
|
||||||
|
collected = 0
|
||||||
|
|
||||||
|
for pub_key in tracked:
|
||||||
|
contact = await ContactRepository.get_by_key(pub_key)
|
||||||
|
if not contact or contact.type != 2:
|
||||||
|
logger.debug(
|
||||||
|
"Telemetry collect: skipping %s (not found or not repeater)",
|
||||||
|
pub_key[:12],
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with radio_manager.radio_operation(
|
||||||
|
"telemetry_collect",
|
||||||
|
blocking=False,
|
||||||
|
suspend_auto_fetch=True,
|
||||||
|
) as mc:
|
||||||
|
if await _collect_repeater_telemetry(mc, contact):
|
||||||
|
collected += 1
|
||||||
|
except RadioOperationBusyError:
|
||||||
|
logger.debug(
|
||||||
|
"Telemetry collect: radio busy, skipping %s",
|
||||||
|
pub_key[:12],
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Telemetry collect: cycle complete, %d/%d successful",
|
||||||
|
collected,
|
||||||
|
len(tracked),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def _sleep_until_next_utc_top_of_hour() -> None:
|
||||||
|
"""Sleep until the next UTC top-of-hour (or a minimum of 1 second)."""
|
||||||
|
now = datetime.now(UTC)
|
||||||
|
next_top = now.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
|
||||||
|
delay = (next_top - now).total_seconds()
|
||||||
|
if delay < 1:
|
||||||
|
delay = 1
|
||||||
|
await asyncio.sleep(delay)
|
||||||
|
|
||||||
|
|
||||||
|
async def _maybe_run_scheduled_cycle(now: datetime) -> None:
|
||||||
|
"""Evaluate the modulo gate for the given UTC time and run a cycle if due.
|
||||||
|
|
||||||
|
Factored out of the loop so we can also invoke it immediately after the
|
||||||
|
post-boot initial delay — otherwise a restart within the initial-delay
|
||||||
|
window before a scheduled boundary would carry the task past that boundary
|
||||||
|
and skip a due cycle (for 24h cadence users, that's a full day of missed
|
||||||
|
telemetry).
|
||||||
|
"""
|
||||||
|
app_settings = await AppSettingsRepository.get()
|
||||||
|
tracked_count = len(app_settings.tracked_telemetry_repeaters)
|
||||||
|
if tracked_count == 0:
|
||||||
|
return
|
||||||
|
effective_hours = clamp_telemetry_interval(app_settings.telemetry_interval_hours, tracked_count)
|
||||||
|
if effective_hours <= 0:
|
||||||
|
return
|
||||||
|
if now.hour % effective_hours != 0:
|
||||||
|
return
|
||||||
|
await _run_telemetry_cycle()
|
||||||
|
|
||||||
|
|
||||||
async def _telemetry_collect_loop() -> None:
|
async def _telemetry_collect_loop() -> None:
|
||||||
"""Background task that collects telemetry from tracked repeaters every 8 hours.
|
"""Background task that runs tracked-repeater telemetry collection.
|
||||||
|
|
||||||
Runs a first cycle after a short initial delay (so newly tracked repeaters
|
After an initial post-boot delay we evaluate the modulo gate once
|
||||||
get a sample promptly), then sleeps the full interval between subsequent cycles.
|
(covers the edge case where the initial delay crossed a scheduled
|
||||||
|
boundary on restart). Then we wake at every UTC top-of-hour and
|
||||||
|
evaluate the gate again. A cycle runs only when
|
||||||
|
``current_utc_hour % effective_interval_hours == 0``, where the
|
||||||
|
effective interval is the user preference clamped up to the shortest
|
||||||
|
legal interval for the current tracked-repeater count. This keeps the
|
||||||
|
total daily check count bounded at ``DAILY_CHECK_CEILING`` (24).
|
||||||
|
|
||||||
Acquires the radio lock per-repeater (non-blocking) so manual operations can
|
The loop never updates the stored user preference. If the user picks a
|
||||||
|
short interval and then adds repeaters that make it illegal, they keep
|
||||||
|
their pick stored and we silently use the clamped value until they drop
|
||||||
|
repeaters.
|
||||||
|
|
||||||
|
Radio lock is acquired per-repeater (non-blocking) so manual ops can
|
||||||
interleave. Failures are logged and skipped.
|
interleave. Failures are logged and skipped.
|
||||||
"""
|
"""
|
||||||
first_run = True
|
try:
|
||||||
|
await asyncio.sleep(TELEMETRY_COLLECT_INITIAL_DELAY)
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.info("Telemetry collect task cancelled before initial delay")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Post-boot boundary check: if the delay carried us into a matching hour
|
||||||
|
# (or we booted exactly at a matching hour), run now rather than waiting
|
||||||
|
# another full cycle.
|
||||||
|
try:
|
||||||
|
await _maybe_run_scheduled_cycle(datetime.now(UTC))
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.info("Telemetry collect task cancelled after initial delay")
|
||||||
|
return
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Error in post-boot telemetry check: %s", e, exc_info=True)
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
delay = TELEMETRY_COLLECT_INITIAL_DELAY if first_run else TELEMETRY_COLLECT_INTERVAL
|
await _sleep_until_next_utc_top_of_hour()
|
||||||
await asyncio.sleep(delay)
|
await _maybe_run_scheduled_cycle(datetime.now(UTC))
|
||||||
first_run = False
|
|
||||||
|
|
||||||
if not radio_manager.is_connected:
|
|
||||||
logger.debug("Telemetry collect: radio not connected, skipping cycle")
|
|
||||||
continue
|
|
||||||
|
|
||||||
app_settings = await AppSettingsRepository.get()
|
|
||||||
tracked = app_settings.tracked_telemetry_repeaters
|
|
||||||
if not tracked:
|
|
||||||
continue
|
|
||||||
|
|
||||||
logger.info("Telemetry collect: starting cycle for %d repeater(s)", len(tracked))
|
|
||||||
collected = 0
|
|
||||||
|
|
||||||
for pub_key in tracked:
|
|
||||||
contact = await ContactRepository.get_by_key(pub_key)
|
|
||||||
if not contact or contact.type != 2:
|
|
||||||
logger.debug(
|
|
||||||
"Telemetry collect: skipping %s (not found or not repeater)",
|
|
||||||
pub_key[:12],
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
async with radio_manager.radio_operation(
|
|
||||||
"telemetry_collect",
|
|
||||||
blocking=False,
|
|
||||||
suspend_auto_fetch=True,
|
|
||||||
) as mc:
|
|
||||||
if await _collect_repeater_telemetry(mc, contact):
|
|
||||||
collected += 1
|
|
||||||
except RadioOperationBusyError:
|
|
||||||
logger.debug(
|
|
||||||
"Telemetry collect: radio busy, skipping %s",
|
|
||||||
pub_key[:12],
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
"Telemetry collect: cycle complete, %d/%d successful",
|
|
||||||
collected,
|
|
||||||
len(tracked),
|
|
||||||
)
|
|
||||||
|
|
||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
logger.info("Telemetry collect task cancelled")
|
logger.info("Telemetry collect task cancelled")
|
||||||
@@ -1690,10 +1792,7 @@ def start_telemetry_collect() -> None:
|
|||||||
global _telemetry_collect_task
|
global _telemetry_collect_task
|
||||||
if _telemetry_collect_task is None or _telemetry_collect_task.done():
|
if _telemetry_collect_task is None or _telemetry_collect_task.done():
|
||||||
_telemetry_collect_task = asyncio.create_task(_telemetry_collect_loop())
|
_telemetry_collect_task = asyncio.create_task(_telemetry_collect_loop())
|
||||||
logger.info(
|
logger.info("Started periodic telemetry collection (UTC-hourly scheduler)")
|
||||||
"Started periodic telemetry collection (interval: %ds)",
|
|
||||||
TELEMETRY_COLLECT_INTERVAL,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def stop_telemetry_collect() -> None:
|
async def stop_telemetry_collect() -> None:
|
||||||
|
|||||||
+69
-60
@@ -8,31 +8,33 @@ class ChannelRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def upsert(key: str, name: str, is_hashtag: bool = False, on_radio: bool = False) -> None:
|
async def upsert(key: str, name: str, is_hashtag: bool = False, on_radio: bool = False) -> None:
|
||||||
"""Upsert a channel. Key is 32-char hex string."""
|
"""Upsert a channel. Key is 32-char hex string."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
INSERT INTO channels (key, name, is_hashtag, on_radio, flood_scope_override)
|
"""
|
||||||
VALUES (?, ?, ?, ?, NULL)
|
INSERT INTO channels (key, name, is_hashtag, on_radio, flood_scope_override)
|
||||||
ON CONFLICT(key) DO UPDATE SET
|
VALUES (?, ?, ?, ?, NULL)
|
||||||
name = excluded.name,
|
ON CONFLICT(key) DO UPDATE SET
|
||||||
is_hashtag = excluded.is_hashtag,
|
name = excluded.name,
|
||||||
on_radio = excluded.on_radio
|
is_hashtag = excluded.is_hashtag,
|
||||||
""",
|
on_radio = excluded.on_radio
|
||||||
(key.upper(), name, is_hashtag, on_radio),
|
""",
|
||||||
)
|
(key.upper(), name, is_hashtag, on_radio),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_key(key: str) -> Channel | None:
|
async def get_by_key(key: str) -> Channel | None:
|
||||||
"""Get a channel by its key (32-char hex string)."""
|
"""Get a channel by its key (32-char hex string)."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
"""
|
||||||
FROM channels
|
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||||
WHERE key = ?
|
FROM channels
|
||||||
""",
|
WHERE key = ?
|
||||||
(key.upper(),),
|
""",
|
||||||
)
|
(key.upper(),),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if row:
|
if row:
|
||||||
return Channel(
|
return Channel(
|
||||||
key=row["key"],
|
key=row["key"],
|
||||||
@@ -48,14 +50,15 @@ class ChannelRepository:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_all() -> list[Channel]:
|
async def get_all() -> list[Channel]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
"""
|
||||||
FROM channels
|
SELECT key, name, is_hashtag, on_radio, flood_scope_override, path_hash_mode_override, last_read_at, favorite
|
||||||
ORDER BY name
|
FROM channels
|
||||||
"""
|
ORDER BY name
|
||||||
)
|
"""
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [
|
return [
|
||||||
Channel(
|
Channel(
|
||||||
key=row["key"],
|
key=row["key"],
|
||||||
@@ -73,21 +76,23 @@ class ChannelRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def set_favorite(key: str, value: bool) -> bool:
|
async def set_favorite(key: str, value: bool) -> bool:
|
||||||
"""Set or clear the favorite flag for a channel. Returns True if row was found."""
|
"""Set or clear the favorite flag for a channel. Returns True if row was found."""
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE channels SET favorite = ? WHERE key = ?",
|
async with conn.execute(
|
||||||
(1 if value else 0, key.upper()),
|
"UPDATE channels SET favorite = ? WHERE key = ?",
|
||||||
)
|
(1 if value else 0, key.upper()),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount > 0
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def delete(key: str) -> None:
|
async def delete(key: str) -> None:
|
||||||
"""Delete a channel by key."""
|
"""Delete a channel by key."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"DELETE FROM channels WHERE key = ?",
|
async with conn.execute(
|
||||||
(key.upper(),),
|
"DELETE FROM channels WHERE key = ?",
|
||||||
)
|
(key.upper(),),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update_last_read_at(key: str, timestamp: int | None = None) -> bool:
|
async def update_last_read_at(key: str, timestamp: int | None = None) -> bool:
|
||||||
@@ -96,35 +101,39 @@ class ChannelRepository:
|
|||||||
Returns True if a row was updated, False if channel not found.
|
Returns True if a row was updated, False if channel not found.
|
||||||
"""
|
"""
|
||||||
ts = timestamp if timestamp is not None else int(time.time())
|
ts = timestamp if timestamp is not None else int(time.time())
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE channels SET last_read_at = ? WHERE key = ?",
|
async with conn.execute(
|
||||||
(ts, key.upper()),
|
"UPDATE channels SET last_read_at = ? WHERE key = ?",
|
||||||
)
|
(ts, key.upper()),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount > 0
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update_flood_scope_override(key: str, flood_scope_override: str | None) -> bool:
|
async def update_flood_scope_override(key: str, flood_scope_override: str | None) -> bool:
|
||||||
"""Set or clear a channel's flood-scope override."""
|
"""Set or clear a channel's flood-scope override."""
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE channels SET flood_scope_override = ? WHERE key = ?",
|
async with conn.execute(
|
||||||
(flood_scope_override, key.upper()),
|
"UPDATE channels SET flood_scope_override = ? WHERE key = ?",
|
||||||
)
|
(flood_scope_override, key.upper()),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount > 0
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update_path_hash_mode_override(key: str, path_hash_mode_override: int | None) -> bool:
|
async def update_path_hash_mode_override(key: str, path_hash_mode_override: int | None) -> bool:
|
||||||
"""Set or clear a channel's path hash mode override."""
|
"""Set or clear a channel's path hash mode override."""
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE channels SET path_hash_mode_override = ? WHERE key = ?",
|
async with conn.execute(
|
||||||
(path_hash_mode_override, key.upper()),
|
"UPDATE channels SET path_hash_mode_override = ? WHERE key = ?",
|
||||||
)
|
(path_hash_mode_override, key.upper()),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount > 0
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def mark_all_read(timestamp: int) -> None:
|
async def mark_all_read(timestamp: int) -> None:
|
||||||
"""Mark all channels as read at the given timestamp."""
|
"""Mark all channels as read at the given timestamp."""
|
||||||
await db.conn.execute("UPDATE channels SET last_read_at = ?", (timestamp,))
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute("UPDATE channels SET last_read_at = ?", (timestamp,)):
|
||||||
|
pass
|
||||||
|
|||||||
+467
-356
@@ -61,66 +61,72 @@ class ContactRepository:
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
INSERT INTO contacts (public_key, name, type, flags, direct_path, direct_path_len,
|
"""
|
||||||
direct_path_hash_mode, direct_path_updated_at,
|
INSERT INTO contacts (public_key, name, type, flags, direct_path, direct_path_len,
|
||||||
route_override_path, route_override_len,
|
direct_path_hash_mode, direct_path_updated_at,
|
||||||
route_override_hash_mode,
|
route_override_path, route_override_len,
|
||||||
last_advert, lat, lon, last_seen,
|
route_override_hash_mode,
|
||||||
on_radio, last_contacted, first_seen)
|
last_advert, lat, lon, last_seen,
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
on_radio, last_contacted, first_seen)
|
||||||
ON CONFLICT(public_key) DO UPDATE SET
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
name = COALESCE(excluded.name, contacts.name),
|
ON CONFLICT(public_key) DO UPDATE SET
|
||||||
type = CASE WHEN excluded.type = 0 THEN contacts.type ELSE excluded.type END,
|
name = COALESCE(excluded.name, contacts.name),
|
||||||
flags = excluded.flags,
|
type = CASE WHEN excluded.type = 0 THEN contacts.type ELSE excluded.type END,
|
||||||
direct_path = COALESCE(excluded.direct_path, contacts.direct_path),
|
flags = excluded.flags,
|
||||||
direct_path_len = COALESCE(excluded.direct_path_len, contacts.direct_path_len),
|
direct_path = COALESCE(excluded.direct_path, contacts.direct_path),
|
||||||
direct_path_hash_mode = COALESCE(
|
direct_path_len = COALESCE(excluded.direct_path_len, contacts.direct_path_len),
|
||||||
excluded.direct_path_hash_mode, contacts.direct_path_hash_mode
|
direct_path_hash_mode = COALESCE(
|
||||||
|
excluded.direct_path_hash_mode, contacts.direct_path_hash_mode
|
||||||
|
),
|
||||||
|
direct_path_updated_at = COALESCE(
|
||||||
|
excluded.direct_path_updated_at, contacts.direct_path_updated_at
|
||||||
|
),
|
||||||
|
route_override_path = COALESCE(
|
||||||
|
excluded.route_override_path, contacts.route_override_path
|
||||||
|
),
|
||||||
|
route_override_len = COALESCE(
|
||||||
|
excluded.route_override_len, contacts.route_override_len
|
||||||
|
),
|
||||||
|
route_override_hash_mode = COALESCE(
|
||||||
|
excluded.route_override_hash_mode, contacts.route_override_hash_mode
|
||||||
|
),
|
||||||
|
last_advert = COALESCE(excluded.last_advert, contacts.last_advert),
|
||||||
|
lat = COALESCE(excluded.lat, contacts.lat),
|
||||||
|
lon = COALESCE(excluded.lon, contacts.lon),
|
||||||
|
last_seen = CASE
|
||||||
|
WHEN excluded.last_seen IS NULL THEN contacts.last_seen
|
||||||
|
WHEN contacts.last_seen IS NULL THEN excluded.last_seen
|
||||||
|
WHEN excluded.last_seen > contacts.last_seen THEN excluded.last_seen
|
||||||
|
ELSE contacts.last_seen
|
||||||
|
END,
|
||||||
|
on_radio = COALESCE(excluded.on_radio, contacts.on_radio),
|
||||||
|
last_contacted = COALESCE(excluded.last_contacted, contacts.last_contacted),
|
||||||
|
first_seen = COALESCE(contacts.first_seen, excluded.first_seen)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
contact_row.public_key.lower(),
|
||||||
|
contact_row.name,
|
||||||
|
contact_row.type,
|
||||||
|
contact_row.flags,
|
||||||
|
direct_path,
|
||||||
|
direct_path_len,
|
||||||
|
direct_path_hash_mode,
|
||||||
|
contact_row.direct_path_updated_at,
|
||||||
|
route_override_path,
|
||||||
|
route_override_len,
|
||||||
|
route_override_hash_mode,
|
||||||
|
contact_row.last_advert,
|
||||||
|
contact_row.lat,
|
||||||
|
contact_row.lon,
|
||||||
|
contact_row.last_seen,
|
||||||
|
contact_row.on_radio,
|
||||||
|
contact_row.last_contacted,
|
||||||
|
contact_row.first_seen,
|
||||||
),
|
),
|
||||||
direct_path_updated_at = COALESCE(
|
):
|
||||||
excluded.direct_path_updated_at, contacts.direct_path_updated_at
|
pass
|
||||||
),
|
|
||||||
route_override_path = COALESCE(
|
|
||||||
excluded.route_override_path, contacts.route_override_path
|
|
||||||
),
|
|
||||||
route_override_len = COALESCE(
|
|
||||||
excluded.route_override_len, contacts.route_override_len
|
|
||||||
),
|
|
||||||
route_override_hash_mode = COALESCE(
|
|
||||||
excluded.route_override_hash_mode, contacts.route_override_hash_mode
|
|
||||||
),
|
|
||||||
last_advert = COALESCE(excluded.last_advert, contacts.last_advert),
|
|
||||||
lat = COALESCE(excluded.lat, contacts.lat),
|
|
||||||
lon = COALESCE(excluded.lon, contacts.lon),
|
|
||||||
last_seen = excluded.last_seen,
|
|
||||||
on_radio = COALESCE(excluded.on_radio, contacts.on_radio),
|
|
||||||
last_contacted = COALESCE(excluded.last_contacted, contacts.last_contacted),
|
|
||||||
first_seen = COALESCE(contacts.first_seen, excluded.first_seen)
|
|
||||||
""",
|
|
||||||
(
|
|
||||||
contact_row.public_key.lower(),
|
|
||||||
contact_row.name,
|
|
||||||
contact_row.type,
|
|
||||||
contact_row.flags,
|
|
||||||
direct_path,
|
|
||||||
direct_path_len,
|
|
||||||
direct_path_hash_mode,
|
|
||||||
contact_row.direct_path_updated_at,
|
|
||||||
route_override_path,
|
|
||||||
route_override_len,
|
|
||||||
route_override_hash_mode,
|
|
||||||
contact_row.last_advert,
|
|
||||||
contact_row.lat,
|
|
||||||
contact_row.lon,
|
|
||||||
contact_row.last_seen if contact_row.last_seen is not None else int(time.time()),
|
|
||||||
contact_row.on_radio,
|
|
||||||
contact_row.last_contacted,
|
|
||||||
contact_row.first_seen,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
await db.conn.commit()
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _row_to_contact(row) -> Contact:
|
def _row_to_contact(row) -> Contact:
|
||||||
@@ -178,10 +184,11 @@ class ContactRepository:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_key(public_key: str) -> Contact | None:
|
async def get_by_key(public_key: str) -> Contact | None:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts WHERE public_key = ?", (public_key.lower(),)
|
async with conn.execute(
|
||||||
)
|
"SELECT * FROM contacts WHERE public_key = ?", (public_key.lower(),)
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return ContactRepository._row_to_contact(row) if row else None
|
return ContactRepository._row_to_contact(row) if row else None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -195,11 +202,12 @@ class ContactRepository:
|
|||||||
exact = await ContactRepository.get_by_key(normalized_prefix)
|
exact = await ContactRepository.get_by_key(normalized_prefix)
|
||||||
if exact:
|
if exact:
|
||||||
return exact
|
return exact
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT 2",
|
async with conn.execute(
|
||||||
(f"{normalized_prefix}%",),
|
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT 2",
|
||||||
)
|
(f"{normalized_prefix}%",),
|
||||||
rows = list(await cursor.fetchall())
|
) as cursor:
|
||||||
|
rows = list(await cursor.fetchall())
|
||||||
if len(rows) != 1:
|
if len(rows) != 1:
|
||||||
return None
|
return None
|
||||||
return ContactRepository._row_to_contact(rows[0])
|
return ContactRepository._row_to_contact(rows[0])
|
||||||
@@ -207,11 +215,12 @@ class ContactRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def _get_prefix_matches(prefix: str, limit: int = 2) -> list[Contact]:
|
async def _get_prefix_matches(prefix: str, limit: int = 2) -> list[Contact]:
|
||||||
"""Get contacts matching a key prefix, up to limit."""
|
"""Get contacts matching a key prefix, up to limit."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT ?",
|
async with conn.execute(
|
||||||
(f"{prefix.lower()}%", limit),
|
"SELECT * FROM contacts WHERE public_key LIKE ? ORDER BY public_key LIMIT ?",
|
||||||
)
|
(f"{prefix.lower()}%", limit),
|
||||||
rows = list(await cursor.fetchall())
|
) as cursor:
|
||||||
|
rows = list(await cursor.fetchall())
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -237,8 +246,9 @@ class ContactRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_name(name: str) -> list[Contact]:
|
async def get_by_name(name: str) -> list[Contact]:
|
||||||
"""Get all contacts with the given exact name."""
|
"""Get all contacts with the given exact name."""
|
||||||
cursor = await db.conn.execute("SELECT * FROM contacts WHERE name = ?", (name,))
|
async with db.readonly() as conn:
|
||||||
rows = await cursor.fetchall()
|
async with conn.execute("SELECT * FROM contacts WHERE name = ?", (name,)) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -254,8 +264,9 @@ class ContactRepository:
|
|||||||
normalized = [p.lower() for p in prefixes]
|
normalized = [p.lower() for p in prefixes]
|
||||||
conditions = " OR ".join(["public_key LIKE ?"] * len(normalized))
|
conditions = " OR ".join(["public_key LIKE ?"] * len(normalized))
|
||||||
params = [f"{p}%" for p in normalized]
|
params = [f"{p}%" for p in normalized]
|
||||||
cursor = await db.conn.execute(f"SELECT * FROM contacts WHERE {conditions}", params)
|
async with db.readonly() as conn:
|
||||||
rows = await cursor.fetchall()
|
async with conn.execute(f"SELECT * FROM contacts WHERE {conditions}", params) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
# Group by which prefix each row matches
|
# Group by which prefix each row matches
|
||||||
prefix_to_rows: dict[str, list] = {p: [] for p in normalized}
|
prefix_to_rows: dict[str, list] = {p: [] for p in normalized}
|
||||||
for row in rows:
|
for row in rows:
|
||||||
@@ -272,41 +283,67 @@ class ContactRepository:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_all(limit: int = 100, offset: int = 0) -> list[Contact]:
|
async def get_all(limit: int = 100, offset: int = 0) -> list[Contact]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts ORDER BY COALESCE(name, public_key) LIMIT ? OFFSET ?",
|
async with conn.execute(
|
||||||
(limit, offset),
|
"SELECT * FROM contacts ORDER BY COALESCE(name, public_key) LIMIT ? OFFSET ?",
|
||||||
)
|
(limit, offset),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_recently_contacted_non_repeaters(limit: int = 200) -> list[Contact]:
|
async def get_recently_contacted_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||||
"""Get recently interacted-with non-repeater contacts."""
|
"""Get recently interacted-with non-repeater contacts."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT * FROM contacts
|
"""
|
||||||
WHERE type != 2 AND last_contacted IS NOT NULL AND length(public_key) = 64
|
SELECT * FROM contacts
|
||||||
ORDER BY last_contacted DESC
|
WHERE type != 2 AND last_contacted IS NOT NULL AND length(public_key) = 64
|
||||||
LIMIT ?
|
ORDER BY last_contacted DESC
|
||||||
""",
|
LIMIT ?
|
||||||
(limit,),
|
""",
|
||||||
)
|
(limit,),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_recently_dm_active_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||||
|
"""Get non-repeater contacts with the most recent DM activity (sent or received)."""
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT c.*
|
||||||
|
FROM contacts c
|
||||||
|
INNER JOIN (
|
||||||
|
SELECT conversation_key, MAX(received_at) AS last_dm
|
||||||
|
FROM messages
|
||||||
|
WHERE type = 'PRIV'
|
||||||
|
GROUP BY conversation_key
|
||||||
|
) m ON c.public_key = m.conversation_key
|
||||||
|
WHERE c.type != 2 AND length(c.public_key) = 64
|
||||||
|
ORDER BY m.last_dm DESC
|
||||||
|
LIMIT ?
|
||||||
|
""",
|
||||||
|
(limit,),
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_recently_advertised_non_repeaters(limit: int = 200) -> list[Contact]:
|
async def get_recently_advertised_non_repeaters(limit: int = 200) -> list[Contact]:
|
||||||
"""Get recently advert-heard non-repeater contacts."""
|
"""Get recently advert-heard non-repeater contacts."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT * FROM contacts
|
"""
|
||||||
WHERE type != 2 AND last_advert IS NOT NULL AND length(public_key) = 64
|
SELECT * FROM contacts
|
||||||
ORDER BY last_advert DESC
|
WHERE type != 2 AND last_advert IS NOT NULL AND length(public_key) = 64
|
||||||
LIMIT ?
|
ORDER BY last_advert DESC
|
||||||
""",
|
LIMIT ?
|
||||||
(limit,),
|
""",
|
||||||
)
|
(limit,),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -317,27 +354,44 @@ class ContactRepository:
|
|||||||
path_hash_mode: int | None = None,
|
path_hash_mode: int | None = None,
|
||||||
updated_at: int | None = None,
|
updated_at: int | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
"""Persist a learned direct route for a contact.
|
||||||
|
|
||||||
|
Both callers (the RF PATH packet processor and the firmware PATH_UPDATE
|
||||||
|
event handler) are RF-backed: firmware ``onContactPathUpdated`` only
|
||||||
|
fires from ``onContactPathRecv`` during RF PATH packet reception. So
|
||||||
|
this method also advances ``last_seen`` monotonically. Never moves
|
||||||
|
``last_seen`` backwards if an out-of-order arrival lands with an older
|
||||||
|
timestamp.
|
||||||
|
"""
|
||||||
normalized_path, normalized_path_len, normalized_hash_mode = normalize_contact_route(
|
normalized_path, normalized_path_len, normalized_hash_mode = normalize_contact_route(
|
||||||
path,
|
path,
|
||||||
path_len,
|
path_len,
|
||||||
path_hash_mode,
|
path_hash_mode,
|
||||||
)
|
)
|
||||||
ts = updated_at if updated_at is not None else int(time.time())
|
ts = updated_at if updated_at is not None else int(time.time())
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""UPDATE contacts SET direct_path = ?, direct_path_len = ?,
|
async with conn.execute(
|
||||||
direct_path_hash_mode = COALESCE(?, direct_path_hash_mode),
|
"""UPDATE contacts SET direct_path = ?, direct_path_len = ?,
|
||||||
direct_path_updated_at = ?,
|
direct_path_hash_mode = COALESCE(?, direct_path_hash_mode),
|
||||||
last_seen = ? WHERE public_key = ?""",
|
direct_path_updated_at = ?,
|
||||||
(
|
last_seen = CASE
|
||||||
normalized_path,
|
WHEN last_seen IS NULL THEN ?
|
||||||
normalized_path_len,
|
WHEN ? > last_seen THEN ?
|
||||||
normalized_hash_mode,
|
ELSE last_seen
|
||||||
ts,
|
END
|
||||||
ts,
|
WHERE public_key = ?""",
|
||||||
public_key.lower(),
|
(
|
||||||
),
|
normalized_path,
|
||||||
)
|
normalized_path_len,
|
||||||
await db.conn.commit()
|
normalized_hash_mode,
|
||||||
|
ts,
|
||||||
|
ts,
|
||||||
|
ts,
|
||||||
|
ts,
|
||||||
|
public_key.lower(),
|
||||||
|
),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def set_routing_override(
|
async def set_routing_override(
|
||||||
@@ -351,65 +405,71 @@ class ContactRepository:
|
|||||||
path_len,
|
path_len,
|
||||||
path_hash_mode,
|
path_hash_mode,
|
||||||
)
|
)
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
UPDATE contacts
|
"""
|
||||||
SET route_override_path = ?, route_override_len = ?, route_override_hash_mode = ?
|
UPDATE contacts
|
||||||
WHERE public_key = ?
|
SET route_override_path = ?, route_override_len = ?, route_override_hash_mode = ?
|
||||||
""",
|
WHERE public_key = ?
|
||||||
(
|
""",
|
||||||
normalized_path,
|
(
|
||||||
normalized_len,
|
normalized_path,
|
||||||
normalized_hash_mode,
|
normalized_len,
|
||||||
public_key.lower(),
|
normalized_hash_mode,
|
||||||
),
|
public_key.lower(),
|
||||||
)
|
),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def clear_routing_override(public_key: str) -> None:
|
async def clear_routing_override(public_key: str) -> None:
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
UPDATE contacts
|
"""
|
||||||
SET route_override_path = NULL,
|
UPDATE contacts
|
||||||
route_override_len = NULL,
|
SET route_override_path = NULL,
|
||||||
route_override_hash_mode = NULL
|
route_override_len = NULL,
|
||||||
WHERE public_key = ?
|
route_override_hash_mode = NULL
|
||||||
""",
|
WHERE public_key = ?
|
||||||
(public_key.lower(),),
|
""",
|
||||||
)
|
(public_key.lower(),),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def clear_on_radio_except(keep_keys: list[str]) -> None:
|
async def clear_on_radio_except(keep_keys: list[str]) -> None:
|
||||||
"""Set on_radio=False for all contacts NOT in keep_keys."""
|
"""Set on_radio=False for all contacts NOT in keep_keys."""
|
||||||
if not keep_keys:
|
async with db.tx() as conn:
|
||||||
await db.conn.execute("UPDATE contacts SET on_radio = 0 WHERE on_radio = 1")
|
if not keep_keys:
|
||||||
else:
|
async with conn.execute("UPDATE contacts SET on_radio = 0 WHERE on_radio = 1"):
|
||||||
placeholders = ",".join("?" * len(keep_keys))
|
pass
|
||||||
await db.conn.execute(
|
else:
|
||||||
f"UPDATE contacts SET on_radio = 0 WHERE on_radio = 1 AND public_key NOT IN ({placeholders})",
|
placeholders = ",".join("?" * len(keep_keys))
|
||||||
keep_keys,
|
async with conn.execute(
|
||||||
)
|
f"UPDATE contacts SET on_radio = 0 WHERE on_radio = 1 AND public_key NOT IN ({placeholders})",
|
||||||
await db.conn.commit()
|
keep_keys,
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_favorites() -> list[Contact]:
|
async def get_favorites() -> list[Contact]:
|
||||||
"""Return all contacts marked as favorite."""
|
"""Return all contacts marked as favorite."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts WHERE favorite = 1 AND LENGTH(public_key) = 64"
|
async with conn.execute(
|
||||||
)
|
"SELECT * FROM contacts WHERE favorite = 1 AND LENGTH(public_key) = 64"
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def set_favorite(public_key: str, value: bool) -> None:
|
async def set_favorite(public_key: str, value: bool) -> None:
|
||||||
"""Set or clear the favorite flag for a contact."""
|
"""Set or clear the favorite flag for a contact."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE contacts SET favorite = ? WHERE public_key = ?",
|
async with conn.execute(
|
||||||
(1 if value else 0, public_key.lower()),
|
"UPDATE contacts SET favorite = ? WHERE public_key = ?",
|
||||||
)
|
(1 if value else 0, public_key.lower()),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def delete(public_key: str) -> None:
|
async def delete(public_key: str) -> None:
|
||||||
@@ -417,18 +477,53 @@ class ContactRepository:
|
|||||||
# contact_name_history and contact_advert_paths cascade via FK.
|
# contact_name_history and contact_advert_paths cascade via FK.
|
||||||
# Messages are intentionally preserved so history re-surfaces
|
# Messages are intentionally preserved so history re-surfaces
|
||||||
# if the contact is re-added later.
|
# if the contact is re-added later.
|
||||||
await db.conn.execute("DELETE FROM contacts WHERE public_key = ?", (normalized,))
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute("DELETE FROM contacts WHERE public_key = ?", (normalized,)):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update_last_contacted(public_key: str, timestamp: int | None = None) -> None:
|
async def update_last_contacted(public_key: str, timestamp: int | None = None) -> None:
|
||||||
"""Update the last_contacted timestamp for a contact."""
|
"""Update the last_contacted timestamp for a contact.
|
||||||
|
|
||||||
|
``last_contacted`` tracks the most recent direct-conversation activity
|
||||||
|
with this contact in either direction (incoming or outgoing DM). It is
|
||||||
|
the field that powers "recent conversations" ordering on the frontend.
|
||||||
|
|
||||||
|
It deliberately does not touch ``last_seen``: ``last_seen`` is reserved
|
||||||
|
for actual RF reception from the contact, and outgoing sends are not
|
||||||
|
evidence that we heard from them. RF observations from DM ingest update
|
||||||
|
``last_seen`` via :meth:`touch_last_seen` on incoming DMs only.
|
||||||
|
"""
|
||||||
ts = timestamp if timestamp is not None else int(time.time())
|
ts = timestamp if timestamp is not None else int(time.time())
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE contacts SET last_contacted = ?, last_seen = ? WHERE public_key = ?",
|
async with conn.execute(
|
||||||
(ts, ts, public_key.lower()),
|
"UPDATE contacts SET last_contacted = ? WHERE public_key = ?",
|
||||||
)
|
(ts, public_key.lower()),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def touch_last_seen(public_key: str, timestamp: int) -> None:
|
||||||
|
"""Monotonically bump last_seen for a contact from an RF observation.
|
||||||
|
|
||||||
|
Never moves last_seen backwards; a no-op if the contact row does not
|
||||||
|
exist. Use this from packet-ingest paths that have attributed a packet
|
||||||
|
to a specific contact pubkey (advert, incoming DM, decrypted PATH, etc.).
|
||||||
|
"""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE contacts
|
||||||
|
SET last_seen = CASE
|
||||||
|
WHEN last_seen IS NULL THEN ?
|
||||||
|
WHEN ? > last_seen THEN ?
|
||||||
|
ELSE last_seen
|
||||||
|
END
|
||||||
|
WHERE public_key = ?
|
||||||
|
""",
|
||||||
|
(timestamp, timestamp, timestamp, public_key.lower()),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update_last_read_at(public_key: str, timestamp: int | None = None) -> bool:
|
async def update_last_read_at(public_key: str, timestamp: int | None = None) -> bool:
|
||||||
@@ -437,22 +532,25 @@ class ContactRepository:
|
|||||||
Returns True if a row was updated, False if contact not found.
|
Returns True if a row was updated, False if contact not found.
|
||||||
"""
|
"""
|
||||||
ts = timestamp if timestamp is not None else int(time.time())
|
ts = timestamp if timestamp is not None else int(time.time())
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE contacts SET last_read_at = ? WHERE public_key = ?",
|
async with conn.execute(
|
||||||
(ts, public_key.lower()),
|
"UPDATE contacts SET last_read_at = ? WHERE public_key = ?",
|
||||||
)
|
(ts, public_key.lower()),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount > 0
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def promote_prefix_placeholders(full_key: str) -> list[str]:
|
async def promote_prefix_placeholders(full_key: str) -> list[str]:
|
||||||
"""Promote prefix-only placeholder contacts to a resolved full key.
|
"""Promote prefix-only placeholder contacts to a resolved full key.
|
||||||
|
|
||||||
Returns the placeholder public keys that were merged into the full key.
|
Returns the placeholder public keys that were merged into the full key.
|
||||||
|
All operations for the promotion happen inside one ``db.tx()`` so
|
||||||
|
partial promotions never leak to readers between steps.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
async def migrate_child_rows(old_key: str, new_key: str) -> None:
|
async def migrate_child_rows(conn, old_key: str, new_key: str) -> None:
|
||||||
await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
||||||
SELECT ?, name, first_seen, last_seen
|
SELECT ?, name, first_seen, last_seen
|
||||||
@@ -463,8 +561,9 @@ class ContactRepository:
|
|||||||
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
||||||
""",
|
""",
|
||||||
(new_key, old_key),
|
(new_key, old_key),
|
||||||
)
|
):
|
||||||
await db.conn.execute(
|
pass
|
||||||
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
INSERT INTO contact_advert_paths
|
INSERT INTO contact_advert_paths
|
||||||
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
||||||
@@ -477,132 +576,138 @@ class ContactRepository:
|
|||||||
heard_count = contact_advert_paths.heard_count + excluded.heard_count
|
heard_count = contact_advert_paths.heard_count + excluded.heard_count
|
||||||
""",
|
""",
|
||||||
(new_key, old_key),
|
(new_key, old_key),
|
||||||
)
|
):
|
||||||
await db.conn.execute(
|
pass
|
||||||
|
async with conn.execute(
|
||||||
"DELETE FROM contact_name_history WHERE public_key = ?",
|
"DELETE FROM contact_name_history WHERE public_key = ?",
|
||||||
(old_key,),
|
(old_key,),
|
||||||
)
|
):
|
||||||
await db.conn.execute(
|
pass
|
||||||
|
async with conn.execute(
|
||||||
"DELETE FROM contact_advert_paths WHERE public_key = ?",
|
"DELETE FROM contact_advert_paths WHERE public_key = ?",
|
||||||
(old_key,),
|
(old_key,),
|
||||||
)
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
normalized_full_key = full_key.lower()
|
normalized_full_key = full_key.lower()
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"""
|
|
||||||
SELECT public_key, last_seen, last_contacted, first_seen, last_read_at
|
|
||||||
FROM contacts
|
|
||||||
WHERE length(public_key) < 64
|
|
||||||
AND ? LIKE public_key || '%'
|
|
||||||
ORDER BY length(public_key) DESC, public_key
|
|
||||||
""",
|
|
||||||
(normalized_full_key,),
|
|
||||||
)
|
|
||||||
rows = list(await cursor.fetchall())
|
|
||||||
if not rows:
|
|
||||||
return []
|
|
||||||
|
|
||||||
promoted_keys: list[str] = []
|
promoted_keys: list[str] = []
|
||||||
|
async with db.tx() as conn:
|
||||||
for row in rows:
|
async with conn.execute(
|
||||||
old_key = row["public_key"]
|
|
||||||
if old_key == normalized_full_key:
|
|
||||||
continue
|
|
||||||
|
|
||||||
match_cursor = await db.conn.execute(
|
|
||||||
"""
|
"""
|
||||||
SELECT COUNT(*) AS match_count
|
SELECT public_key, last_seen, last_contacted, first_seen, last_read_at
|
||||||
FROM contacts
|
FROM contacts
|
||||||
WHERE length(public_key) = 64
|
WHERE length(public_key) < 64
|
||||||
AND public_key LIKE ? || '%'
|
AND ? LIKE public_key || '%'
|
||||||
|
ORDER BY length(public_key) DESC, public_key
|
||||||
""",
|
""",
|
||||||
(old_key,),
|
(normalized_full_key,),
|
||||||
)
|
) as cursor:
|
||||||
match_row = await match_cursor.fetchone()
|
rows = list(await cursor.fetchall())
|
||||||
match_count = match_row["match_count"] if match_row is not None else 0
|
if not rows:
|
||||||
if match_count != 1:
|
return []
|
||||||
logger.warning(
|
|
||||||
"Skipping prefix promotion for %s: %d full-key contacts match (expected 1)",
|
|
||||||
old_key,
|
|
||||||
match_count,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
await migrate_child_rows(old_key, normalized_full_key)
|
for row in rows:
|
||||||
|
old_key = row["public_key"]
|
||||||
|
if old_key == normalized_full_key:
|
||||||
|
continue
|
||||||
|
|
||||||
# Merge timestamp metadata from the old prefix contact into the
|
async with conn.execute(
|
||||||
# full-key contact (which all callers guarantee already exists),
|
"""
|
||||||
# then delete the prefix placeholder.
|
SELECT COUNT(*) AS match_count
|
||||||
await db.conn.execute(
|
FROM contacts
|
||||||
"""
|
WHERE length(public_key) = 64
|
||||||
UPDATE contacts
|
AND public_key LIKE ? || '%'
|
||||||
SET last_seen = CASE
|
""",
|
||||||
WHEN contacts.last_seen IS NULL THEN ?
|
(old_key,),
|
||||||
WHEN ? IS NULL THEN contacts.last_seen
|
) as match_cursor:
|
||||||
WHEN ? > contacts.last_seen THEN ?
|
match_row = await match_cursor.fetchone()
|
||||||
ELSE contacts.last_seen
|
match_count = match_row["match_count"] if match_row is not None else 0
|
||||||
END,
|
if match_count != 1:
|
||||||
last_contacted = CASE
|
logger.warning(
|
||||||
WHEN contacts.last_contacted IS NULL THEN ?
|
"Skipping prefix promotion for %s: %d full-key contacts match (expected 1)",
|
||||||
WHEN ? IS NULL THEN contacts.last_contacted
|
old_key,
|
||||||
WHEN ? > contacts.last_contacted THEN ?
|
match_count,
|
||||||
ELSE contacts.last_contacted
|
)
|
||||||
END,
|
continue
|
||||||
first_seen = CASE
|
|
||||||
WHEN contacts.first_seen IS NULL THEN ?
|
|
||||||
WHEN ? IS NULL THEN contacts.first_seen
|
|
||||||
WHEN ? < contacts.first_seen THEN ?
|
|
||||||
ELSE contacts.first_seen
|
|
||||||
END,
|
|
||||||
last_read_at = CASE
|
|
||||||
WHEN contacts.last_read_at IS NULL THEN ?
|
|
||||||
WHEN ? IS NULL THEN contacts.last_read_at
|
|
||||||
WHEN ? > contacts.last_read_at THEN ?
|
|
||||||
ELSE contacts.last_read_at
|
|
||||||
END
|
|
||||||
WHERE public_key = ?
|
|
||||||
""",
|
|
||||||
(
|
|
||||||
row["last_seen"],
|
|
||||||
row["last_seen"],
|
|
||||||
row["last_seen"],
|
|
||||||
row["last_seen"],
|
|
||||||
row["last_contacted"],
|
|
||||||
row["last_contacted"],
|
|
||||||
row["last_contacted"],
|
|
||||||
row["last_contacted"],
|
|
||||||
row["first_seen"],
|
|
||||||
row["first_seen"],
|
|
||||||
row["first_seen"],
|
|
||||||
row["first_seen"],
|
|
||||||
row["last_read_at"],
|
|
||||||
row["last_read_at"],
|
|
||||||
row["last_read_at"],
|
|
||||||
row["last_read_at"],
|
|
||||||
normalized_full_key,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
await db.conn.execute("DELETE FROM contacts WHERE public_key = ?", (old_key,))
|
|
||||||
|
|
||||||
promoted_keys.append(old_key)
|
await migrate_child_rows(conn, old_key, normalized_full_key)
|
||||||
|
|
||||||
|
# Merge timestamp metadata from the old prefix contact into the
|
||||||
|
# full-key contact (which all callers guarantee already exists),
|
||||||
|
# then delete the prefix placeholder.
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE contacts
|
||||||
|
SET last_seen = CASE
|
||||||
|
WHEN contacts.last_seen IS NULL THEN ?
|
||||||
|
WHEN ? IS NULL THEN contacts.last_seen
|
||||||
|
WHEN ? > contacts.last_seen THEN ?
|
||||||
|
ELSE contacts.last_seen
|
||||||
|
END,
|
||||||
|
last_contacted = CASE
|
||||||
|
WHEN contacts.last_contacted IS NULL THEN ?
|
||||||
|
WHEN ? IS NULL THEN contacts.last_contacted
|
||||||
|
WHEN ? > contacts.last_contacted THEN ?
|
||||||
|
ELSE contacts.last_contacted
|
||||||
|
END,
|
||||||
|
first_seen = CASE
|
||||||
|
WHEN contacts.first_seen IS NULL THEN ?
|
||||||
|
WHEN ? IS NULL THEN contacts.first_seen
|
||||||
|
WHEN ? < contacts.first_seen THEN ?
|
||||||
|
ELSE contacts.first_seen
|
||||||
|
END,
|
||||||
|
last_read_at = CASE
|
||||||
|
WHEN contacts.last_read_at IS NULL THEN ?
|
||||||
|
WHEN ? IS NULL THEN contacts.last_read_at
|
||||||
|
WHEN ? > contacts.last_read_at THEN ?
|
||||||
|
ELSE contacts.last_read_at
|
||||||
|
END
|
||||||
|
WHERE public_key = ?
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
row["last_seen"],
|
||||||
|
row["last_seen"],
|
||||||
|
row["last_seen"],
|
||||||
|
row["last_seen"],
|
||||||
|
row["last_contacted"],
|
||||||
|
row["last_contacted"],
|
||||||
|
row["last_contacted"],
|
||||||
|
row["last_contacted"],
|
||||||
|
row["first_seen"],
|
||||||
|
row["first_seen"],
|
||||||
|
row["first_seen"],
|
||||||
|
row["first_seen"],
|
||||||
|
row["last_read_at"],
|
||||||
|
row["last_read_at"],
|
||||||
|
row["last_read_at"],
|
||||||
|
row["last_read_at"],
|
||||||
|
normalized_full_key,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
async with conn.execute("DELETE FROM contacts WHERE public_key = ?", (old_key,)):
|
||||||
|
pass
|
||||||
|
|
||||||
|
promoted_keys.append(old_key)
|
||||||
|
|
||||||
await db.conn.commit()
|
|
||||||
return promoted_keys
|
return promoted_keys
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def mark_all_read(timestamp: int) -> None:
|
async def mark_all_read(timestamp: int) -> None:
|
||||||
"""Mark all contacts as read at the given timestamp."""
|
"""Mark all contacts as read at the given timestamp."""
|
||||||
await db.conn.execute("UPDATE contacts SET last_read_at = ?", (timestamp,))
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute("UPDATE contacts SET last_read_at = ?", (timestamp,)):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_pubkey_first_byte(hex_byte: str) -> list[Contact]:
|
async def get_by_pubkey_first_byte(hex_byte: str) -> list[Contact]:
|
||||||
"""Get contacts whose public key starts with the given hex byte (2 chars)."""
|
"""Get contacts whose public key starts with the given hex byte (2 chars)."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM contacts WHERE substr(public_key, 1, 2) = ?",
|
async with conn.execute(
|
||||||
(hex_byte.lower(),),
|
"SELECT * FROM contacts WHERE substr(public_key, 1, 2) = ?",
|
||||||
)
|
(hex_byte.lower(),),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactRepository._row_to_contact(row) for row in rows]
|
return [ContactRepository._row_to_contact(row) for row in rows]
|
||||||
|
|
||||||
|
|
||||||
@@ -641,71 +746,75 @@ class ContactAdvertPathRepository:
|
|||||||
normalized_path = path_hex.lower()
|
normalized_path = path_hex.lower()
|
||||||
path_len = hop_count if hop_count is not None else len(normalized_path) // 2
|
path_len = hop_count if hop_count is not None else len(normalized_path) // 2
|
||||||
|
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
INSERT INTO contact_advert_paths
|
"""
|
||||||
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
INSERT INTO contact_advert_paths
|
||||||
VALUES (?, ?, ?, ?, ?, 1)
|
(public_key, path_hex, path_len, first_seen, last_seen, heard_count)
|
||||||
ON CONFLICT(public_key, path_hex, path_len) DO UPDATE SET
|
VALUES (?, ?, ?, ?, ?, 1)
|
||||||
last_seen = MAX(contact_advert_paths.last_seen, excluded.last_seen),
|
ON CONFLICT(public_key, path_hex, path_len) DO UPDATE SET
|
||||||
heard_count = contact_advert_paths.heard_count + 1
|
last_seen = MAX(contact_advert_paths.last_seen, excluded.last_seen),
|
||||||
""",
|
heard_count = contact_advert_paths.heard_count + 1
|
||||||
(normalized_key, normalized_path, path_len, timestamp, timestamp),
|
""",
|
||||||
)
|
(normalized_key, normalized_path, path_len, timestamp, timestamp),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
# Keep only the N most recent unique paths per contact.
|
# Keep only the N most recent unique paths per contact.
|
||||||
await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
DELETE FROM contact_advert_paths
|
DELETE FROM contact_advert_paths
|
||||||
WHERE public_key = ?
|
WHERE public_key = ?
|
||||||
AND id NOT IN (
|
AND id NOT IN (
|
||||||
SELECT id
|
SELECT id
|
||||||
FROM contact_advert_paths
|
FROM contact_advert_paths
|
||||||
WHERE public_key = ?
|
WHERE public_key = ?
|
||||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||||
LIMIT ?
|
LIMIT ?
|
||||||
)
|
)
|
||||||
""",
|
""",
|
||||||
(normalized_key, normalized_key, max_paths),
|
(normalized_key, normalized_key, max_paths),
|
||||||
)
|
):
|
||||||
await db.conn.commit()
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_recent_for_contact(public_key: str, limit: int = 10) -> list[ContactAdvertPath]:
|
async def get_recent_for_contact(public_key: str, limit: int = 10) -> list[ContactAdvertPath]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT path_hex, path_len, first_seen, last_seen, heard_count
|
"""
|
||||||
FROM contact_advert_paths
|
SELECT path_hex, path_len, first_seen, last_seen, heard_count
|
||||||
WHERE public_key = ?
|
FROM contact_advert_paths
|
||||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
WHERE public_key = ?
|
||||||
LIMIT ?
|
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||||
""",
|
LIMIT ?
|
||||||
(public_key.lower(), limit),
|
""",
|
||||||
)
|
(public_key.lower(), limit),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [ContactAdvertPathRepository._row_to_path(row) for row in rows]
|
return [ContactAdvertPathRepository._row_to_path(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_recent_for_all_contacts(
|
async def get_recent_for_all_contacts(
|
||||||
limit_per_contact: int = 10,
|
limit_per_contact: int = 10,
|
||||||
) -> list[ContactAdvertPathSummary]:
|
) -> list[ContactAdvertPathSummary]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT public_key, path_hex, path_len, first_seen, last_seen, heard_count
|
"""
|
||||||
FROM (
|
SELECT public_key, path_hex, path_len, first_seen, last_seen, heard_count
|
||||||
SELECT *,
|
FROM (
|
||||||
ROW_NUMBER() OVER (
|
SELECT *,
|
||||||
PARTITION BY public_key
|
ROW_NUMBER() OVER (
|
||||||
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
PARTITION BY public_key
|
||||||
) AS rn
|
ORDER BY last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||||
FROM contact_advert_paths
|
) AS rn
|
||||||
)
|
FROM contact_advert_paths
|
||||||
WHERE rn <= ?
|
)
|
||||||
ORDER BY public_key ASC, last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
WHERE rn <= ?
|
||||||
""",
|
ORDER BY public_key ASC, last_seen DESC, heard_count DESC, path_len ASC, path_hex ASC
|
||||||
(limit_per_contact,),
|
""",
|
||||||
)
|
(limit_per_contact,),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
|
||||||
grouped: dict[str, list[ContactAdvertPath]] = {}
|
grouped: dict[str, list[ContactAdvertPath]] = {}
|
||||||
for row in rows:
|
for row in rows:
|
||||||
@@ -727,29 +836,31 @@ class ContactNameHistoryRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def record_name(public_key: str, name: str, timestamp: int) -> None:
|
async def record_name(public_key: str, name: str, timestamp: int) -> None:
|
||||||
"""Record a name observation. Upserts: updates last_seen if name already known."""
|
"""Record a name observation. Upserts: updates last_seen if name already known."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
"""
|
||||||
VALUES (?, ?, ?, ?)
|
INSERT INTO contact_name_history (public_key, name, first_seen, last_seen)
|
||||||
ON CONFLICT(public_key, name) DO UPDATE SET
|
VALUES (?, ?, ?, ?)
|
||||||
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
ON CONFLICT(public_key, name) DO UPDATE SET
|
||||||
""",
|
last_seen = MAX(contact_name_history.last_seen, excluded.last_seen)
|
||||||
(public_key.lower(), name, timestamp, timestamp),
|
""",
|
||||||
)
|
(public_key.lower(), name, timestamp, timestamp),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_history(public_key: str) -> list[ContactNameHistory]:
|
async def get_history(public_key: str) -> list[ContactNameHistory]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT name, first_seen, last_seen
|
"""
|
||||||
FROM contact_name_history
|
SELECT name, first_seen, last_seen
|
||||||
WHERE public_key = ?
|
FROM contact_name_history
|
||||||
ORDER BY last_seen DESC
|
WHERE public_key = ?
|
||||||
""",
|
ORDER BY last_seen DESC
|
||||||
(public_key.lower(),),
|
""",
|
||||||
)
|
(public_key.lower(),),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [
|
return [
|
||||||
ContactNameHistory(
|
ContactNameHistory(
|
||||||
name=row["name"], first_seen=row["first_seen"], last_seen=row["last_seen"]
|
name=row["name"], first_seen=row["first_seen"], last_seen=row["last_seen"]
|
||||||
|
|||||||
+61
-44
@@ -6,6 +6,8 @@ import time
|
|||||||
import uuid
|
import uuid
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
import aiosqlite
|
||||||
|
|
||||||
from app.database import db
|
from app.database import db
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -31,26 +33,37 @@ def _row_to_dict(row: Any) -> dict[str, Any]:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_in_conn(conn: aiosqlite.Connection, config_id: str) -> dict[str, Any] | None:
|
||||||
|
"""Fetch a config using an already-acquired connection.
|
||||||
|
|
||||||
|
Used by ``create`` and ``update`` to return the freshly-written row
|
||||||
|
without re-entering the non-reentrant DB lock.
|
||||||
|
"""
|
||||||
|
async with conn.execute("SELECT * FROM fanout_configs WHERE id = ?", (config_id,)) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
return _row_to_dict(row)
|
||||||
|
|
||||||
|
|
||||||
class FanoutConfigRepository:
|
class FanoutConfigRepository:
|
||||||
"""CRUD operations for fanout_configs table."""
|
"""CRUD operations for fanout_configs table."""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_all() -> list[dict[str, Any]]:
|
async def get_all() -> list[dict[str, Any]]:
|
||||||
"""Get all fanout configs ordered by sort_order."""
|
"""Get all fanout configs ordered by sort_order."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM fanout_configs ORDER BY sort_order, created_at"
|
async with conn.execute(
|
||||||
)
|
"SELECT * FROM fanout_configs ORDER BY sort_order, created_at"
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [_row_to_dict(row) for row in rows]
|
return [_row_to_dict(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get(config_id: str) -> dict[str, Any] | None:
|
async def get(config_id: str) -> dict[str, Any] | None:
|
||||||
"""Get a single fanout config by ID."""
|
"""Get a single fanout config by ID."""
|
||||||
cursor = await db.conn.execute("SELECT * FROM fanout_configs WHERE id = ?", (config_id,))
|
async with db.readonly() as conn:
|
||||||
row = await cursor.fetchone()
|
return await _get_in_conn(conn, config_id)
|
||||||
if row is None:
|
|
||||||
return None
|
|
||||||
return _row_to_dict(row)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def create(
|
async def create(
|
||||||
@@ -65,39 +78,41 @@ class FanoutConfigRepository:
|
|||||||
new_id = config_id or str(uuid.uuid4())
|
new_id = config_id or str(uuid.uuid4())
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
|
|
||||||
# Get next sort_order
|
async with db.tx() as conn:
|
||||||
cursor = await db.conn.execute(
|
# Determine next sort_order under the same lock as the insert,
|
||||||
"SELECT COALESCE(MAX(sort_order), -1) + 1 FROM fanout_configs"
|
# so two concurrent ``create()`` calls cannot collide.
|
||||||
)
|
async with conn.execute(
|
||||||
row = await cursor.fetchone()
|
"SELECT COALESCE(MAX(sort_order), -1) + 1 FROM fanout_configs"
|
||||||
sort_order = row[0] if row else 0
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
sort_order = row[0] if row else 0
|
||||||
|
|
||||||
await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
INSERT INTO fanout_configs (id, type, name, enabled, config, scope, sort_order, created_at)
|
INSERT INTO fanout_configs (id, type, name, enabled, config, scope, sort_order, created_at)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
""",
|
""",
|
||||||
(
|
(
|
||||||
new_id,
|
new_id,
|
||||||
config_type,
|
config_type,
|
||||||
name,
|
name,
|
||||||
1 if enabled else 0,
|
1 if enabled else 0,
|
||||||
json.dumps(config),
|
json.dumps(config),
|
||||||
json.dumps(scope),
|
json.dumps(scope),
|
||||||
sort_order,
|
sort_order,
|
||||||
now,
|
now,
|
||||||
),
|
),
|
||||||
)
|
):
|
||||||
await db.conn.commit()
|
pass
|
||||||
|
|
||||||
result = await FanoutConfigRepository.get(new_id)
|
result = await _get_in_conn(conn, new_id)
|
||||||
assert result is not None
|
assert result is not None
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update(config_id: str, **fields: Any) -> dict[str, Any] | None:
|
async def update(config_id: str, **fields: Any) -> dict[str, Any] | None:
|
||||||
"""Update a fanout config. Only provided fields are updated."""
|
"""Update a fanout config. Only provided fields are updated."""
|
||||||
updates = []
|
updates: list[str] = []
|
||||||
params: list[Any] = []
|
params: list[Any] = []
|
||||||
|
|
||||||
for field in ("name", "enabled", "config", "scope", "sort_order"):
|
for field in ("name", "enabled", "config", "scope", "sort_order"):
|
||||||
@@ -115,23 +130,25 @@ class FanoutConfigRepository:
|
|||||||
|
|
||||||
params.append(config_id)
|
params.append(config_id)
|
||||||
query = f"UPDATE fanout_configs SET {', '.join(updates)} WHERE id = ?"
|
query = f"UPDATE fanout_configs SET {', '.join(updates)} WHERE id = ?"
|
||||||
await db.conn.execute(query, params)
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute(query, params):
|
||||||
|
pass
|
||||||
return await FanoutConfigRepository.get(config_id)
|
return await _get_in_conn(conn, config_id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def delete(config_id: str) -> None:
|
async def delete(config_id: str) -> None:
|
||||||
"""Delete a fanout config."""
|
"""Delete a fanout config."""
|
||||||
await db.conn.execute("DELETE FROM fanout_configs WHERE id = ?", (config_id,))
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute("DELETE FROM fanout_configs WHERE id = ?", (config_id,)):
|
||||||
|
pass
|
||||||
_configs_cache.pop(config_id, None)
|
_configs_cache.pop(config_id, None)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_enabled() -> list[dict[str, Any]]:
|
async def get_enabled() -> list[dict[str, Any]]:
|
||||||
"""Get all enabled fanout configs."""
|
"""Get all enabled fanout configs."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT * FROM fanout_configs WHERE enabled = 1 ORDER BY sort_order, created_at"
|
async with conn.execute(
|
||||||
)
|
"SELECT * FROM fanout_configs WHERE enabled = 1 ORDER BY sort_order, created_at"
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [_row_to_dict(row) for row in rows]
|
return [_row_to_dict(row) for row in rows]
|
||||||
|
|||||||
+392
-346
@@ -89,32 +89,34 @@ class MessageRepository:
|
|||||||
# Normalize sender_key to lowercase so queries can match without LOWER().
|
# Normalize sender_key to lowercase so queries can match without LOWER().
|
||||||
normalized_sender_key = sender_key.lower() if sender_key else sender_key
|
normalized_sender_key = sender_key.lower() if sender_key else sender_key
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
|
"""
|
||||||
received_at, paths, txt_type, signature, outgoing,
|
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
|
||||||
sender_name, sender_key)
|
received_at, paths, txt_type, signature, outgoing,
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
sender_name, sender_key)
|
||||||
""",
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
(
|
""",
|
||||||
msg_type,
|
(
|
||||||
conversation_key,
|
msg_type,
|
||||||
text,
|
conversation_key,
|
||||||
sender_timestamp,
|
text,
|
||||||
received_at,
|
sender_timestamp,
|
||||||
paths_json,
|
received_at,
|
||||||
txt_type,
|
paths_json,
|
||||||
signature,
|
txt_type,
|
||||||
outgoing,
|
signature,
|
||||||
sender_name,
|
outgoing,
|
||||||
normalized_sender_key,
|
sender_name,
|
||||||
),
|
normalized_sender_key,
|
||||||
)
|
),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
|
rowcount = cursor.rowcount
|
||||||
|
lastrowid = cursor.lastrowid
|
||||||
# rowcount is 0 if INSERT was ignored due to UNIQUE constraint violation
|
# rowcount is 0 if INSERT was ignored due to UNIQUE constraint violation
|
||||||
if cursor.rowcount == 0:
|
if rowcount == 0:
|
||||||
return None
|
return None
|
||||||
return cursor.lastrowid
|
return lastrowid
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def add_path(
|
async def add_path(
|
||||||
@@ -142,17 +144,20 @@ class MessageRepository:
|
|||||||
if snr is not None:
|
if snr is not None:
|
||||||
entry["snr"] = snr
|
entry["snr"] = snr
|
||||||
new_entry = json.dumps(entry)
|
new_entry = json.dumps(entry)
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""UPDATE messages SET paths = json_insert(
|
async with conn.execute(
|
||||||
COALESCE(paths, '[]'), '$[#]', json(?)
|
"""UPDATE messages SET paths = json_insert(
|
||||||
) WHERE id = ?""",
|
COALESCE(paths, '[]'), '$[#]', json(?)
|
||||||
(new_entry, message_id),
|
) WHERE id = ?""",
|
||||||
)
|
(new_entry, message_id),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
# Read back the full list for the return value
|
# Read back the full list for the return value, same transaction.
|
||||||
cursor = await db.conn.execute("SELECT paths FROM messages WHERE id = ?", (message_id,))
|
async with conn.execute(
|
||||||
row = await cursor.fetchone()
|
"SELECT paths FROM messages WHERE id = ?", (message_id,)
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row or not row["paths"]:
|
if not row or not row["paths"]:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@@ -171,23 +176,24 @@ class MessageRepository:
|
|||||||
only a prefix as conversation_key are updated to use the full key.
|
only a prefix as conversation_key are updated to use the full key.
|
||||||
"""
|
"""
|
||||||
lower_key = full_key.lower()
|
lower_key = full_key.lower()
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""UPDATE messages SET conversation_key = ?,
|
async with conn.execute(
|
||||||
sender_key = CASE
|
"""UPDATE messages SET conversation_key = ?,
|
||||||
WHEN sender_key IS NOT NULL AND length(sender_key) < 64
|
sender_key = CASE
|
||||||
AND ? LIKE sender_key || '%'
|
WHEN sender_key IS NOT NULL AND length(sender_key) < 64
|
||||||
THEN ? ELSE sender_key END
|
AND ? LIKE sender_key || '%'
|
||||||
WHERE type = 'PRIV' AND length(conversation_key) < 64
|
THEN ? ELSE sender_key END
|
||||||
AND ? LIKE conversation_key || '%'
|
WHERE type = 'PRIV' AND length(conversation_key) < 64
|
||||||
AND (
|
AND ? LIKE conversation_key || '%'
|
||||||
SELECT COUNT(*) FROM contacts
|
AND (
|
||||||
WHERE length(public_key) = 64
|
SELECT COUNT(*) FROM contacts
|
||||||
AND public_key LIKE messages.conversation_key || '%'
|
WHERE length(public_key) = 64
|
||||||
) = 1""",
|
AND public_key LIKE messages.conversation_key || '%'
|
||||||
(lower_key, lower_key, lower_key, lower_key),
|
) = 1""",
|
||||||
)
|
(lower_key, lower_key, lower_key, lower_key),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def backfill_channel_sender_key(public_key: str, name: str) -> int:
|
async def backfill_channel_sender_key(public_key: str, name: str) -> int:
|
||||||
@@ -197,21 +203,22 @@ class MessageRepository:
|
|||||||
any channel messages with a matching sender_name but no sender_key
|
any channel messages with a matching sender_name but no sender_key
|
||||||
are updated to associate them with this contact's public key.
|
are updated to associate them with this contact's public key.
|
||||||
"""
|
"""
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"""UPDATE messages SET sender_key = ?
|
async with conn.execute(
|
||||||
WHERE type = 'CHAN' AND sender_name = ? AND sender_key IS NULL
|
"""UPDATE messages SET sender_key = ?
|
||||||
AND (
|
WHERE type = 'CHAN' AND sender_name = ? AND sender_key IS NULL
|
||||||
SELECT COUNT(*) FROM contacts
|
AND (
|
||||||
WHERE name = ?
|
SELECT COUNT(*) FROM contacts
|
||||||
) = 1
|
WHERE name = ?
|
||||||
AND EXISTS (
|
) = 1
|
||||||
SELECT 1 FROM contacts
|
AND EXISTS (
|
||||||
WHERE public_key = ? AND name = ?
|
SELECT 1 FROM contacts
|
||||||
)""",
|
WHERE public_key = ? AND name = ?
|
||||||
(public_key.lower(), name, name, public_key.lower(), name),
|
)""",
|
||||||
)
|
(public_key.lower(), name, name, public_key.lower(), name),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _normalize_conversation_key(conversation_key: str) -> tuple[str, str]:
|
def _normalize_conversation_key(conversation_key: str) -> tuple[str, str]:
|
||||||
@@ -462,8 +469,9 @@ class MessageRepository:
|
|||||||
query += " OFFSET ?"
|
query += " OFFSET ?"
|
||||||
params.append(offset)
|
params.append(offset)
|
||||||
|
|
||||||
cursor = await db.conn.execute(query, params)
|
async with db.readonly() as conn:
|
||||||
rows = await cursor.fetchall()
|
async with conn.execute(query, params) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [MessageRepository._row_to_message(row) for row in rows]
|
return [MessageRepository._row_to_message(row) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -501,51 +509,54 @@ class MessageRepository:
|
|||||||
where_sql = " AND ".join(["1=1", *where_parts])
|
where_sql = " AND ".join(["1=1", *where_parts])
|
||||||
|
|
||||||
# 1. Get the target message (must satisfy filters if provided)
|
# 1. Get the target message (must satisfy filters if provided)
|
||||||
target_cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
f"SELECT {MessageRepository._message_select('messages')} "
|
async with conn.execute(
|
||||||
f"FROM messages WHERE id = ? AND {where_sql}",
|
f"SELECT {MessageRepository._message_select('messages')} "
|
||||||
(message_id, *base_params),
|
f"FROM messages WHERE id = ? AND {where_sql}",
|
||||||
)
|
(message_id, *base_params),
|
||||||
target_row = await target_cursor.fetchone()
|
) as target_cursor:
|
||||||
if not target_row:
|
target_row = await target_cursor.fetchone()
|
||||||
return [], False, False
|
if not target_row:
|
||||||
|
return [], False, False
|
||||||
|
|
||||||
target = MessageRepository._row_to_message(target_row)
|
target = MessageRepository._row_to_message(target_row)
|
||||||
|
|
||||||
# 2. Get context_size+1 messages before target (DESC)
|
# 2. Get context_size+1 messages before target (DESC)
|
||||||
before_query = f"""
|
before_query = f"""
|
||||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||||
AND (received_at < ? OR (received_at = ? AND id < ?))
|
AND (received_at < ? OR (received_at = ? AND id < ?))
|
||||||
ORDER BY received_at DESC, id DESC LIMIT ?
|
ORDER BY received_at DESC, id DESC LIMIT ?
|
||||||
"""
|
"""
|
||||||
before_params = [
|
before_params = [
|
||||||
*base_params,
|
*base_params,
|
||||||
target.received_at,
|
target.received_at,
|
||||||
target.received_at,
|
target.received_at,
|
||||||
target.id,
|
target.id,
|
||||||
context_size + 1,
|
context_size + 1,
|
||||||
]
|
]
|
||||||
before_cursor = await db.conn.execute(before_query, before_params)
|
async with conn.execute(before_query, before_params) as before_cursor:
|
||||||
before_rows = list(await before_cursor.fetchall())
|
before_rows = list(await before_cursor.fetchall())
|
||||||
|
|
||||||
has_older = len(before_rows) > context_size
|
has_older = len(before_rows) > context_size
|
||||||
before_messages = [MessageRepository._row_to_message(r) for r in before_rows[:context_size]]
|
before_messages = [
|
||||||
|
MessageRepository._row_to_message(r) for r in before_rows[:context_size]
|
||||||
|
]
|
||||||
|
|
||||||
# 3. Get context_size+1 messages after target (ASC)
|
# 3. Get context_size+1 messages after target (ASC)
|
||||||
after_query = f"""
|
after_query = f"""
|
||||||
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
SELECT {MessageRepository._message_select("messages")} FROM messages WHERE {where_sql}
|
||||||
AND (received_at > ? OR (received_at = ? AND id > ?))
|
AND (received_at > ? OR (received_at = ? AND id > ?))
|
||||||
ORDER BY received_at ASC, id ASC LIMIT ?
|
ORDER BY received_at ASC, id ASC LIMIT ?
|
||||||
"""
|
"""
|
||||||
after_params = [
|
after_params = [
|
||||||
*base_params,
|
*base_params,
|
||||||
target.received_at,
|
target.received_at,
|
||||||
target.received_at,
|
target.received_at,
|
||||||
target.id,
|
target.id,
|
||||||
context_size + 1,
|
context_size + 1,
|
||||||
]
|
]
|
||||||
after_cursor = await db.conn.execute(after_query, after_params)
|
async with conn.execute(after_query, after_params) as after_cursor:
|
||||||
after_rows = list(await after_cursor.fetchall())
|
after_rows = list(await after_cursor.fetchall())
|
||||||
|
|
||||||
has_newer = len(after_rows) > context_size
|
has_newer = len(after_rows) > context_size
|
||||||
after_messages = [MessageRepository._row_to_message(r) for r in after_rows[:context_size]]
|
after_messages = [MessageRepository._row_to_message(r) for r in after_rows[:context_size]]
|
||||||
@@ -556,21 +567,29 @@ class MessageRepository:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def increment_ack_count(message_id: int) -> int:
|
async def increment_ack_count(message_id: int) -> int:
|
||||||
"""Increment ack count and return the new value."""
|
"""Increment ack count and return the new value.
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"UPDATE messages SET acked = acked + 1 WHERE id = ? RETURNING acked", (message_id,)
|
NOTE: ``RETURNING`` leaves the prepared statement active until the
|
||||||
)
|
row is fetched, so we MUST consume it inside the ``async with``
|
||||||
row = await cursor.fetchone()
|
block. Without that, the commit at the end of ``db.tx()`` fails
|
||||||
await db.conn.commit()
|
with ``cannot commit transaction - SQL statements in progress``.
|
||||||
|
"""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"UPDATE messages SET acked = acked + 1 WHERE id = ? RETURNING acked",
|
||||||
|
(message_id,),
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["acked"] if row else 1
|
return row["acked"] if row else 1
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_ack_and_paths(message_id: int) -> tuple[int, list[MessagePath] | None]:
|
async def get_ack_and_paths(message_id: int) -> tuple[int, list[MessagePath] | None]:
|
||||||
"""Get the current ack count and paths for a message."""
|
"""Get the current ack count and paths for a message."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT acked, paths FROM messages WHERE id = ?", (message_id,)
|
async with conn.execute(
|
||||||
)
|
"SELECT acked, paths FROM messages WHERE id = ?", (message_id,)
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row:
|
if not row:
|
||||||
return 0, None
|
return 0, None
|
||||||
return row["acked"], MessageRepository._parse_paths(row["paths"])
|
return row["acked"], MessageRepository._parse_paths(row["paths"])
|
||||||
@@ -578,11 +597,12 @@ class MessageRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_id(message_id: int) -> "Message | None":
|
async def get_by_id(message_id: int) -> "Message | None":
|
||||||
"""Look up a message by its ID."""
|
"""Look up a message by its ID."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
f"SELECT {MessageRepository._message_select('messages')} FROM messages WHERE id = ?",
|
async with conn.execute(
|
||||||
(message_id,),
|
f"SELECT {MessageRepository._message_select('messages')} FROM messages WHERE id = ?",
|
||||||
)
|
(message_id,),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row:
|
if not row:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -591,11 +611,14 @@ class MessageRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def delete_by_id(message_id: int) -> None:
|
async def delete_by_id(message_id: int) -> None:
|
||||||
"""Delete a message row by ID."""
|
"""Delete a message row by ID."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE raw_packets SET message_id = NULL WHERE message_id = ?", (message_id,)
|
async with conn.execute(
|
||||||
)
|
"UPDATE raw_packets SET message_id = NULL WHERE message_id = ?",
|
||||||
await db.conn.execute("DELETE FROM messages WHERE id = ?", (message_id,))
|
(message_id,),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
async with conn.execute("DELETE FROM messages WHERE id = ?", (message_id,)):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_content(
|
async def get_by_content(
|
||||||
@@ -618,8 +641,9 @@ class MessageRepository:
|
|||||||
query += " AND outgoing = ?"
|
query += " AND outgoing = ?"
|
||||||
params.append(1 if outgoing else 0)
|
params.append(1 if outgoing else 0)
|
||||||
query += " ORDER BY id ASC"
|
query += " ORDER BY id ASC"
|
||||||
cursor = await db.conn.execute(query, params)
|
async with db.readonly() as conn:
|
||||||
row = await cursor.fetchone()
|
async with conn.execute(query, params) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row:
|
if not row:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -653,76 +677,6 @@ class MessageRepository:
|
|||||||
)
|
)
|
||||||
blocked_sql = f" AND {blocked_clause}" if blocked_clause else ""
|
blocked_sql = f" AND {blocked_clause}" if blocked_clause else ""
|
||||||
|
|
||||||
# Channel unreads
|
|
||||||
cursor = await db.conn.execute(
|
|
||||||
f"""
|
|
||||||
SELECT m.conversation_key,
|
|
||||||
COUNT(*) as unread_count,
|
|
||||||
SUM(CASE
|
|
||||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
|
||||||
ELSE 0
|
|
||||||
END) > 0 as has_mention
|
|
||||||
FROM messages m
|
|
||||||
JOIN channels c ON m.conversation_key = c.key
|
|
||||||
WHERE m.type = 'CHAN' AND m.outgoing = 0
|
|
||||||
AND m.received_at > COALESCE(c.last_read_at, 0)
|
|
||||||
{blocked_sql}
|
|
||||||
GROUP BY m.conversation_key
|
|
||||||
""",
|
|
||||||
(mention_token or "", mention_token or "", *blocked_params),
|
|
||||||
)
|
|
||||||
rows = await cursor.fetchall()
|
|
||||||
for row in rows:
|
|
||||||
state_key = f"channel-{row['conversation_key']}"
|
|
||||||
counts[state_key] = row["unread_count"]
|
|
||||||
if mention_token and row["has_mention"]:
|
|
||||||
mention_flags[state_key] = True
|
|
||||||
|
|
||||||
# Contact unreads
|
|
||||||
cursor = await db.conn.execute(
|
|
||||||
f"""
|
|
||||||
SELECT m.conversation_key,
|
|
||||||
COUNT(*) as unread_count,
|
|
||||||
SUM(CASE
|
|
||||||
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
|
||||||
ELSE 0
|
|
||||||
END) > 0 as has_mention
|
|
||||||
FROM messages m
|
|
||||||
LEFT JOIN contacts ct ON m.conversation_key = ct.public_key
|
|
||||||
WHERE m.type = 'PRIV' AND m.outgoing = 0
|
|
||||||
AND m.received_at > COALESCE(ct.last_read_at, 0)
|
|
||||||
{blocked_sql}
|
|
||||||
GROUP BY m.conversation_key
|
|
||||||
""",
|
|
||||||
(mention_token or "", mention_token or "", *blocked_params),
|
|
||||||
)
|
|
||||||
rows = await cursor.fetchall()
|
|
||||||
for row in rows:
|
|
||||||
state_key = f"contact-{row['conversation_key']}"
|
|
||||||
counts[state_key] = row["unread_count"]
|
|
||||||
if mention_token and row["has_mention"]:
|
|
||||||
mention_flags[state_key] = True
|
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"""
|
|
||||||
SELECT key, last_read_at
|
|
||||||
FROM channels
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
rows = await cursor.fetchall()
|
|
||||||
for row in rows:
|
|
||||||
last_read_ats[f"channel-{row['key']}"] = row["last_read_at"]
|
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"""
|
|
||||||
SELECT public_key, last_read_at
|
|
||||||
FROM contacts
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
rows = await cursor.fetchall()
|
|
||||||
for row in rows:
|
|
||||||
last_read_ats[f"contact-{row['public_key']}"] = row["last_read_at"]
|
|
||||||
|
|
||||||
# Last message times for all conversations (including read ones),
|
# Last message times for all conversations (including read ones),
|
||||||
# excluding blocked incoming traffic so refresh matches live WS behavior.
|
# excluding blocked incoming traffic so refresh matches live WS behavior.
|
||||||
last_time_clause, last_time_params = MessageRepository._build_blocked_incoming_clause(
|
last_time_clause, last_time_params = MessageRepository._build_blocked_incoming_clause(
|
||||||
@@ -730,20 +684,94 @@ class MessageRepository:
|
|||||||
)
|
)
|
||||||
last_time_where_sql = f"WHERE {last_time_clause}" if last_time_clause else ""
|
last_time_where_sql = f"WHERE {last_time_clause}" if last_time_clause else ""
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
# Single readonly acquisition for all 5 queries — they form one logical
|
||||||
f"""
|
# snapshot, and holding the lock for the batch is cheaper than acquiring
|
||||||
SELECT type, conversation_key, MAX(received_at) as last_message_time
|
# it 5 times.
|
||||||
FROM messages
|
async with db.readonly() as conn:
|
||||||
{last_time_where_sql}
|
# Channel unreads
|
||||||
GROUP BY type, conversation_key
|
async with conn.execute(
|
||||||
""",
|
f"""
|
||||||
last_time_params,
|
SELECT m.conversation_key,
|
||||||
)
|
COUNT(*) as unread_count,
|
||||||
rows = await cursor.fetchall()
|
SUM(CASE
|
||||||
for row in rows:
|
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||||
prefix = "channel" if row["type"] == "CHAN" else "contact"
|
ELSE 0
|
||||||
state_key = f"{prefix}-{row['conversation_key']}"
|
END) > 0 as has_mention
|
||||||
last_message_times[state_key] = row["last_message_time"]
|
FROM messages m
|
||||||
|
JOIN channels c ON m.conversation_key = c.key
|
||||||
|
WHERE m.type = 'CHAN' AND m.outgoing = 0
|
||||||
|
AND m.received_at > COALESCE(c.last_read_at, 0)
|
||||||
|
{blocked_sql}
|
||||||
|
GROUP BY m.conversation_key
|
||||||
|
""",
|
||||||
|
(mention_token or "", mention_token or "", *blocked_params),
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
for row in rows:
|
||||||
|
state_key = f"channel-{row['conversation_key']}"
|
||||||
|
counts[state_key] = row["unread_count"]
|
||||||
|
if mention_token and row["has_mention"]:
|
||||||
|
mention_flags[state_key] = True
|
||||||
|
|
||||||
|
# Contact unreads
|
||||||
|
async with conn.execute(
|
||||||
|
f"""
|
||||||
|
SELECT m.conversation_key,
|
||||||
|
COUNT(*) as unread_count,
|
||||||
|
SUM(CASE
|
||||||
|
WHEN ? <> '' AND INSTR(LOWER(m.text), LOWER(?)) > 0 THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END) > 0 as has_mention
|
||||||
|
FROM messages m
|
||||||
|
LEFT JOIN contacts ct ON m.conversation_key = ct.public_key
|
||||||
|
WHERE m.type = 'PRIV' AND m.outgoing = 0
|
||||||
|
AND m.received_at > COALESCE(ct.last_read_at, 0)
|
||||||
|
{blocked_sql}
|
||||||
|
GROUP BY m.conversation_key
|
||||||
|
""",
|
||||||
|
(mention_token or "", mention_token or "", *blocked_params),
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
for row in rows:
|
||||||
|
state_key = f"contact-{row['conversation_key']}"
|
||||||
|
counts[state_key] = row["unread_count"]
|
||||||
|
if mention_token and row["has_mention"]:
|
||||||
|
mention_flags[state_key] = True
|
||||||
|
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT key, last_read_at
|
||||||
|
FROM channels
|
||||||
|
"""
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
for row in rows:
|
||||||
|
last_read_ats[f"channel-{row['key']}"] = row["last_read_at"]
|
||||||
|
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT public_key, last_read_at
|
||||||
|
FROM contacts
|
||||||
|
"""
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
for row in rows:
|
||||||
|
last_read_ats[f"contact-{row['public_key']}"] = row["last_read_at"]
|
||||||
|
|
||||||
|
async with conn.execute(
|
||||||
|
f"""
|
||||||
|
SELECT type, conversation_key, MAX(received_at) as last_message_time
|
||||||
|
FROM messages
|
||||||
|
{last_time_where_sql}
|
||||||
|
GROUP BY type, conversation_key
|
||||||
|
""",
|
||||||
|
last_time_params,
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
for row in rows:
|
||||||
|
prefix = "channel" if row["type"] == "CHAN" else "contact"
|
||||||
|
state_key = f"{prefix}-{row['conversation_key']}"
|
||||||
|
last_message_times[state_key] = row["last_message_time"]
|
||||||
|
|
||||||
# Only include last_read_ats for conversations that actually have messages.
|
# Only include last_read_ats for conversations that actually have messages.
|
||||||
# Without this filter, every contact heard via advertisement (even without
|
# Without this filter, every contact heard via advertisement (even without
|
||||||
@@ -760,41 +788,45 @@ class MessageRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_dm_messages(contact_key: str) -> int:
|
async def count_dm_messages(contact_key: str) -> int:
|
||||||
"""Count total DM messages for a contact."""
|
"""Count total DM messages for a contact."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'PRIV' AND conversation_key = ?",
|
async with conn.execute(
|
||||||
(contact_key.lower(),),
|
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'PRIV' AND conversation_key = ?",
|
||||||
)
|
(contact_key.lower(),),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["cnt"] if row else 0
|
return row["cnt"] if row else 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_channel_messages_by_sender(sender_key: str) -> int:
|
async def count_channel_messages_by_sender(sender_key: str) -> int:
|
||||||
"""Count channel messages sent by a specific contact."""
|
"""Count channel messages sent by a specific contact."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_key = ?",
|
async with conn.execute(
|
||||||
(sender_key.lower(),),
|
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_key = ?",
|
||||||
)
|
(sender_key.lower(),),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["cnt"] if row else 0
|
return row["cnt"] if row else 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_channel_messages_by_sender_name(sender_name: str) -> int:
|
async def count_channel_messages_by_sender_name(sender_name: str) -> int:
|
||||||
"""Count channel messages attributed to a display name."""
|
"""Count channel messages attributed to a display name."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
async with conn.execute(
|
||||||
(sender_name,),
|
"SELECT COUNT(*) as cnt FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||||
)
|
(sender_name,),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["cnt"] if row else 0
|
return row["cnt"] if row else 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_first_channel_message_by_sender_name(sender_name: str) -> int | None:
|
async def get_first_channel_message_by_sender_name(sender_name: str) -> int | None:
|
||||||
"""Get the earliest stored channel message timestamp for a display name."""
|
"""Get the earliest stored channel message timestamp for a display name."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT MIN(received_at) AS first_seen FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
async with conn.execute(
|
||||||
(sender_name,),
|
"SELECT MIN(received_at) AS first_seen FROM messages WHERE type = 'CHAN' AND sender_name = ?",
|
||||||
)
|
(sender_name,),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["first_seen"] if row and row["first_seen"] is not None else None
|
return row["first_seen"] if row and row["first_seen"] is not None else None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -813,67 +845,76 @@ class MessageRepository:
|
|||||||
t_48h = now - 172800
|
t_48h = now - 172800
|
||||||
t_7d = now - 604800
|
t_7d = now - 604800
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT COUNT(*) AS all_time,
|
"""
|
||||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_1h,
|
SELECT COUNT(*) AS all_time,
|
||||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_24h,
|
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_1h,
|
||||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_48h,
|
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_24h,
|
||||||
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_7d,
|
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_48h,
|
||||||
MIN(received_at) AS first_message_at,
|
SUM(CASE WHEN received_at >= ? THEN 1 ELSE 0 END) AS last_7d,
|
||||||
COUNT(DISTINCT sender_key) AS unique_sender_count
|
MIN(received_at) AS first_message_at,
|
||||||
FROM messages WHERE type = 'CHAN' AND conversation_key = ?
|
COUNT(DISTINCT sender_key) AS unique_sender_count
|
||||||
""",
|
FROM messages WHERE type = 'CHAN' AND conversation_key = ?
|
||||||
(t_1h, t_24h, t_48h, t_7d, conversation_key),
|
""",
|
||||||
)
|
(t_1h, t_24h, t_48h, t_7d, conversation_key),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
assert row is not None # Aggregate query always returns a row
|
row = await cursor.fetchone()
|
||||||
|
assert row is not None # Aggregate query always returns a row
|
||||||
|
|
||||||
message_counts = {
|
message_counts = {
|
||||||
"last_1h": row["last_1h"] or 0,
|
"last_1h": row["last_1h"] or 0,
|
||||||
"last_24h": row["last_24h"] or 0,
|
"last_24h": row["last_24h"] or 0,
|
||||||
"last_48h": row["last_48h"] or 0,
|
"last_48h": row["last_48h"] or 0,
|
||||||
"last_7d": row["last_7d"] or 0,
|
"last_7d": row["last_7d"] or 0,
|
||||||
"all_time": row["all_time"] or 0,
|
"all_time": row["all_time"] or 0,
|
||||||
}
|
|
||||||
|
|
||||||
cursor2 = await db.conn.execute(
|
|
||||||
"""
|
|
||||||
SELECT COALESCE(sender_name, sender_key, 'Unknown') AS display_name,
|
|
||||||
sender_key, COUNT(*) AS cnt
|
|
||||||
FROM messages
|
|
||||||
WHERE type = 'CHAN' AND conversation_key = ?
|
|
||||||
AND received_at >= ? AND sender_key IS NOT NULL
|
|
||||||
GROUP BY sender_key ORDER BY cnt DESC LIMIT 5
|
|
||||||
""",
|
|
||||||
(conversation_key, t_24h),
|
|
||||||
)
|
|
||||||
top_rows = await cursor2.fetchall()
|
|
||||||
top_senders = [
|
|
||||||
{
|
|
||||||
"sender_name": r["display_name"],
|
|
||||||
"sender_key": r["sender_key"],
|
|
||||||
"message_count": r["cnt"],
|
|
||||||
}
|
}
|
||||||
for r in top_rows
|
|
||||||
]
|
|
||||||
|
|
||||||
# Path hash width distribution for last 24h (in-Python parse of raw packet envelopes)
|
async with conn.execute(
|
||||||
cursor3 = await db.conn.execute(
|
"""
|
||||||
"""
|
SELECT COALESCE(sender_name, sender_key, 'Unknown') AS display_name,
|
||||||
SELECT rp.data FROM raw_packets rp
|
sender_key, COUNT(*) AS cnt
|
||||||
JOIN messages m ON rp.message_id = m.id
|
FROM messages
|
||||||
WHERE m.type = 'CHAN' AND m.conversation_key = ?
|
WHERE type = 'CHAN' AND conversation_key = ?
|
||||||
AND rp.timestamp >= ?
|
AND received_at >= ? AND sender_key IS NOT NULL
|
||||||
""",
|
GROUP BY sender_key ORDER BY cnt DESC LIMIT 5
|
||||||
(conversation_key, t_24h),
|
""",
|
||||||
)
|
(conversation_key, t_24h),
|
||||||
path_hash_width_24h = await bucket_path_hash_widths(cursor3)
|
) as cursor:
|
||||||
|
top_rows = await cursor.fetchall()
|
||||||
|
top_senders = [
|
||||||
|
{
|
||||||
|
"sender_name": r["display_name"],
|
||||||
|
"sender_key": r["sender_key"],
|
||||||
|
"message_count": r["cnt"],
|
||||||
|
}
|
||||||
|
for r in top_rows
|
||||||
|
]
|
||||||
|
|
||||||
|
# Path hash width distribution for last 24h: fetch raw rows under
|
||||||
|
# the lock, then release BEFORE the CPU-bound in-Python envelope
|
||||||
|
# parse. Parsing can iterate thousands of rows and previously held
|
||||||
|
# the DB lock for the whole traversal — blocking every other repo
|
||||||
|
# caller on a Pi. Keep the lock only for the fetch.
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT rp.data FROM raw_packets rp
|
||||||
|
JOIN messages m ON rp.message_id = m.id
|
||||||
|
WHERE m.type = 'CHAN' AND m.conversation_key = ?
|
||||||
|
AND rp.timestamp >= ?
|
||||||
|
""",
|
||||||
|
(conversation_key, t_24h),
|
||||||
|
) as cursor:
|
||||||
|
rows3 = await cursor.fetchall()
|
||||||
|
first_message_at = row["first_message_at"]
|
||||||
|
unique_sender_count = row["unique_sender_count"] or 0
|
||||||
|
|
||||||
|
path_hash_width_24h = bucket_path_hash_widths(rows3)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"message_counts": message_counts,
|
"message_counts": message_counts,
|
||||||
"first_message_at": row["first_message_at"],
|
"first_message_at": first_message_at,
|
||||||
"unique_sender_count": row["unique_sender_count"] or 0,
|
"unique_sender_count": unique_sender_count,
|
||||||
"top_senders_24h": top_senders,
|
"top_senders_24h": top_senders,
|
||||||
"path_hash_width_24h": path_hash_width_24h,
|
"path_hash_width_24h": path_hash_width_24h,
|
||||||
}
|
}
|
||||||
@@ -881,14 +922,15 @@ class MessageRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_channels_with_incoming_messages() -> int:
|
async def count_channels_with_incoming_messages() -> int:
|
||||||
"""Count distinct channel conversations with at least one incoming message."""
|
"""Count distinct channel conversations with at least one incoming message."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT COUNT(DISTINCT conversation_key) AS cnt
|
"""
|
||||||
FROM messages
|
SELECT COUNT(DISTINCT conversation_key) AS cnt
|
||||||
WHERE type = 'CHAN' AND outgoing = 0
|
FROM messages
|
||||||
"""
|
WHERE type = 'CHAN' AND outgoing = 0
|
||||||
)
|
"""
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return int(row["cnt"]) if row and row["cnt"] is not None else 0
|
return int(row["cnt"]) if row and row["cnt"] is not None else 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -897,20 +939,21 @@ class MessageRepository:
|
|||||||
|
|
||||||
Returns list of (channel_key, channel_name, message_count) tuples.
|
Returns list of (channel_key, channel_name, message_count) tuples.
|
||||||
"""
|
"""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
"""
|
||||||
COUNT(*) AS cnt
|
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||||
FROM messages m
|
COUNT(*) AS cnt
|
||||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
FROM messages m
|
||||||
WHERE m.type = 'CHAN' AND m.sender_key = ?
|
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||||
GROUP BY m.conversation_key
|
WHERE m.type = 'CHAN' AND m.sender_key = ?
|
||||||
ORDER BY cnt DESC
|
GROUP BY m.conversation_key
|
||||||
LIMIT ?
|
ORDER BY cnt DESC
|
||||||
""",
|
LIMIT ?
|
||||||
(sender_key.lower(), limit),
|
""",
|
||||||
)
|
(sender_key.lower(), limit),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -918,34 +961,36 @@ class MessageRepository:
|
|||||||
sender_name: str, limit: int = 5
|
sender_name: str, limit: int = 5
|
||||||
) -> list[tuple[str, str, int]]:
|
) -> list[tuple[str, str, int]]:
|
||||||
"""Get channels where a display name has sent the most messages."""
|
"""Get channels where a display name has sent the most messages."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
"""
|
||||||
COUNT(*) AS cnt
|
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||||
FROM messages m
|
COUNT(*) AS cnt
|
||||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
FROM messages m
|
||||||
WHERE m.type = 'CHAN' AND m.sender_name = ?
|
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||||
GROUP BY m.conversation_key
|
WHERE m.type = 'CHAN' AND m.sender_name = ?
|
||||||
ORDER BY cnt DESC
|
GROUP BY m.conversation_key
|
||||||
LIMIT ?
|
ORDER BY cnt DESC
|
||||||
""",
|
LIMIT ?
|
||||||
(sender_name, limit),
|
""",
|
||||||
)
|
(sender_name, limit),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
return [(row["conversation_key"], row["channel_name"], row["cnt"]) for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def _get_activity_hour_buckets(where_sql: str, params: list[Any]) -> dict[int, int]:
|
async def _get_activity_hour_buckets(where_sql: str, params: list[Any]) -> dict[int, int]:
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
f"""
|
async with conn.execute(
|
||||||
SELECT received_at / 3600 AS hour_bucket, COUNT(*) AS cnt
|
f"""
|
||||||
FROM messages
|
SELECT received_at / 3600 AS hour_bucket, COUNT(*) AS cnt
|
||||||
WHERE {where_sql}
|
FROM messages
|
||||||
GROUP BY hour_bucket
|
WHERE {where_sql}
|
||||||
""",
|
GROUP BY hour_bucket
|
||||||
params,
|
""",
|
||||||
)
|
params,
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return {int(row["hour_bucket"]): row["cnt"] for row in rows}
|
return {int(row["hour_bucket"]): row["cnt"] for row in rows}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -999,16 +1044,17 @@ class MessageRepository:
|
|||||||
current_day_start = (now // 86400) * 86400
|
current_day_start = (now // 86400) * 86400
|
||||||
start = current_day_start - (weeks - 1) * bucket_seconds
|
start = current_day_start - (weeks - 1) * bucket_seconds
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
f"""
|
async with conn.execute(
|
||||||
SELECT (received_at - ?) / ? AS bucket_idx, COUNT(*) AS cnt
|
f"""
|
||||||
FROM messages
|
SELECT (received_at - ?) / ? AS bucket_idx, COUNT(*) AS cnt
|
||||||
WHERE {where_sql} AND received_at >= ?
|
FROM messages
|
||||||
GROUP BY bucket_idx
|
WHERE {where_sql} AND received_at >= ?
|
||||||
""",
|
GROUP BY bucket_idx
|
||||||
[start, bucket_seconds, *params, start],
|
""",
|
||||||
)
|
[start, bucket_seconds, *params, start],
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
counts = {int(row["bucket_idx"]): row["cnt"] for row in rows}
|
counts = {int(row["bucket_idx"]): row["cnt"] for row in rows}
|
||||||
|
|
||||||
return [
|
return [
|
||||||
|
|||||||
@@ -0,0 +1,162 @@
|
|||||||
|
"""Repository for push_subscriptions table."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from app.database import db
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Auto-delete subscriptions that have failed this many times consecutively
|
||||||
|
# without any successful delivery in between.
|
||||||
|
MAX_CONSECUTIVE_FAILURES = 15
|
||||||
|
|
||||||
|
|
||||||
|
def _row_to_dict(row: Any) -> dict[str, Any]:
|
||||||
|
return {
|
||||||
|
"id": row["id"],
|
||||||
|
"endpoint": row["endpoint"],
|
||||||
|
"p256dh": row["p256dh"],
|
||||||
|
"auth": row["auth"],
|
||||||
|
"label": row["label"] or "",
|
||||||
|
"created_at": row["created_at"] or 0,
|
||||||
|
"last_success_at": row["last_success_at"],
|
||||||
|
"failure_count": row["failure_count"] or 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class PushSubscriptionRepository:
|
||||||
|
@staticmethod
|
||||||
|
async def create(
|
||||||
|
endpoint: str,
|
||||||
|
p256dh: str,
|
||||||
|
auth: str,
|
||||||
|
label: str = "",
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""Create or upsert a push subscription (keyed by endpoint)."""
|
||||||
|
sub_id = str(uuid.uuid4())
|
||||||
|
now = int(time.time())
|
||||||
|
|
||||||
|
async with db.tx() as conn:
|
||||||
|
await conn.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO push_subscriptions
|
||||||
|
(id, endpoint, p256dh, auth, label, created_at, failure_count)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, 0)
|
||||||
|
ON CONFLICT(endpoint) DO UPDATE SET
|
||||||
|
p256dh = excluded.p256dh,
|
||||||
|
auth = excluded.auth,
|
||||||
|
label = CASE WHEN excluded.label != '' THEN excluded.label
|
||||||
|
ELSE push_subscriptions.label END,
|
||||||
|
failure_count = 0
|
||||||
|
""",
|
||||||
|
(sub_id, endpoint, p256dh, auth, label, now),
|
||||||
|
)
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT * FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
|
||||||
|
return _row_to_dict(row) if row else {"id": sub_id} # type: ignore[arg-type]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get(subscription_id: str) -> dict[str, Any] | None:
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT * FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
return _row_to_dict(row) if row else None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_by_endpoint(endpoint: str) -> dict[str, Any] | None:
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT * FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
return _row_to_dict(row) if row else None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_all() -> list[dict[str, Any]]:
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT * FROM push_subscriptions ORDER BY created_at DESC"
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
return [_row_to_dict(row) for row in rows]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def update(subscription_id: str, **fields: Any) -> dict[str, Any] | None:
|
||||||
|
updates: list[str] = []
|
||||||
|
params: list[Any] = []
|
||||||
|
|
||||||
|
if "label" in fields:
|
||||||
|
updates.append("label = ?")
|
||||||
|
params.append(fields["label"])
|
||||||
|
|
||||||
|
if not updates:
|
||||||
|
return await PushSubscriptionRepository.get(subscription_id)
|
||||||
|
|
||||||
|
params.append(subscription_id)
|
||||||
|
async with db.tx() as conn:
|
||||||
|
await conn.execute(
|
||||||
|
f"UPDATE push_subscriptions SET {', '.join(updates)} WHERE id = ?",
|
||||||
|
params,
|
||||||
|
)
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT * FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
return _row_to_dict(row) if row else None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def delete(subscription_id: str) -> bool:
|
||||||
|
async with db.tx() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"DELETE FROM push_subscriptions WHERE id = ?", (subscription_id,)
|
||||||
|
) as cursor:
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def delete_by_endpoint(endpoint: str) -> bool:
|
||||||
|
async with db.tx() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"DELETE FROM push_subscriptions WHERE endpoint = ?", (endpoint,)
|
||||||
|
) as cursor:
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def batch_record_outcomes(
|
||||||
|
success_ids: list[str], failure_ids: list[str], remove_ids: list[str]
|
||||||
|
) -> None:
|
||||||
|
"""Batch-update delivery outcomes in a single transaction."""
|
||||||
|
now = int(time.time())
|
||||||
|
async with db.tx() as conn:
|
||||||
|
if remove_ids:
|
||||||
|
placeholders = ",".join("?" for _ in remove_ids)
|
||||||
|
await conn.execute(
|
||||||
|
f"DELETE FROM push_subscriptions WHERE id IN ({placeholders})",
|
||||||
|
remove_ids,
|
||||||
|
)
|
||||||
|
if success_ids:
|
||||||
|
placeholders = ",".join("?" for _ in success_ids)
|
||||||
|
await conn.execute(
|
||||||
|
f"UPDATE push_subscriptions SET last_success_at = ?, failure_count = 0 "
|
||||||
|
f"WHERE id IN ({placeholders})",
|
||||||
|
[now, *success_ids],
|
||||||
|
)
|
||||||
|
if failure_ids:
|
||||||
|
placeholders = ",".join("?" for _ in failure_ids)
|
||||||
|
await conn.execute(
|
||||||
|
f"UPDATE push_subscriptions SET failure_count = failure_count + 1 "
|
||||||
|
f"WHERE id IN ({placeholders})",
|
||||||
|
failure_ids,
|
||||||
|
)
|
||||||
|
# Evict subscriptions that have exceeded the failure threshold
|
||||||
|
await conn.execute(
|
||||||
|
"DELETE FROM push_subscriptions WHERE failure_count >= ?",
|
||||||
|
(MAX_CONSECUTIVE_FAILURES,),
|
||||||
|
)
|
||||||
+100
-73
@@ -34,81 +34,101 @@ class RawPacketRepository:
|
|||||||
# For malformed packets, hash the full data
|
# For malformed packets, hash the full data
|
||||||
payload_hash = sha256(data).digest()
|
payload_hash = sha256(data).digest()
|
||||||
|
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"INSERT OR IGNORE INTO raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)",
|
async with conn.execute(
|
||||||
(ts, data, payload_hash),
|
"INSERT OR IGNORE INTO raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)",
|
||||||
)
|
(ts, data, payload_hash),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
|
rowcount = cursor.rowcount
|
||||||
|
lastrowid = cursor.lastrowid
|
||||||
|
|
||||||
if cursor.rowcount > 0:
|
if rowcount > 0:
|
||||||
assert cursor.lastrowid is not None
|
assert lastrowid is not None
|
||||||
return (cursor.lastrowid, True)
|
return (lastrowid, True)
|
||||||
|
|
||||||
# Duplicate payload — look up the existing row.
|
# Duplicate payload — look up the existing row (same transaction).
|
||||||
cursor = await db.conn.execute(
|
async with conn.execute(
|
||||||
"SELECT id FROM raw_packets WHERE payload_hash = ?", (payload_hash,)
|
"SELECT id FROM raw_packets WHERE payload_hash = ?", (payload_hash,)
|
||||||
)
|
) as cursor:
|
||||||
existing = await cursor.fetchone()
|
existing = await cursor.fetchone()
|
||||||
assert existing is not None
|
assert existing is not None
|
||||||
return (existing["id"], False)
|
return (existing["id"], False)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_undecrypted_count() -> int:
|
async def get_undecrypted_count() -> int:
|
||||||
"""Get count of undecrypted packets (those without a linked message)."""
|
"""Get count of undecrypted packets (those without a linked message)."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT COUNT(*) as count FROM raw_packets WHERE message_id IS NULL"
|
async with conn.execute(
|
||||||
)
|
"SELECT COUNT(*) as count FROM raw_packets WHERE message_id IS NULL"
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["count"] if row else 0
|
return row["count"] if row else 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_oldest_undecrypted() -> int | None:
|
async def get_oldest_undecrypted() -> int | None:
|
||||||
"""Get timestamp of oldest undecrypted packet, or None if none exist."""
|
"""Get timestamp of oldest undecrypted packet, or None if none exist."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT MIN(timestamp) as oldest FROM raw_packets WHERE message_id IS NULL"
|
async with conn.execute(
|
||||||
)
|
"SELECT MIN(timestamp) as oldest FROM raw_packets WHERE message_id IS NULL"
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
return row["oldest"] if row and row["oldest"] is not None else None
|
return row["oldest"] if row and row["oldest"] is not None else None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _stream_undecrypted_rows(
|
||||||
|
batch_size: int,
|
||||||
|
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||||
|
"""Internal: keyset-paginated scan of every undecrypted raw packet.
|
||||||
|
|
||||||
|
Yields ``(id, data, timestamp)`` for each row across all batches.
|
||||||
|
Lock is acquired per batch only — concurrent writes can interleave
|
||||||
|
at batch boundaries rather than being blocked for the full scan.
|
||||||
|
Each batch opens a fresh cursor and consumes it fully with
|
||||||
|
``fetchall()`` before releasing, so no prepared statement is alive
|
||||||
|
at a yield boundary.
|
||||||
|
|
||||||
|
``last_id`` advances per row, not per yield, so external filters
|
||||||
|
(see ``stream_undecrypted_text_messages``) that drop rows do not
|
||||||
|
cause a re-scan of skipped IDs.
|
||||||
|
"""
|
||||||
|
last_id = -1
|
||||||
|
while True:
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT id, data, timestamp FROM raw_packets "
|
||||||
|
"WHERE message_id IS NULL AND id > ? ORDER BY id ASC LIMIT ?",
|
||||||
|
(last_id, batch_size),
|
||||||
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
if not rows:
|
||||||
|
return
|
||||||
|
for row in rows:
|
||||||
|
last_id = row["id"]
|
||||||
|
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def stream_all_undecrypted(
|
async def stream_all_undecrypted(
|
||||||
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
||||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||||
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches."""
|
"""Yield all undecrypted packets as (id, data, timestamp) in bounded batches."""
|
||||||
cursor = await db.conn.execute(
|
async for row in RawPacketRepository._stream_undecrypted_rows(batch_size):
|
||||||
"SELECT id, data, timestamp FROM raw_packets WHERE message_id IS NULL ORDER BY timestamp ASC"
|
yield row
|
||||||
)
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
rows = await cursor.fetchmany(batch_size)
|
|
||||||
if not rows:
|
|
||||||
break
|
|
||||||
for row in rows:
|
|
||||||
yield (row["id"], bytes(row["data"]), row["timestamp"])
|
|
||||||
finally:
|
|
||||||
await cursor.close()
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def stream_undecrypted_text_messages(
|
async def stream_undecrypted_text_messages(
|
||||||
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
batch_size: int = UNDECRYPTED_PACKET_BATCH_SIZE,
|
||||||
) -> AsyncIterator[tuple[int, bytes, int]]:
|
) -> AsyncIterator[tuple[int, bytes, int]]:
|
||||||
"""Yield undecrypted TEXT_MESSAGE packets in bounded-size batches."""
|
"""Yield undecrypted TEXT_MESSAGE packets in bounded-size batches.
|
||||||
cursor = await db.conn.execute(
|
|
||||||
"SELECT id, data, timestamp FROM raw_packets WHERE message_id IS NULL ORDER BY timestamp ASC"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
rows = await cursor.fetchmany(batch_size)
|
|
||||||
if not rows:
|
|
||||||
break
|
|
||||||
|
|
||||||
for row in rows:
|
Filters the shared scan to rows whose payload parses as a text
|
||||||
data = bytes(row["data"])
|
message. Non-matching rows still advance the keyset cursor so they
|
||||||
payload_type = get_packet_payload_type(data)
|
aren't re-fetched on subsequent batches.
|
||||||
if payload_type == PayloadType.TEXT_MESSAGE:
|
"""
|
||||||
yield (row["id"], data, row["timestamp"])
|
async for packet_id, data, timestamp in RawPacketRepository._stream_undecrypted_rows(
|
||||||
finally:
|
batch_size
|
||||||
await cursor.close()
|
):
|
||||||
|
if get_packet_payload_type(data) == PayloadType.TEXT_MESSAGE:
|
||||||
|
yield (packet_id, data, timestamp)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def count_undecrypted_text_messages(
|
async def count_undecrypted_text_messages(
|
||||||
@@ -125,20 +145,22 @@ class RawPacketRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def mark_decrypted(packet_id: int, message_id: int) -> None:
|
async def mark_decrypted(packet_id: int, message_id: int) -> None:
|
||||||
"""Link a raw packet to its decrypted message."""
|
"""Link a raw packet to its decrypted message."""
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"UPDATE raw_packets SET message_id = ? WHERE id = ?",
|
async with conn.execute(
|
||||||
(message_id, packet_id),
|
"UPDATE raw_packets SET message_id = ? WHERE id = ?",
|
||||||
)
|
(message_id, packet_id),
|
||||||
await db.conn.commit()
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_linked_message_id(packet_id: int) -> int | None:
|
async def get_linked_message_id(packet_id: int) -> int | None:
|
||||||
"""Return the linked message ID for a raw packet, if any."""
|
"""Return the linked message ID for a raw packet, if any."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT message_id FROM raw_packets WHERE id = ?",
|
async with conn.execute(
|
||||||
(packet_id,),
|
"SELECT message_id FROM raw_packets WHERE id = ?",
|
||||||
)
|
(packet_id,),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row:
|
if not row:
|
||||||
return None
|
return None
|
||||||
return row["message_id"]
|
return row["message_id"]
|
||||||
@@ -146,11 +168,12 @@ class RawPacketRepository:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_by_id(packet_id: int) -> tuple[int, bytes, int, int | None] | None:
|
async def get_by_id(packet_id: int) -> tuple[int, bytes, int, int | None] | None:
|
||||||
"""Return a raw packet row as (id, data, timestamp, message_id)."""
|
"""Return a raw packet row as (id, data, timestamp, message_id)."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT id, data, timestamp, message_id FROM raw_packets WHERE id = ?",
|
async with conn.execute(
|
||||||
(packet_id,),
|
"SELECT id, data, timestamp, message_id FROM raw_packets WHERE id = ?",
|
||||||
)
|
(packet_id,),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
if not row:
|
if not row:
|
||||||
return None
|
return None
|
||||||
return (row["id"], bytes(row["data"]), row["timestamp"], row["message_id"])
|
return (row["id"], bytes(row["data"]), row["timestamp"], row["message_id"])
|
||||||
@@ -159,16 +182,20 @@ class RawPacketRepository:
|
|||||||
async def prune_old_undecrypted(max_age_days: int) -> int:
|
async def prune_old_undecrypted(max_age_days: int) -> int:
|
||||||
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
|
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
|
||||||
cutoff = int(time.time()) - (max_age_days * 86400)
|
cutoff = int(time.time()) - (max_age_days * 86400)
|
||||||
cursor = await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"DELETE FROM raw_packets WHERE message_id IS NULL AND timestamp < ?",
|
async with conn.execute(
|
||||||
(cutoff,),
|
"DELETE FROM raw_packets WHERE message_id IS NULL AND timestamp < ?",
|
||||||
)
|
(cutoff,),
|
||||||
await db.conn.commit()
|
) as cursor:
|
||||||
return cursor.rowcount
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def purge_linked_to_messages() -> int:
|
async def purge_linked_to_messages() -> int:
|
||||||
"""Delete raw packets that are already linked to a stored message."""
|
"""Delete raw packets that are already linked to a stored message."""
|
||||||
cursor = await db.conn.execute("DELETE FROM raw_packets WHERE message_id IS NOT NULL")
|
async with db.tx() as conn:
|
||||||
await db.conn.commit()
|
async with conn.execute(
|
||||||
return cursor.rowcount
|
"DELETE FROM raw_packets WHERE message_id IS NOT NULL"
|
||||||
|
) as cursor:
|
||||||
|
rowcount = cursor.rowcount
|
||||||
|
return rowcount
|
||||||
|
|||||||
@@ -21,51 +21,54 @@ class RepeaterTelemetryRepository:
|
|||||||
data: dict,
|
data: dict,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Insert a telemetry history row and prune stale entries."""
|
"""Insert a telemetry history row and prune stale entries."""
|
||||||
await db.conn.execute(
|
|
||||||
"""
|
|
||||||
INSERT INTO repeater_telemetry_history
|
|
||||||
(public_key, timestamp, data)
|
|
||||||
VALUES (?, ?, ?)
|
|
||||||
""",
|
|
||||||
(public_key, timestamp, json.dumps(data)),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Prune entries older than 30 days
|
|
||||||
cutoff = int(time.time()) - _MAX_AGE_SECONDS
|
cutoff = int(time.time()) - _MAX_AGE_SECONDS
|
||||||
await db.conn.execute(
|
async with db.tx() as conn:
|
||||||
"DELETE FROM repeater_telemetry_history WHERE public_key = ? AND timestamp < ?",
|
async with conn.execute(
|
||||||
(public_key, cutoff),
|
"""
|
||||||
)
|
INSERT INTO repeater_telemetry_history
|
||||||
|
(public_key, timestamp, data)
|
||||||
|
VALUES (?, ?, ?)
|
||||||
|
""",
|
||||||
|
(public_key, timestamp, json.dumps(data)),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
# Cap at _MAX_ENTRIES_PER_REPEATER (keep newest)
|
# Prune entries older than 30 days
|
||||||
await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"DELETE FROM repeater_telemetry_history WHERE public_key = ? AND timestamp < ?",
|
||||||
DELETE FROM repeater_telemetry_history
|
(public_key, cutoff),
|
||||||
WHERE public_key = ? AND id NOT IN (
|
):
|
||||||
SELECT id FROM repeater_telemetry_history
|
pass
|
||||||
WHERE public_key = ?
|
|
||||||
ORDER BY timestamp DESC
|
|
||||||
LIMIT ?
|
|
||||||
)
|
|
||||||
""",
|
|
||||||
(public_key, public_key, _MAX_ENTRIES_PER_REPEATER),
|
|
||||||
)
|
|
||||||
|
|
||||||
await db.conn.commit()
|
# Cap at _MAX_ENTRIES_PER_REPEATER (keep newest)
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
DELETE FROM repeater_telemetry_history
|
||||||
|
WHERE public_key = ? AND id NOT IN (
|
||||||
|
SELECT id FROM repeater_telemetry_history
|
||||||
|
WHERE public_key = ?
|
||||||
|
ORDER BY timestamp DESC
|
||||||
|
LIMIT ?
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
(public_key, public_key, _MAX_ENTRIES_PER_REPEATER),
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_history(public_key: str, since_timestamp: int) -> list[dict]:
|
async def get_history(public_key: str, since_timestamp: int) -> list[dict]:
|
||||||
"""Return telemetry rows for a repeater since a given timestamp, ordered ASC."""
|
"""Return telemetry rows for a repeater since a given timestamp, ordered ASC."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT timestamp, data
|
"""
|
||||||
FROM repeater_telemetry_history
|
SELECT timestamp, data
|
||||||
WHERE public_key = ? AND timestamp >= ?
|
FROM repeater_telemetry_history
|
||||||
ORDER BY timestamp ASC
|
WHERE public_key = ? AND timestamp >= ?
|
||||||
""",
|
ORDER BY timestamp ASC
|
||||||
(public_key, since_timestamp),
|
""",
|
||||||
)
|
(public_key, since_timestamp),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
"timestamp": row["timestamp"],
|
"timestamp": row["timestamp"],
|
||||||
@@ -73,3 +76,25 @@ class RepeaterTelemetryRepository:
|
|||||||
}
|
}
|
||||||
for row in rows
|
for row in rows
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_latest(public_key: str) -> dict | None:
|
||||||
|
"""Return the most recent telemetry row for a repeater, or None."""
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"""
|
||||||
|
SELECT timestamp, data
|
||||||
|
FROM repeater_telemetry_history
|
||||||
|
WHERE public_key = ?
|
||||||
|
ORDER BY timestamp DESC
|
||||||
|
LIMIT 1
|
||||||
|
""",
|
||||||
|
(public_key,),
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
return {
|
||||||
|
"timestamp": row["timestamp"],
|
||||||
|
"data": json.loads(row["data"]),
|
||||||
|
}
|
||||||
|
|||||||
+320
-138
@@ -3,9 +3,12 @@ import logging
|
|||||||
import time
|
import time
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
import aiosqlite
|
||||||
|
|
||||||
from app.database import db
|
from app.database import db
|
||||||
from app.models import AppSettings
|
from app.models import AppSettings
|
||||||
from app.path_utils import bucket_path_hash_widths
|
from app.path_utils import bucket_path_hash_widths
|
||||||
|
from app.telemetry_interval import DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -13,29 +16,37 @@ SECONDS_1H = 3600
|
|||||||
SECONDS_24H = 86400
|
SECONDS_24H = 86400
|
||||||
SECONDS_72H = 259200
|
SECONDS_72H = 259200
|
||||||
SECONDS_7D = 604800
|
SECONDS_7D = 604800
|
||||||
RAW_PACKET_STATS_BATCH_SIZE = 500
|
|
||||||
|
|
||||||
|
|
||||||
class AppSettingsRepository:
|
class AppSettingsRepository:
|
||||||
"""Repository for app_settings table (single-row pattern)."""
|
"""Repository for app_settings table (single-row pattern).
|
||||||
|
|
||||||
|
Public methods acquire the DB lock exactly once. ``toggle_*`` helpers that
|
||||||
|
need a read-modify-write do so inside a single ``db.tx()`` — the internal
|
||||||
|
``_get_in_conn`` / ``_apply_updates`` helpers run under the caller's
|
||||||
|
already-held lock and must NEVER call ``db.tx()`` or ``db.readonly()``.
|
||||||
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get() -> AppSettings:
|
async def _get_in_conn(conn: aiosqlite.Connection) -> AppSettings:
|
||||||
"""Get the current app settings.
|
"""Load settings using an already-acquired connection.
|
||||||
|
|
||||||
Always returns settings - creates default row if needed (migration handles initial row).
|
Used by the public ``get()`` and by multi-step operations
|
||||||
|
(``toggle_blocked_key``, ``toggle_blocked_name``) to avoid re-entering
|
||||||
|
the non-reentrant DB lock.
|
||||||
"""
|
"""
|
||||||
cursor = await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
SELECT max_radio_contacts, auto_decrypt_dm_on_advert,
|
SELECT max_radio_contacts, auto_decrypt_dm_on_advert,
|
||||||
last_message_times,
|
last_message_times,
|
||||||
advert_interval, last_advert_time, flood_scope,
|
advert_interval, last_advert_time, flood_scope,
|
||||||
blocked_keys, blocked_names, discovery_blocked_types,
|
blocked_keys, blocked_names, discovery_blocked_types,
|
||||||
tracked_telemetry_repeaters, auto_resend_channel
|
tracked_telemetry_repeaters, auto_resend_channel,
|
||||||
|
telemetry_interval_hours
|
||||||
FROM app_settings WHERE id = 1
|
FROM app_settings WHERE id = 1
|
||||||
"""
|
"""
|
||||||
)
|
) as cursor:
|
||||||
row = await cursor.fetchone()
|
row = await cursor.fetchone()
|
||||||
|
|
||||||
if not row:
|
if not row:
|
||||||
# Should not happen after migration, but handle gracefully
|
# Should not happen after migration, but handle gracefully
|
||||||
@@ -92,6 +103,16 @@ class AppSettingsRepository:
|
|||||||
except (KeyError, TypeError):
|
except (KeyError, TypeError):
|
||||||
auto_resend_channel = False
|
auto_resend_channel = False
|
||||||
|
|
||||||
|
# Parse telemetry_interval_hours (migration adds the column with
|
||||||
|
# default=8, but guard against older rows / partial migrations).
|
||||||
|
try:
|
||||||
|
raw_interval = row["telemetry_interval_hours"]
|
||||||
|
telemetry_interval_hours = (
|
||||||
|
int(raw_interval) if raw_interval is not None else DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
)
|
||||||
|
except (KeyError, TypeError, ValueError):
|
||||||
|
telemetry_interval_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
|
||||||
return AppSettings(
|
return AppSettings(
|
||||||
max_radio_contacts=row["max_radio_contacts"],
|
max_radio_contacts=row["max_radio_contacts"],
|
||||||
auto_decrypt_dm_on_advert=bool(row["auto_decrypt_dm_on_advert"]),
|
auto_decrypt_dm_on_advert=bool(row["auto_decrypt_dm_on_advert"]),
|
||||||
@@ -104,10 +125,13 @@ class AppSettingsRepository:
|
|||||||
discovery_blocked_types=discovery_blocked_types,
|
discovery_blocked_types=discovery_blocked_types,
|
||||||
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||||
auto_resend_channel=auto_resend_channel,
|
auto_resend_channel=auto_resend_channel,
|
||||||
|
telemetry_interval_hours=telemetry_interval_hours,
|
||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def update(
|
async def _apply_updates(
|
||||||
|
conn: aiosqlite.Connection,
|
||||||
|
*,
|
||||||
max_radio_contacts: int | None = None,
|
max_radio_contacts: int | None = None,
|
||||||
auto_decrypt_dm_on_advert: bool | None = None,
|
auto_decrypt_dm_on_advert: bool | None = None,
|
||||||
last_message_times: dict[str, int] | None = None,
|
last_message_times: dict[str, int] | None = None,
|
||||||
@@ -119,9 +143,14 @@ class AppSettingsRepository:
|
|||||||
discovery_blocked_types: list[int] | None = None,
|
discovery_blocked_types: list[int] | None = None,
|
||||||
tracked_telemetry_repeaters: list[str] | None = None,
|
tracked_telemetry_repeaters: list[str] | None = None,
|
||||||
auto_resend_channel: bool | None = None,
|
auto_resend_channel: bool | None = None,
|
||||||
) -> AppSettings:
|
telemetry_interval_hours: int | None = None,
|
||||||
"""Update app settings. Only provided fields are updated."""
|
) -> None:
|
||||||
updates = []
|
"""Apply field updates using an already-acquired connection.
|
||||||
|
|
||||||
|
Emits a single UPDATE statement inside the caller's transaction. Does
|
||||||
|
NOT commit — the caller's ``db.tx()`` handles that.
|
||||||
|
"""
|
||||||
|
updates: list[str] = []
|
||||||
params: list[Any] = []
|
params: list[Any] = []
|
||||||
|
|
||||||
if max_radio_contacts is not None:
|
if max_radio_contacts is not None:
|
||||||
@@ -168,49 +197,186 @@ class AppSettingsRepository:
|
|||||||
updates.append("auto_resend_channel = ?")
|
updates.append("auto_resend_channel = ?")
|
||||||
params.append(1 if auto_resend_channel else 0)
|
params.append(1 if auto_resend_channel else 0)
|
||||||
|
|
||||||
|
if telemetry_interval_hours is not None:
|
||||||
|
updates.append("telemetry_interval_hours = ?")
|
||||||
|
params.append(telemetry_interval_hours)
|
||||||
|
|
||||||
if updates:
|
if updates:
|
||||||
query = f"UPDATE app_settings SET {', '.join(updates)} WHERE id = 1"
|
query = f"UPDATE app_settings SET {', '.join(updates)} WHERE id = 1"
|
||||||
await db.conn.execute(query, params)
|
async with conn.execute(query, params):
|
||||||
await db.conn.commit()
|
pass
|
||||||
|
|
||||||
return await AppSettingsRepository.get()
|
@staticmethod
|
||||||
|
async def get() -> AppSettings:
|
||||||
|
"""Get the current app settings.
|
||||||
|
|
||||||
|
Always returns settings - creates default row if needed (migration handles initial row).
|
||||||
|
"""
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
return await AppSettingsRepository._get_in_conn(conn)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def update(
|
||||||
|
max_radio_contacts: int | None = None,
|
||||||
|
auto_decrypt_dm_on_advert: bool | None = None,
|
||||||
|
last_message_times: dict[str, int] | None = None,
|
||||||
|
advert_interval: int | None = None,
|
||||||
|
last_advert_time: int | None = None,
|
||||||
|
flood_scope: str | None = None,
|
||||||
|
blocked_keys: list[str] | None = None,
|
||||||
|
blocked_names: list[str] | None = None,
|
||||||
|
discovery_blocked_types: list[int] | None = None,
|
||||||
|
tracked_telemetry_repeaters: list[str] | None = None,
|
||||||
|
auto_resend_channel: bool | None = None,
|
||||||
|
telemetry_interval_hours: int | None = None,
|
||||||
|
) -> AppSettings:
|
||||||
|
"""Update app settings. Only provided fields are updated."""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
await AppSettingsRepository._apply_updates(
|
||||||
|
conn,
|
||||||
|
max_radio_contacts=max_radio_contacts,
|
||||||
|
auto_decrypt_dm_on_advert=auto_decrypt_dm_on_advert,
|
||||||
|
last_message_times=last_message_times,
|
||||||
|
advert_interval=advert_interval,
|
||||||
|
last_advert_time=last_advert_time,
|
||||||
|
flood_scope=flood_scope,
|
||||||
|
blocked_keys=blocked_keys,
|
||||||
|
blocked_names=blocked_names,
|
||||||
|
discovery_blocked_types=discovery_blocked_types,
|
||||||
|
tracked_telemetry_repeaters=tracked_telemetry_repeaters,
|
||||||
|
auto_resend_channel=auto_resend_channel,
|
||||||
|
telemetry_interval_hours=telemetry_interval_hours,
|
||||||
|
)
|
||||||
|
return await AppSettingsRepository._get_in_conn(conn)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def toggle_blocked_key(key: str) -> AppSettings:
|
async def toggle_blocked_key(key: str) -> AppSettings:
|
||||||
"""Toggle a public key in the blocked list. Keys are normalized to lowercase."""
|
"""Toggle a public key in the blocked list. Keys are normalized to lowercase.
|
||||||
|
|
||||||
|
Read-modify-write is atomic under a single ``db.tx()`` lock — two
|
||||||
|
concurrent toggles for the same key cannot produce an inconsistent
|
||||||
|
intermediate state.
|
||||||
|
"""
|
||||||
normalized = key.lower()
|
normalized = key.lower()
|
||||||
settings = await AppSettingsRepository.get()
|
async with db.tx() as conn:
|
||||||
if normalized in settings.blocked_keys:
|
settings = await AppSettingsRepository._get_in_conn(conn)
|
||||||
new_keys = [k for k in settings.blocked_keys if k != normalized]
|
if normalized in settings.blocked_keys:
|
||||||
else:
|
new_keys = [k for k in settings.blocked_keys if k != normalized]
|
||||||
new_keys = settings.blocked_keys + [normalized]
|
else:
|
||||||
return await AppSettingsRepository.update(blocked_keys=new_keys)
|
new_keys = settings.blocked_keys + [normalized]
|
||||||
|
await AppSettingsRepository._apply_updates(conn, blocked_keys=new_keys)
|
||||||
|
return await AppSettingsRepository._get_in_conn(conn)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def toggle_blocked_name(name: str) -> AppSettings:
|
async def toggle_blocked_name(name: str) -> AppSettings:
|
||||||
"""Toggle a display name in the blocked list."""
|
"""Toggle a display name in the blocked list.
|
||||||
settings = await AppSettingsRepository.get()
|
|
||||||
if name in settings.blocked_names:
|
Same atomicity guarantee as ``toggle_blocked_key``.
|
||||||
new_names = [n for n in settings.blocked_names if n != name]
|
"""
|
||||||
else:
|
async with db.tx() as conn:
|
||||||
new_names = settings.blocked_names + [name]
|
settings = await AppSettingsRepository._get_in_conn(conn)
|
||||||
return await AppSettingsRepository.update(blocked_names=new_names)
|
if name in settings.blocked_names:
|
||||||
|
new_names = [n for n in settings.blocked_names if n != name]
|
||||||
|
else:
|
||||||
|
new_names = settings.blocked_names + [name]
|
||||||
|
await AppSettingsRepository._apply_updates(conn, blocked_names=new_names)
|
||||||
|
return await AppSettingsRepository._get_in_conn(conn)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_vapid_keys() -> tuple[str, str]:
|
||||||
|
"""Return (private_key_pem, public_key_b64url) from app_settings.
|
||||||
|
|
||||||
|
These are internal-only columns not exposed via the AppSettings model.
|
||||||
|
"""
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT vapid_private_key, vapid_public_key FROM app_settings WHERE id = 1"
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row and row["vapid_private_key"] and row["vapid_public_key"]:
|
||||||
|
return row["vapid_private_key"], row["vapid_public_key"]
|
||||||
|
return "", ""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def set_vapid_keys(private_key: str, public_key: str) -> None:
|
||||||
|
"""Persist auto-generated VAPID key pair to app_settings."""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
await conn.execute(
|
||||||
|
"UPDATE app_settings SET vapid_private_key = ?, vapid_public_key = ? WHERE id = 1",
|
||||||
|
(private_key, public_key),
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_push_conversations() -> list[str]:
|
||||||
|
"""Return the global list of push-enabled conversation state keys.
|
||||||
|
|
||||||
|
Internal-only column, not exposed via the AppSettings model.
|
||||||
|
"""
|
||||||
|
async with db.readonly() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT push_conversations FROM app_settings WHERE id = 1"
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
if row and row["push_conversations"]:
|
||||||
|
try:
|
||||||
|
return json.loads(row["push_conversations"])
|
||||||
|
except (json.JSONDecodeError, TypeError):
|
||||||
|
return []
|
||||||
|
return []
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def set_push_conversations(conversations: list[str]) -> list[str]:
|
||||||
|
"""Replace the global push-enabled conversation list."""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
await conn.execute(
|
||||||
|
"UPDATE app_settings SET push_conversations = ? WHERE id = 1",
|
||||||
|
(json.dumps(conversations),),
|
||||||
|
)
|
||||||
|
return conversations
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def toggle_push_conversation(key: str) -> list[str]:
|
||||||
|
"""Add or remove a conversation state key from the global push list.
|
||||||
|
|
||||||
|
Atomic read-modify-write under a single ``db.tx()`` lock.
|
||||||
|
"""
|
||||||
|
async with db.tx() as conn:
|
||||||
|
async with conn.execute(
|
||||||
|
"SELECT push_conversations FROM app_settings WHERE id = 1"
|
||||||
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
|
current: list[str] = []
|
||||||
|
if row and row["push_conversations"]:
|
||||||
|
try:
|
||||||
|
current = json.loads(row["push_conversations"])
|
||||||
|
except (json.JSONDecodeError, TypeError):
|
||||||
|
current = []
|
||||||
|
if key in current:
|
||||||
|
current = [k for k in current if k != key]
|
||||||
|
else:
|
||||||
|
current.append(key)
|
||||||
|
await conn.execute(
|
||||||
|
"UPDATE app_settings SET push_conversations = ? WHERE id = 1",
|
||||||
|
(json.dumps(current),),
|
||||||
|
)
|
||||||
|
return current
|
||||||
|
|
||||||
|
|
||||||
class StatisticsRepository:
|
class StatisticsRepository:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_database_message_totals() -> dict[str, int]:
|
async def get_database_message_totals() -> dict[str, int]:
|
||||||
"""Return message totals needed by lightweight debug surfaces."""
|
"""Return message totals needed by lightweight debug surfaces."""
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT
|
"""
|
||||||
SUM(CASE WHEN type = 'PRIV' THEN 1 ELSE 0 END) AS total_dms,
|
SELECT
|
||||||
SUM(CASE WHEN type = 'CHAN' THEN 1 ELSE 0 END) AS total_channel_messages,
|
SUM(CASE WHEN type = 'PRIV' THEN 1 ELSE 0 END) AS total_dms,
|
||||||
SUM(CASE WHEN outgoing = 1 THEN 1 ELSE 0 END) AS total_outgoing
|
SUM(CASE WHEN type = 'CHAN' THEN 1 ELSE 0 END) AS total_channel_messages,
|
||||||
FROM messages
|
SUM(CASE WHEN outgoing = 1 THEN 1 ELSE 0 END) AS total_outgoing
|
||||||
"""
|
FROM messages
|
||||||
)
|
"""
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
assert row is not None
|
assert row is not None
|
||||||
return {
|
return {
|
||||||
"total_dms": row["total_dms"] or 0,
|
"total_dms": row["total_dms"] or 0,
|
||||||
@@ -223,18 +389,19 @@ class StatisticsRepository:
|
|||||||
"""Get time-windowed counts for contacts/repeaters heard."""
|
"""Get time-windowed counts for contacts/repeaters heard."""
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
op = "!=" if exclude else "="
|
op = "!=" if exclude else "="
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
f"""
|
async with conn.execute(
|
||||||
SELECT
|
f"""
|
||||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_hour,
|
SELECT
|
||||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||||
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_week
|
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||||
FROM contacts
|
SUM(CASE WHEN last_seen >= ? THEN 1 ELSE 0 END) AS last_week
|
||||||
WHERE type {op} ? AND last_seen IS NOT NULL
|
FROM contacts
|
||||||
""",
|
WHERE type {op} ? AND last_seen IS NOT NULL
|
||||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D, contact_type),
|
""",
|
||||||
)
|
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D, contact_type),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
assert row is not None # Aggregate query always returns a row
|
assert row is not None # Aggregate query always returns a row
|
||||||
return {
|
return {
|
||||||
"last_hour": row["last_hour"] or 0,
|
"last_hour": row["last_hour"] or 0,
|
||||||
@@ -250,24 +417,25 @@ class StatisticsRepository:
|
|||||||
the old UPPER(...) join and aggregate per known channel directly.
|
the old UPPER(...) join and aggregate per known channel directly.
|
||||||
"""
|
"""
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
WITH known AS (
|
"""
|
||||||
SELECT conversation_key, MAX(received_at) AS last_received_at
|
WITH known AS (
|
||||||
FROM messages
|
SELECT conversation_key, MAX(received_at) AS last_received_at
|
||||||
WHERE type = 'CHAN'
|
FROM messages
|
||||||
AND conversation_key IN (SELECT key FROM channels)
|
WHERE type = 'CHAN'
|
||||||
GROUP BY conversation_key
|
AND conversation_key IN (SELECT key FROM channels)
|
||||||
)
|
GROUP BY conversation_key
|
||||||
SELECT
|
)
|
||||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_hour,
|
SELECT
|
||||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_hour,
|
||||||
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_week
|
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_24_hours,
|
||||||
FROM known
|
SUM(CASE WHEN last_received_at >= ? THEN 1 ELSE 0 END) AS last_week
|
||||||
""",
|
FROM known
|
||||||
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D),
|
""",
|
||||||
)
|
(now - SECONDS_1H, now - SECONDS_24H, now - SECONDS_7D),
|
||||||
row = await cursor.fetchone()
|
) as cursor:
|
||||||
|
row = await cursor.fetchone()
|
||||||
assert row is not None
|
assert row is not None
|
||||||
return {
|
return {
|
||||||
"last_hour": row["last_hour"] or 0,
|
"last_hour": row["last_hour"] or 0,
|
||||||
@@ -281,91 +449,105 @@ class StatisticsRepository:
|
|||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
cutoff = now - SECONDS_72H
|
cutoff = now - SECONDS_72H
|
||||||
# Bucket timestamps to the start of each hour
|
# Bucket timestamps to the start of each hour
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT (timestamp / 3600) * 3600 AS hour_ts, COUNT(*) AS count
|
"""
|
||||||
FROM raw_packets
|
SELECT (timestamp / 3600) * 3600 AS hour_ts, COUNT(*) AS count
|
||||||
WHERE timestamp >= ?
|
FROM raw_packets
|
||||||
GROUP BY hour_ts
|
WHERE timestamp >= ?
|
||||||
ORDER BY hour_ts
|
GROUP BY hour_ts
|
||||||
""",
|
ORDER BY hour_ts
|
||||||
(cutoff,),
|
""",
|
||||||
)
|
(cutoff,),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
return [{"timestamp": row["hour_ts"], "count": row["count"]} for row in rows]
|
return [{"timestamp": row["hour_ts"], "count": row["count"]} for row in rows]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def _path_hash_width_24h() -> dict[str, int | float]:
|
async def _path_hash_width_24h() -> dict[str, int | float]:
|
||||||
"""Count parsed raw packets from the last 24h by hop hash width."""
|
"""Count parsed raw packets from the last 24h by hop hash width."""
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
cursor = await db.conn.execute(
|
async with db.readonly() as conn:
|
||||||
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
async with conn.execute(
|
||||||
(now - SECONDS_24H,),
|
"SELECT data FROM raw_packets WHERE timestamp >= ?",
|
||||||
)
|
(now - SECONDS_24H,),
|
||||||
return await bucket_path_hash_widths(cursor, batch_size=RAW_PACKET_STATS_BATCH_SIZE)
|
) as cursor:
|
||||||
|
rows = await cursor.fetchall()
|
||||||
|
return bucket_path_hash_widths(rows)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def get_all() -> dict:
|
async def get_all() -> dict:
|
||||||
"""Aggregate all statistics from existing tables."""
|
"""Aggregate all statistics from existing tables.
|
||||||
|
|
||||||
|
Each helper acquires its own lock; there's no requirement that the
|
||||||
|
whole snapshot be atomic. If we ever wanted a consistent snapshot
|
||||||
|
we'd batch all queries into a single ``db.readonly()`` and use
|
||||||
|
``_in_conn`` helpers, but statistics are intentionally approximate.
|
||||||
|
"""
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
|
|
||||||
# Top 5 busiest channels in last 24h
|
async with db.readonly() as conn:
|
||||||
cursor = await db.conn.execute(
|
# Top 5 busiest channels in last 24h
|
||||||
"""
|
async with conn.execute(
|
||||||
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
"""
|
||||||
COUNT(*) AS message_count
|
SELECT m.conversation_key, COALESCE(c.name, m.conversation_key) AS channel_name,
|
||||||
FROM messages m
|
COUNT(*) AS message_count
|
||||||
LEFT JOIN channels c ON m.conversation_key = c.key
|
FROM messages m
|
||||||
WHERE m.type = 'CHAN' AND m.received_at >= ?
|
LEFT JOIN channels c ON m.conversation_key = c.key
|
||||||
GROUP BY m.conversation_key
|
WHERE m.type = 'CHAN' AND m.received_at >= ?
|
||||||
ORDER BY COUNT(*) DESC
|
GROUP BY m.conversation_key
|
||||||
LIMIT 5
|
ORDER BY COUNT(*) DESC
|
||||||
""",
|
LIMIT 5
|
||||||
(now - SECONDS_24H,),
|
""",
|
||||||
)
|
(now - SECONDS_24H,),
|
||||||
rows = await cursor.fetchall()
|
) as cursor:
|
||||||
busiest_channels_24h = [
|
rows = await cursor.fetchall()
|
||||||
{
|
busiest_channels_24h = [
|
||||||
"channel_key": row["conversation_key"],
|
{
|
||||||
"channel_name": row["channel_name"],
|
"channel_key": row["conversation_key"],
|
||||||
"message_count": row["message_count"],
|
"channel_name": row["channel_name"],
|
||||||
}
|
"message_count": row["message_count"],
|
||||||
for row in rows
|
}
|
||||||
]
|
for row in rows
|
||||||
|
]
|
||||||
|
|
||||||
# Entity counts
|
# Entity counts
|
||||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM contacts WHERE type != 2")
|
async with conn.execute(
|
||||||
row = await cursor.fetchone()
|
"SELECT COUNT(*) AS cnt FROM contacts WHERE type != 2"
|
||||||
assert row is not None
|
) as cursor:
|
||||||
contact_count: int = row["cnt"]
|
row = await cursor.fetchone()
|
||||||
|
assert row is not None
|
||||||
|
contact_count: int = row["cnt"]
|
||||||
|
|
||||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM contacts WHERE type = 2")
|
async with conn.execute(
|
||||||
row = await cursor.fetchone()
|
"SELECT COUNT(*) AS cnt FROM contacts WHERE type = 2"
|
||||||
assert row is not None
|
) as cursor:
|
||||||
repeater_count: int = row["cnt"]
|
row = await cursor.fetchone()
|
||||||
|
assert row is not None
|
||||||
|
repeater_count: int = row["cnt"]
|
||||||
|
|
||||||
cursor = await db.conn.execute("SELECT COUNT(*) AS cnt FROM channels")
|
async with conn.execute("SELECT COUNT(*) AS cnt FROM channels") as cursor:
|
||||||
row = await cursor.fetchone()
|
row = await cursor.fetchone()
|
||||||
assert row is not None
|
assert row is not None
|
||||||
channel_count: int = row["cnt"]
|
channel_count: int = row["cnt"]
|
||||||
|
|
||||||
# Packet split
|
# Packet split
|
||||||
cursor = await db.conn.execute(
|
async with conn.execute(
|
||||||
"""
|
"""
|
||||||
SELECT COUNT(*) AS total,
|
SELECT COUNT(*) AS total,
|
||||||
SUM(CASE WHEN message_id IS NOT NULL THEN 1 ELSE 0 END) AS decrypted
|
SUM(CASE WHEN message_id IS NOT NULL THEN 1 ELSE 0 END) AS decrypted
|
||||||
FROM raw_packets
|
FROM raw_packets
|
||||||
"""
|
"""
|
||||||
)
|
) as cursor:
|
||||||
pkt_row = await cursor.fetchone()
|
pkt_row = await cursor.fetchone()
|
||||||
assert pkt_row is not None
|
assert pkt_row is not None
|
||||||
total_packets = pkt_row["total"] or 0
|
total_packets = pkt_row["total"] or 0
|
||||||
decrypted_packets = pkt_row["decrypted"] or 0
|
decrypted_packets = pkt_row["decrypted"] or 0
|
||||||
undecrypted_packets = total_packets - decrypted_packets
|
undecrypted_packets = total_packets - decrypted_packets
|
||||||
|
|
||||||
|
# These each acquire their own lock. The snapshot isn't atomic across
|
||||||
|
# them — fine for stats, which are approximate by nature.
|
||||||
message_totals = await StatisticsRepository.get_database_message_totals()
|
message_totals = await StatisticsRepository.get_database_message_totals()
|
||||||
|
|
||||||
# Activity windows
|
|
||||||
contacts_heard = await StatisticsRepository._activity_counts(contact_type=2, exclude=True)
|
contacts_heard = await StatisticsRepository._activity_counts(contact_type=2, exclude=True)
|
||||||
repeaters_heard = await StatisticsRepository._activity_counts(contact_type=2)
|
repeaters_heard = await StatisticsRepository._activity_counts(contact_type=2)
|
||||||
known_channels_active = await StatisticsRepository._known_channels_active()
|
known_channels_active = await StatisticsRepository._known_channels_active()
|
||||||
|
|||||||
+26
-2
@@ -16,7 +16,16 @@ from app.repository.fanout import FanoutConfigRepository
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
router = APIRouter(prefix="/fanout", tags=["fanout"])
|
||||||
|
|
||||||
_VALID_TYPES = {"mqtt_private", "mqtt_community", "bot", "webhook", "apprise", "sqs", "map_upload"}
|
_VALID_TYPES = {
|
||||||
|
"mqtt_private",
|
||||||
|
"mqtt_community",
|
||||||
|
"mqtt_ha",
|
||||||
|
"bot",
|
||||||
|
"webhook",
|
||||||
|
"apprise",
|
||||||
|
"sqs",
|
||||||
|
"map_upload",
|
||||||
|
}
|
||||||
|
|
||||||
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
_IATA_RE = re.compile(r"^[A-Z]{3}$")
|
||||||
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
_DEFAULT_COMMUNITY_MQTT_TOPIC_TEMPLATE = "meshcore/{IATA}/{PUBLIC_KEY}/packets"
|
||||||
@@ -96,6 +105,8 @@ def _validate_and_normalize_config(config_type: str, config: dict) -> dict:
|
|||||||
_validate_sqs_config(normalized)
|
_validate_sqs_config(normalized)
|
||||||
elif config_type == "map_upload":
|
elif config_type == "map_upload":
|
||||||
_validate_map_upload_config(normalized)
|
_validate_map_upload_config(normalized)
|
||||||
|
elif config_type == "mqtt_ha":
|
||||||
|
_validate_mqtt_ha_config(normalized)
|
||||||
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
@@ -318,6 +329,19 @@ def _validate_map_upload_config(config: dict) -> None:
|
|||||||
config["geofence_radius_km"] = radius
|
config["geofence_radius_km"] = radius
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_mqtt_ha_config(config: dict) -> None:
|
||||||
|
"""Validate mqtt_ha config blob."""
|
||||||
|
if not config.get("broker_host"):
|
||||||
|
raise HTTPException(status_code=400, detail="broker_host is required for mqtt_ha")
|
||||||
|
port = config.get("broker_port", 1883)
|
||||||
|
if not isinstance(port, int) or port < 1 or port > 65535:
|
||||||
|
raise HTTPException(status_code=400, detail="broker_port must be between 1 and 65535")
|
||||||
|
for field in ("tracked_contacts", "tracked_repeaters"):
|
||||||
|
value = config.get(field)
|
||||||
|
if value is not None and not isinstance(value, list):
|
||||||
|
raise HTTPException(status_code=400, detail=f"{field} must be a list of public keys")
|
||||||
|
|
||||||
|
|
||||||
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
def _enforce_scope(config_type: str, scope: dict) -> dict:
|
||||||
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
"""Enforce type-specific scope constraints. Returns normalized scope."""
|
||||||
if config_type == "mqtt_community":
|
if config_type == "mqtt_community":
|
||||||
@@ -326,7 +350,7 @@ def _enforce_scope(config_type: str, scope: dict) -> dict:
|
|||||||
return {"messages": "none", "raw_packets": "all"}
|
return {"messages": "none", "raw_packets": "all"}
|
||||||
if config_type == "bot":
|
if config_type == "bot":
|
||||||
return {"messages": "all", "raw_packets": "none"}
|
return {"messages": "all", "raw_packets": "none"}
|
||||||
if config_type in ("webhook", "apprise"):
|
if config_type in ("webhook", "apprise", "mqtt_ha"):
|
||||||
messages = scope.get("messages", "all")
|
messages = scope.get("messages", "all")
|
||||||
if messages not in ("all", "none") and not isinstance(messages, dict):
|
if messages not in ("all", "none") and not isinstance(messages, dict):
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
|
|||||||
@@ -0,0 +1,164 @@
|
|||||||
|
"""Web Push subscription management endpoints."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from pywebpush import WebPushException
|
||||||
|
|
||||||
|
from app.push.send import send_push
|
||||||
|
from app.push.vapid import get_vapid_private_key, get_vapid_public_key
|
||||||
|
from app.repository.push_subscriptions import PushSubscriptionRepository
|
||||||
|
from app.repository.settings import AppSettingsRepository
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/push", tags=["push"])
|
||||||
|
|
||||||
|
|
||||||
|
# ── Request/response models ─────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
class VapidPublicKeyResponse(BaseModel):
|
||||||
|
public_key: str
|
||||||
|
|
||||||
|
|
||||||
|
class PushSubscribeRequest(BaseModel):
|
||||||
|
endpoint: str = Field(min_length=1)
|
||||||
|
p256dh: str = Field(min_length=1)
|
||||||
|
auth: str = Field(min_length=1)
|
||||||
|
label: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class PushSubscriptionUpdate(BaseModel):
|
||||||
|
label: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class PushConversationToggle(BaseModel):
|
||||||
|
key: str = Field(min_length=1)
|
||||||
|
|
||||||
|
|
||||||
|
# ─��� Endpoints ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/vapid-public-key", response_model=VapidPublicKeyResponse)
|
||||||
|
async def vapid_public_key() -> VapidPublicKeyResponse:
|
||||||
|
"""Return the VAPID public key for browser PushManager.subscribe()."""
|
||||||
|
key = get_vapid_public_key()
|
||||||
|
if not key:
|
||||||
|
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||||
|
return VapidPublicKeyResponse(public_key=key)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/subscribe")
|
||||||
|
async def subscribe(body: PushSubscribeRequest) -> dict:
|
||||||
|
"""Register or update a push subscription (device). Upserts by endpoint."""
|
||||||
|
sub = await PushSubscriptionRepository.create(
|
||||||
|
endpoint=body.endpoint,
|
||||||
|
p256dh=body.p256dh,
|
||||||
|
auth=body.auth,
|
||||||
|
label=body.label,
|
||||||
|
)
|
||||||
|
return sub
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/subscriptions")
|
||||||
|
async def list_subscriptions() -> list[dict]:
|
||||||
|
"""List all push subscriptions (devices)."""
|
||||||
|
return await PushSubscriptionRepository.get_all()
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/subscriptions/{subscription_id}")
|
||||||
|
async def update_subscription(subscription_id: str, body: PushSubscriptionUpdate) -> dict:
|
||||||
|
"""Update a subscription's label."""
|
||||||
|
existing = await PushSubscriptionRepository.get(subscription_id)
|
||||||
|
if not existing:
|
||||||
|
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||||
|
|
||||||
|
updates = {}
|
||||||
|
if body.label is not None:
|
||||||
|
updates["label"] = body.label
|
||||||
|
|
||||||
|
result = await PushSubscriptionRepository.update(subscription_id, **updates)
|
||||||
|
return result or existing
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/subscriptions/{subscription_id}")
|
||||||
|
async def unsubscribe(subscription_id: str) -> dict:
|
||||||
|
"""Delete a push subscription (device)."""
|
||||||
|
deleted = await PushSubscriptionRepository.delete(subscription_id)
|
||||||
|
if not deleted:
|
||||||
|
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||||
|
return {"deleted": True}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/subscriptions/{subscription_id}/test")
|
||||||
|
async def test_push(subscription_id: str) -> dict:
|
||||||
|
"""Send a test notification to a subscription."""
|
||||||
|
sub = await PushSubscriptionRepository.get(subscription_id)
|
||||||
|
if not sub:
|
||||||
|
raise HTTPException(status_code=404, detail="Subscription not found")
|
||||||
|
|
||||||
|
vapid_key = get_vapid_private_key()
|
||||||
|
if not vapid_key:
|
||||||
|
raise HTTPException(status_code=503, detail="VAPID keys not initialized")
|
||||||
|
|
||||||
|
payload = json.dumps(
|
||||||
|
{
|
||||||
|
"title": "RemoteTerm Test",
|
||||||
|
"body": "Push notifications are working!",
|
||||||
|
"tag": "meshcore-test",
|
||||||
|
"url_hash": "",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with asyncio.timeout(15):
|
||||||
|
await send_push(
|
||||||
|
subscription_info={
|
||||||
|
"endpoint": sub["endpoint"],
|
||||||
|
"keys": {"p256dh": sub["p256dh"], "auth": sub["auth"]},
|
||||||
|
},
|
||||||
|
payload=payload,
|
||||||
|
vapid_private_key=vapid_key,
|
||||||
|
vapid_claims={"sub": "mailto:noreply@meshcore.local"},
|
||||||
|
)
|
||||||
|
return {"status": "sent"}
|
||||||
|
except TimeoutError:
|
||||||
|
raise HTTPException(status_code=504, detail="Push delivery timed out") from None
|
||||||
|
except WebPushException as e:
|
||||||
|
status_code = getattr(getattr(e, "response", None), "status_code", 0)
|
||||||
|
if status_code in (403, 404, 410):
|
||||||
|
logger.info(
|
||||||
|
"Test push: subscription stale (HTTP %d), removing %s",
|
||||||
|
status_code,
|
||||||
|
subscription_id,
|
||||||
|
)
|
||||||
|
await PushSubscriptionRepository.delete(subscription_id)
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=410,
|
||||||
|
detail="Subscription is stale (VAPID key mismatch or expired). "
|
||||||
|
"Re-enable push from a conversation header.",
|
||||||
|
) from None
|
||||||
|
logger.warning("Test push failed: %s", e)
|
||||||
|
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning("Test push failed: %s", e)
|
||||||
|
raise HTTPException(status_code=502, detail=f"Push delivery failed: {e}") from None
|
||||||
|
|
||||||
|
|
||||||
|
# ── Global push conversation management ──────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/conversations")
|
||||||
|
async def get_push_conversations() -> list[str]:
|
||||||
|
"""Return the global list of push-enabled conversation state keys."""
|
||||||
|
return await AppSettingsRepository.get_push_conversations()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/conversations/toggle")
|
||||||
|
async def toggle_push_conversation(body: PushConversationToggle) -> list[str]:
|
||||||
|
"""Add or remove a conversation from the global push list."""
|
||||||
|
return await AppSettingsRepository.toggle_push_conversation(body.key)
|
||||||
@@ -94,6 +94,7 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
contact = await _resolve_contact_or_404(public_key)
|
contact = await _resolve_contact_or_404(public_key)
|
||||||
_require_repeater(contact)
|
_require_repeater(contact)
|
||||||
|
|
||||||
|
lpp_raw = None
|
||||||
async with radio_manager.radio_operation(
|
async with radio_manager.radio_operation(
|
||||||
"repeater_status", pause_polling=True, suspend_auto_fetch=True
|
"repeater_status", pause_polling=True, suspend_auto_fetch=True
|
||||||
) as mc:
|
) as mc:
|
||||||
@@ -102,6 +103,15 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
|
|
||||||
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
status = await mc.commands.req_status_sync(contact.public_key, timeout=10, min_timeout=5)
|
||||||
|
|
||||||
|
# Best-effort LPP sensor fetch while we still hold the lock
|
||||||
|
if status is not None:
|
||||||
|
try:
|
||||||
|
lpp_raw = await mc.commands.req_telemetry_sync(
|
||||||
|
contact.public_key, timeout=10, min_timeout=5
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug("LPP sensor fetch failed for %s (non-fatal): %s", public_key[:12], e)
|
||||||
|
|
||||||
if status is None:
|
if status is None:
|
||||||
raise HTTPException(status_code=504, detail="No status response from repeater")
|
raise HTTPException(status_code=504, detail="No status response from repeater")
|
||||||
|
|
||||||
@@ -128,6 +138,24 @@ async def repeater_status(public_key: str) -> RepeaterStatusResponse:
|
|||||||
# Record to telemetry history as a JSON blob (best-effort)
|
# Record to telemetry history as a JSON blob (best-effort)
|
||||||
now = int(time.time())
|
now = int(time.time())
|
||||||
status_dict = response.model_dump(exclude={"telemetry_history"})
|
status_dict = response.model_dump(exclude={"telemetry_history"})
|
||||||
|
|
||||||
|
# Attach scalar LPP sensors to the stored snapshot (same logic as auto-collect)
|
||||||
|
if lpp_raw:
|
||||||
|
lpp_sensors = []
|
||||||
|
for entry in lpp_raw:
|
||||||
|
value = entry.get("value", 0)
|
||||||
|
if isinstance(value, dict):
|
||||||
|
continue
|
||||||
|
lpp_sensors.append(
|
||||||
|
{
|
||||||
|
"channel": entry.get("channel", 0),
|
||||||
|
"type_name": str(entry.get("type", "unknown")),
|
||||||
|
"value": value,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if lpp_sensors:
|
||||||
|
status_dict["lpp_sensors"] = lpp_sensors
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await RepeaterTelemetryRepository.record(
|
await RepeaterTelemetryRepository.record(
|
||||||
public_key=contact.public_key,
|
public_key=contact.public_key,
|
||||||
|
|||||||
@@ -8,6 +8,13 @@ from pydantic import BaseModel, Field
|
|||||||
from app.models import CONTACT_TYPE_REPEATER, AppSettings
|
from app.models import CONTACT_TYPE_REPEATER, AppSettings
|
||||||
from app.region_scope import normalize_region_scope
|
from app.region_scope import normalize_region_scope
|
||||||
from app.repository import AppSettingsRepository, ChannelRepository, ContactRepository
|
from app.repository import AppSettingsRepository, ChannelRepository, ContactRepository
|
||||||
|
from app.telemetry_interval import (
|
||||||
|
DEFAULT_TELEMETRY_INTERVAL_HOURS,
|
||||||
|
TELEMETRY_INTERVAL_OPTIONS_HOURS,
|
||||||
|
clamp_telemetry_interval,
|
||||||
|
legal_interval_options,
|
||||||
|
next_run_timestamp_utc,
|
||||||
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
router = APIRouter(prefix="/settings", tags=["settings"])
|
router = APIRouter(prefix="/settings", tags=["settings"])
|
||||||
@@ -57,6 +64,15 @@ class AppSettingsUpdate(BaseModel):
|
|||||||
default=None,
|
default=None,
|
||||||
description="Auto-resend channel messages once if no echo heard within 2 seconds",
|
description="Auto-resend channel messages once if no echo heard within 2 seconds",
|
||||||
)
|
)
|
||||||
|
telemetry_interval_hours: int | None = Field(
|
||||||
|
default=None,
|
||||||
|
description=(
|
||||||
|
"Preferred tracked-repeater telemetry interval in hours. "
|
||||||
|
f"Must be one of {list(TELEMETRY_INTERVAL_OPTIONS_HOURS)}. "
|
||||||
|
"Effective interval is clamped up to the shortest legal value "
|
||||||
|
"based on the current tracked-repeater count."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class BlockKeyRequest(BaseModel):
|
class BlockKeyRequest(BaseModel):
|
||||||
@@ -82,6 +98,29 @@ class TrackedTelemetryRequest(BaseModel):
|
|||||||
public_key: str = Field(description="Public key of the repeater to toggle tracking")
|
public_key: str = Field(description="Public key of the repeater to toggle tracking")
|
||||||
|
|
||||||
|
|
||||||
|
class TelemetrySchedule(BaseModel):
|
||||||
|
"""Surface of telemetry scheduling derivations for the UI.
|
||||||
|
|
||||||
|
``preferred_hours`` is the stored user choice. ``effective_hours`` is the
|
||||||
|
value the scheduler actually uses (preferred, clamped up to the shortest
|
||||||
|
legal interval given the current tracked-repeater count). ``options``
|
||||||
|
lists the subset of the menu that is legal at the current count; the UI
|
||||||
|
should hide anything not in this list. ``next_run_at`` is the Unix
|
||||||
|
timestamp (seconds, UTC) of the next scheduled cycle, or ``None`` when
|
||||||
|
no repeaters are tracked (nothing to schedule).
|
||||||
|
"""
|
||||||
|
|
||||||
|
preferred_hours: int = Field(description="User's saved telemetry interval preference")
|
||||||
|
effective_hours: int = Field(description="Scheduler's clamped interval")
|
||||||
|
options: list[int] = Field(description="Legal interval choices at the current count")
|
||||||
|
tracked_count: int = Field(description="Number of repeaters currently tracked")
|
||||||
|
max_tracked: int = Field(description="Maximum number of repeaters that can be tracked")
|
||||||
|
next_run_at: int | None = Field(
|
||||||
|
default=None,
|
||||||
|
description="Unix timestamp (UTC seconds) of the next scheduled cycle",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TrackedTelemetryResponse(BaseModel):
|
class TrackedTelemetryResponse(BaseModel):
|
||||||
tracked_telemetry_repeaters: list[str] = Field(
|
tracked_telemetry_repeaters: list[str] = Field(
|
||||||
description="Current list of tracked repeater public keys"
|
description="Current list of tracked repeater public keys"
|
||||||
@@ -89,6 +128,24 @@ class TrackedTelemetryResponse(BaseModel):
|
|||||||
names: dict[str, str] = Field(
|
names: dict[str, str] = Field(
|
||||||
description="Map of public key to display name for tracked repeaters"
|
description="Map of public key to display name for tracked repeaters"
|
||||||
)
|
)
|
||||||
|
schedule: TelemetrySchedule = Field(description="Current scheduling state")
|
||||||
|
|
||||||
|
|
||||||
|
def _build_schedule(tracked_count: int, preferred_hours: int | None) -> TelemetrySchedule:
|
||||||
|
pref = (
|
||||||
|
preferred_hours
|
||||||
|
if preferred_hours in TELEMETRY_INTERVAL_OPTIONS_HOURS
|
||||||
|
else DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
)
|
||||||
|
effective = clamp_telemetry_interval(pref, tracked_count)
|
||||||
|
return TelemetrySchedule(
|
||||||
|
preferred_hours=pref,
|
||||||
|
effective_hours=effective,
|
||||||
|
options=legal_interval_options(tracked_count),
|
||||||
|
tracked_count=tracked_count,
|
||||||
|
max_tracked=MAX_TRACKED_TELEMETRY_REPEATERS,
|
||||||
|
next_run_at=next_run_timestamp_utc(effective) if tracked_count > 0 else None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.get("", response_model=AppSettings)
|
@router.get("", response_model=AppSettings)
|
||||||
@@ -136,6 +193,20 @@ async def update_settings(update: AppSettingsUpdate) -> AppSettings:
|
|||||||
if update.auto_resend_channel is not None:
|
if update.auto_resend_channel is not None:
|
||||||
kwargs["auto_resend_channel"] = update.auto_resend_channel
|
kwargs["auto_resend_channel"] = update.auto_resend_channel
|
||||||
|
|
||||||
|
# Telemetry interval preference. Invalid values fall back to default
|
||||||
|
# rather than 400-ing so a stale client can't brick settings saves.
|
||||||
|
if update.telemetry_interval_hours is not None:
|
||||||
|
raw_interval = update.telemetry_interval_hours
|
||||||
|
if raw_interval not in TELEMETRY_INTERVAL_OPTIONS_HOURS:
|
||||||
|
logger.warning(
|
||||||
|
"telemetry_interval_hours=%r is not in the menu; defaulting to %d",
|
||||||
|
raw_interval,
|
||||||
|
DEFAULT_TELEMETRY_INTERVAL_HOURS,
|
||||||
|
)
|
||||||
|
raw_interval = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
logger.info("Updating telemetry_interval_hours to %d", raw_interval)
|
||||||
|
kwargs["telemetry_interval_hours"] = raw_interval
|
||||||
|
|
||||||
# Flood scope
|
# Flood scope
|
||||||
flood_scope_changed = False
|
flood_scope_changed = False
|
||||||
if update.flood_scope is not None:
|
if update.flood_scope is not None:
|
||||||
@@ -229,6 +300,7 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
|||||||
return TrackedTelemetryResponse(
|
return TrackedTelemetryResponse(
|
||||||
tracked_telemetry_repeaters=new_list,
|
tracked_telemetry_repeaters=new_list,
|
||||||
names=await _resolve_names(new_list),
|
names=await _resolve_names(new_list),
|
||||||
|
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Validate it's a repeater
|
# Validate it's a repeater
|
||||||
@@ -255,4 +327,20 @@ async def toggle_tracked_telemetry(request: TrackedTelemetryRequest) -> TrackedT
|
|||||||
return TrackedTelemetryResponse(
|
return TrackedTelemetryResponse(
|
||||||
tracked_telemetry_repeaters=new_list,
|
tracked_telemetry_repeaters=new_list,
|
||||||
names=await _resolve_names(new_list),
|
names=await _resolve_names(new_list),
|
||||||
|
schedule=_build_schedule(len(new_list), settings.telemetry_interval_hours),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tracked-telemetry/schedule", response_model=TelemetrySchedule)
|
||||||
|
async def get_telemetry_schedule() -> TelemetrySchedule:
|
||||||
|
"""Return the current telemetry scheduling derivation.
|
||||||
|
|
||||||
|
The UI uses this to render the interval dropdown (legal options),
|
||||||
|
surface saved-vs-effective when they differ, and show the next-run-at
|
||||||
|
timestamp so users know when the next cycle will fire.
|
||||||
|
"""
|
||||||
|
app_settings = await AppSettingsRepository.get()
|
||||||
|
return _build_schedule(
|
||||||
|
len(app_settings.tracked_telemetry_repeaters),
|
||||||
|
app_settings.telemetry_interval_hours,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -252,6 +252,11 @@ async def _store_direct_message(
|
|||||||
|
|
||||||
if update_last_contacted_key:
|
if update_last_contacted_key:
|
||||||
await contact_repository.update_last_contacted(update_last_contacted_key, received_at)
|
await contact_repository.update_last_contacted(update_last_contacted_key, received_at)
|
||||||
|
# Incoming DMs are direct RF evidence that this contact transmitted;
|
||||||
|
# outgoing DMs are our own send and must not bump the contact's
|
||||||
|
# last_seen.
|
||||||
|
if not outgoing:
|
||||||
|
await contact_repository.touch_last_seen(update_last_contacted_key, received_at)
|
||||||
|
|
||||||
return message
|
return message
|
||||||
|
|
||||||
|
|||||||
@@ -513,14 +513,15 @@ async def _retry_direct_message_until_acked(
|
|||||||
|
|
||||||
ack_code = _extract_expected_ack_code(result)
|
ack_code = _extract_expected_ack_code(result)
|
||||||
if not ack_code:
|
if not ack_code:
|
||||||
logger.warning(
|
logger.debug(
|
||||||
"Background DM retry attempt %d/%d for %s returned no expected_ack; "
|
"Background DM retry attempt %d/%d for %s returned no expected_ack; "
|
||||||
"stopping retries to avoid duplicate sends",
|
"continuing with previous timeout",
|
||||||
attempt + 1,
|
attempt + 1,
|
||||||
DM_SEND_MAX_ATTEMPTS,
|
DM_SEND_MAX_ATTEMPTS,
|
||||||
contact.public_key[:12],
|
contact.public_key[:12],
|
||||||
)
|
)
|
||||||
return
|
attempt += 1
|
||||||
|
continue
|
||||||
|
|
||||||
next_wait_timeout_ms = _get_direct_message_retry_timeout_ms(result)
|
next_wait_timeout_ms = _get_direct_message_retry_timeout_ms(result)
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,88 @@
|
|||||||
|
"""Shared math for the tracked-repeater telemetry scheduler.
|
||||||
|
|
||||||
|
The app enforces a ceiling of 24 repeater status checks per 24 hours across
|
||||||
|
all tracked repeaters. With N repeaters tracked, the shortest legal interval
|
||||||
|
is ``24 // floor(24 / N)`` hours. Longer intervals (``12`` or ``24``) are
|
||||||
|
always legal at any N and are offered as user choices on top of the derived
|
||||||
|
shortest-legal value.
|
||||||
|
|
||||||
|
The user picks an interval via settings. The scheduler uses
|
||||||
|
``clamp_telemetry_interval`` to push that pick up to the shortest legal
|
||||||
|
interval if the user has added repeaters that invalidated their choice.
|
||||||
|
The stored preference is *not* mutated on clamp — users get their pick back
|
||||||
|
if they later drop repeaters.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
|
||||||
|
# Daily check budget: total number of repeater status checks we allow
|
||||||
|
# across all tracked repeaters per 24-hour window.
|
||||||
|
DAILY_CHECK_CEILING = 24
|
||||||
|
|
||||||
|
# Menu of interval values shown to users. The derivation-based options
|
||||||
|
# (1..8) are filtered per current repeater count via
|
||||||
|
# ``legal_interval_options``; 12 and 24 are always legal.
|
||||||
|
TELEMETRY_INTERVAL_OPTIONS_HOURS: tuple[int, ...] = (1, 2, 3, 4, 6, 8, 12, 24)
|
||||||
|
|
||||||
|
DEFAULT_TELEMETRY_INTERVAL_HOURS = 8
|
||||||
|
|
||||||
|
|
||||||
|
def shortest_legal_interval_hours(n_tracked: int) -> int:
|
||||||
|
"""Return the shortest interval (hours) that keeps under the daily ceiling.
|
||||||
|
|
||||||
|
With ``N`` repeaters, each full cycle costs ``N`` checks. We're capped at
|
||||||
|
``DAILY_CHECK_CEILING`` checks/day, so the maximum cycles/day is
|
||||||
|
``floor(24 / N)`` and the resulting interval is ``24 // cycles_per_day``.
|
||||||
|
For ``N == 0`` we return the default so the math still terminates, though
|
||||||
|
the scheduler skips empty-tracked cycles regardless.
|
||||||
|
"""
|
||||||
|
if n_tracked <= 0:
|
||||||
|
return DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
cycles_per_day = DAILY_CHECK_CEILING // n_tracked
|
||||||
|
if cycles_per_day <= 0:
|
||||||
|
# Would exceed ceiling even at 24h cadence; fall back to 24h.
|
||||||
|
return 24
|
||||||
|
return 24 // cycles_per_day
|
||||||
|
|
||||||
|
|
||||||
|
def clamp_telemetry_interval(preferred_hours: int, n_tracked: int) -> int:
|
||||||
|
"""Return the effective interval: max of user preference and shortest legal.
|
||||||
|
|
||||||
|
Unrecognized values fall back to the default.
|
||||||
|
"""
|
||||||
|
if preferred_hours not in TELEMETRY_INTERVAL_OPTIONS_HOURS:
|
||||||
|
preferred_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
shortest = shortest_legal_interval_hours(n_tracked)
|
||||||
|
return max(preferred_hours, shortest)
|
||||||
|
|
||||||
|
|
||||||
|
def legal_interval_options(n_tracked: int) -> list[int]:
|
||||||
|
"""Return the subset of the interval menu that is legal for a given N."""
|
||||||
|
shortest = shortest_legal_interval_hours(n_tracked)
|
||||||
|
return [h for h in TELEMETRY_INTERVAL_OPTIONS_HOURS if h >= shortest]
|
||||||
|
|
||||||
|
|
||||||
|
def next_run_timestamp_utc(effective_hours: int, now: datetime | None = None) -> int:
|
||||||
|
"""Return Unix timestamp for the next UTC top-of-hour where
|
||||||
|
``hour % effective_hours == 0``.
|
||||||
|
|
||||||
|
Returns the next matching hour strictly in the future (never ``now``
|
||||||
|
itself, even if ``now`` lies exactly on a matching boundary).
|
||||||
|
"""
|
||||||
|
if effective_hours <= 0:
|
||||||
|
effective_hours = DEFAULT_TELEMETRY_INTERVAL_HOURS
|
||||||
|
if now is None:
|
||||||
|
now = datetime.now(UTC)
|
||||||
|
else:
|
||||||
|
now = now.astimezone(UTC)
|
||||||
|
|
||||||
|
# Round up to the next top-of-hour, then skip forward until the modulo matches.
|
||||||
|
candidate = now.replace(minute=0, second=0, microsecond=0)
|
||||||
|
# Always move at least one hour forward so "now" never matches.
|
||||||
|
candidate = candidate.replace(hour=candidate.hour)
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
candidate = candidate + timedelta(hours=1)
|
||||||
|
while candidate.hour % effective_hours != 0:
|
||||||
|
candidate = candidate + timedelta(hours=1)
|
||||||
|
return int(candidate.timestamp())
|
||||||
@@ -108,6 +108,10 @@ def broadcast_event(event_type: str, data: dict, *, realtime: bool = True) -> No
|
|||||||
|
|
||||||
if event_type == "message":
|
if event_type == "message":
|
||||||
asyncio.create_task(fanout_manager.broadcast_message(data))
|
asyncio.create_task(fanout_manager.broadcast_message(data))
|
||||||
|
|
||||||
|
from app.push.manager import push_manager
|
||||||
|
|
||||||
|
asyncio.create_task(push_manager.dispatch_message(data))
|
||||||
elif event_type == "raw_packet":
|
elif event_type == "raw_packet":
|
||||||
asyncio.create_task(fanout_manager.broadcast_raw(data))
|
asyncio.create_task(fanout_manager.broadcast_raw(data))
|
||||||
elif event_type == "contact":
|
elif event_type == "contact":
|
||||||
|
|||||||
+28
-5
@@ -40,7 +40,8 @@ frontend/src/
|
|||||||
├── styles.css # Additional global app styles
|
├── styles.css # Additional global app styles
|
||||||
├── themes.css # Color theme definitions
|
├── themes.css # Color theme definitions
|
||||||
├── contexts/
|
├── contexts/
|
||||||
│ └── DistanceUnitContext.tsx # Browser-local distance-unit context/provider
|
│ ├── DistanceUnitContext.tsx # Browser-local distance-unit context/provider
|
||||||
|
│ └── PushSubscriptionContext.tsx # Push subscription state context/provider
|
||||||
├── lib/
|
├── lib/
|
||||||
│ └── utils.ts # cn() — clsx + tailwind-merge helper
|
│ └── utils.ts # cn() — clsx + tailwind-merge helper
|
||||||
├── hooks/
|
├── hooks/
|
||||||
@@ -57,6 +58,7 @@ frontend/src/
|
|||||||
│ ├── useConversationRouter.ts # URL hash → active conversation routing
|
│ ├── useConversationRouter.ts # URL hash → active conversation routing
|
||||||
│ ├── useContactsAndChannels.ts # Contact/channel loading, creation, deletion
|
│ ├── useContactsAndChannels.ts # Contact/channel loading, creation, deletion
|
||||||
│ ├── useBrowserNotifications.ts # Per-conversation browser notification preferences + dispatch
|
│ ├── useBrowserNotifications.ts # Per-conversation browser notification preferences + dispatch
|
||||||
|
│ ├── usePushSubscription.ts # Web Push subscription lifecycle, per-conversation filters
|
||||||
│ ├── useFaviconBadge.ts # Browser tab unread badge state
|
│ ├── useFaviconBadge.ts # Browser tab unread badge state
|
||||||
│ ├── useRawPacketStatsSession.ts # Session-scoped packet-feed stats history
|
│ ├── useRawPacketStatsSession.ts # Session-scoped packet-feed stats history
|
||||||
│ └── useRememberedServerPassword.ts # Browser-local repeater/room password persistence
|
│ └── useRememberedServerPassword.ts # Browser-local repeater/room password persistence
|
||||||
@@ -91,7 +93,13 @@ frontend/src/
|
|||||||
│ ├── radioPresets.ts # LoRa radio preset configurations
|
│ ├── radioPresets.ts # LoRa radio preset configurations
|
||||||
│ ├── publicChannel.ts # Public-channel resolution helpers for routing/hash defaults
|
│ ├── publicChannel.ts # Public-channel resolution helpers for routing/hash defaults
|
||||||
│ ├── fontScale.ts # Browser-local relative font scale persistence/application
|
│ ├── fontScale.ts # Browser-local relative font scale persistence/application
|
||||||
│ └── theme.ts # Theme switching helpers
|
│ ├── theme.ts # Theme switching helpers
|
||||||
|
│ ├── autoFocusInput.ts # Auto-focus input helper
|
||||||
|
│ ├── batteryDisplay.ts # Battery level display helpers
|
||||||
|
│ ├── messageIdentity.ts # Message identity/dedup helpers
|
||||||
|
│ ├── rawPacketInspector.ts # Raw packet inspection helpers
|
||||||
|
│ ├── serverLoginState.ts # Server login state helpers
|
||||||
|
│ └── statusDotPulse.ts # Status dot pulse animation helpers
|
||||||
├── components/
|
├── components/
|
||||||
│ ├── StatusBar.tsx
|
│ ├── StatusBar.tsx
|
||||||
│ ├── Sidebar.tsx
|
│ ├── Sidebar.tsx
|
||||||
@@ -134,7 +142,8 @@ frontend/src/
|
|||||||
│ │ ├── SettingsDatabaseSection.tsx # DB size, cleanup, auto-decrypt, local label
|
│ │ ├── SettingsDatabaseSection.tsx # DB size, cleanup, auto-decrypt, local label
|
||||||
│ │ ├── SettingsStatisticsSection.tsx # Read-only mesh network stats
|
│ │ ├── SettingsStatisticsSection.tsx # Read-only mesh network stats
|
||||||
│ │ ├── SettingsAboutSection.tsx # Version, author, license, links
|
│ │ ├── SettingsAboutSection.tsx # Version, author, license, links
|
||||||
│ │ └── ThemeSelector.tsx # Color theme picker
|
│ │ ├── ThemeSelector.tsx # Color theme picker
|
||||||
|
│ │ └── BulkDeleteContactsModal.tsx # Bulk contact deletion dialog
|
||||||
│ ├── repeater/
|
│ ├── repeater/
|
||||||
│ │ ├── repeaterPaneShared.tsx # Shared: RepeaterPane, KvRow, format helpers
|
│ │ ├── repeaterPaneShared.tsx # Shared: RepeaterPane, KvRow, format helpers
|
||||||
│ │ ├── RepeaterTelemetryPane.tsx # Battery, airtime, packet counts
|
│ │ ├── RepeaterTelemetryPane.tsx # Battery, airtime, packet counts
|
||||||
@@ -144,6 +153,7 @@ frontend/src/
|
|||||||
│ │ ├── RepeaterRadioSettingsPane.tsx # Radio config + advert intervals
|
│ │ ├── RepeaterRadioSettingsPane.tsx # Radio config + advert intervals
|
||||||
│ │ ├── RepeaterLppTelemetryPane.tsx # CayenneLPP sensor data
|
│ │ ├── RepeaterLppTelemetryPane.tsx # CayenneLPP sensor data
|
||||||
│ │ ├── RepeaterOwnerInfoPane.tsx # Owner info + guest password
|
│ │ ├── RepeaterOwnerInfoPane.tsx # Owner info + guest password
|
||||||
|
│ │ ├── RepeaterTelemetryHistoryPane.tsx # Historical telemetry chart/table
|
||||||
│ │ ├── RepeaterActionsPane.tsx # Send Advert, Sync Clock, Reboot
|
│ │ ├── RepeaterActionsPane.tsx # Send Advert, Sync Clock, Reboot
|
||||||
│ │ └── RepeaterConsolePane.tsx # CLI console with history
|
│ │ └── RepeaterConsolePane.tsx # CLI console with history
|
||||||
│ └── ui/ # shadcn/ui primitives
|
│ └── ui/ # shadcn/ui primitives
|
||||||
@@ -356,7 +366,7 @@ LocalStorage migration helpers for favorites; canonical favorites are server-sid
|
|||||||
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
||||||
- `tracked_telemetry_repeaters`
|
- `tracked_telemetry_repeaters`
|
||||||
- `auto_resend_channel`
|
- `auto_resend_channel`
|
||||||
|
- `telemetry_interval_hours`
|
||||||
|
|
||||||
Note: MQTT, bot, and community MQTT settings were migrated to the `fanout_configs` table (managed via `/api/fanout`). They are no longer part of `AppSettings`.
|
Note: MQTT, bot, and community MQTT settings were migrated to the `fanout_configs` table (managed via `/api/fanout`). They are no longer part of `AppSettings`.
|
||||||
|
|
||||||
@@ -429,6 +439,17 @@ The `SearchView` component (`components/SearchView.tsx`) provides full-text sear
|
|||||||
- **Bidirectional pagination**: After jumping mid-history, `hasNewerMessages` enables forward pagination via `fetchNewerMessages`. The scroll-to-bottom button calls `jumpToBottom` (re-fetches latest page) instead of just scrolling.
|
- **Bidirectional pagination**: After jumping mid-history, `hasNewerMessages` enables forward pagination via `fetchNewerMessages`. The scroll-to-bottom button calls `jumpToBottom` (re-fetches latest page) instead of just scrolling.
|
||||||
- **WS message suppression**: When `hasNewerMessages` is true, incoming WS messages for the active conversation are not added to the message list (the user is viewing historical context, not the latest page).
|
- **WS message suppression**: When `hasNewerMessages` is true, incoming WS messages for the active conversation are not added to the message list (the user is viewing historical context, not the latest page).
|
||||||
|
|
||||||
|
## Web Push Notifications
|
||||||
|
|
||||||
|
Web Push allows notifications even when the browser tab is closed. Requires HTTPS (self-signed OK).
|
||||||
|
|
||||||
|
- **Service worker**: `frontend/public/sw.js` handles `push` events (show notification) and `notificationclick` (focus/open tab, navigate via `url_hash`). Registered in `main.tsx` on secure contexts only.
|
||||||
|
- **`usePushSubscription` hook**: manages the full subscription lifecycle — subscribe (register SW → `PushManager.subscribe()` → POST to backend), unsubscribe, global push-conversation toggles, device listing, and deletion.
|
||||||
|
- **ChatHeader integration**: `BellRing` icon (amber when active) appears next to the existing desktop notification `Bell` on secure contexts. First click subscribes the browser and enables push for that conversation; subsequent clicks toggle the conversation on/off.
|
||||||
|
- **Settings > Local**: `PushDeviceManagement` component shows subscription status, lists all registered devices with test/delete buttons. Uses `usePushSubscription` hook directly.
|
||||||
|
- Auto-generates device labels from User-Agent (e.g., "Chrome on macOS").
|
||||||
|
- `PushSubscriptionInfo` type in `types.ts`; API methods in `api.ts`.
|
||||||
|
|
||||||
## Styling
|
## Styling
|
||||||
|
|
||||||
UI styling is mostly utility-class driven (Tailwind-style classes in JSX) plus shared globals in `index.css` and `styles.css`.
|
UI styling is mostly utility-class driven (Tailwind-style classes in JSX) plus shared globals in `index.css` and `styles.css`.
|
||||||
@@ -441,7 +462,9 @@ Do not rely on old class-only layout assumptions.
|
|||||||
Key conventions documented in the reference:
|
Key conventions documented in the reference:
|
||||||
|
|
||||||
- **Text sizes** use `rem`-based Tailwind values so they scale with the user's font-size slider. Do not use hard-locked `px` values (e.g., `text-[10px]`). The canonical sizes are `text-[0.625rem]` (10px), `text-[0.6875rem]` (11px), `text-[0.8125rem]` (13px), plus standard Tailwind `text-xs`/`text-sm`/`text-base`/`text-lg`/`text-xl`.
|
- **Text sizes** use `rem`-based Tailwind values so they scale with the user's font-size slider. Do not use hard-locked `px` values (e.g., `text-[10px]`). The canonical sizes are `text-[0.625rem]` (10px), `text-[0.6875rem]` (11px), `text-[0.8125rem]` (13px), plus standard Tailwind `text-xs`/`text-sm`/`text-base`/`text-lg`/`text-xl`.
|
||||||
- **Section labels** use `text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium`.
|
- **Group titles** (sub-section headings within settings tabs) use `<h3 className="text-base font-semibold tracking-tight">`. These separate major groups like "Connection", "Identity", "MQTT Broker". When a group contains named sub-items (e.g. "Contact Management" → "Blocked Contacts", "Bulk Delete"), use `<h4 className="text-sm font-semibold">` for the children and nest them inside the parent group's `div` instead of separating with `<Separator />`.
|
||||||
|
- **Helper / description text** uses `text-[0.8125rem] text-muted-foreground` (13px). This is for explanatory paragraphs under inputs or sections — not for metadata, timestamps, or alert text which stay at `text-xs`.
|
||||||
|
- **Metadata labels** use `text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium` for compact category tags like "Push-enabled conversations" or "Registered Devices".
|
||||||
- **Buttons** use the shadcn `<Button>` component. Semantic color overrides (danger, warning, success) use `variant="outline"` with `className="border-{color}/50 text-{color} hover:bg-{color}/10"`.
|
- **Buttons** use the shadcn `<Button>` component. Semantic color overrides (danger, warning, success) use `variant="outline"` with `className="border-{color}/50 text-{color} hover:bg-{color}/10"`.
|
||||||
- **Badges/tags** use `text-[0.625rem] uppercase tracking-wider px-1.5 py-0.5 rounded` with `bg-muted` (neutral) or `bg-primary/10` (active).
|
- **Badges/tags** use `text-[0.625rem] uppercase tracking-wider px-1.5 py-0.5 rounded` with `bg-muted` (neutral) or `bg-primary/10` (active).
|
||||||
- **Clickable text** (copy-to-clipboard, navigational links) uses `role="button" tabIndex={0}` with `cursor-pointer hover:text-primary transition-colors`.
|
- **Clickable text** (copy-to-clipboard, navigational links) uses `role="button" tabIndex={0}` with `cursor-pointer hover:text-primary transition-colors`.
|
||||||
|
|||||||
+4
-1
@@ -13,8 +13,11 @@
|
|||||||
<link rel="icon" type="image/png" href="./favicon-96x96.png" sizes="96x96" />
|
<link rel="icon" type="image/png" href="./favicon-96x96.png" sizes="96x96" />
|
||||||
<link rel="shortcut icon" href="./favicon.ico" />
|
<link rel="shortcut icon" href="./favicon.ico" />
|
||||||
<link rel="apple-touch-icon" sizes="180x180" href="./apple-touch-icon.png" />
|
<link rel="apple-touch-icon" sizes="180x180" href="./apple-touch-icon.png" />
|
||||||
<link rel="manifest" href="./site.webmanifest" />
|
<link rel="manifest" href="./site.webmanifest" crossorigin="use-credentials" />
|
||||||
<script>
|
<script>
|
||||||
|
// Service worker registration moved to main.tsx (requires isSecureContext
|
||||||
|
// for Web Push). Do not duplicate here.
|
||||||
|
|
||||||
// Start critical data fetches before React/Vite JS loads.
|
// Start critical data fetches before React/Vite JS loads.
|
||||||
// Must be in <head> BEFORE the module script so the browser queues these
|
// Must be in <head> BEFORE the module script so the browser queues these
|
||||||
// fetches before it discovers and starts downloading the JS bundle.
|
// fetches before it discovers and starts downloading the JS bundle.
|
||||||
|
|||||||
Generated
+2
-2
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.8.0",
|
"version": "3.11.3",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"version": "3.8.0",
|
"version": "3.11.3",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/lang-python": "^6.2.1",
|
"@codemirror/lang-python": "^6.2.1",
|
||||||
"@codemirror/theme-one-dark": "^6.1.3",
|
"@codemirror/theme-one-dark": "^6.1.3",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "remoteterm-meshcore-frontend",
|
"name": "remoteterm-meshcore-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "3.11.0",
|
"version": "3.12.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
|
|||||||
Binary file not shown.
|
After Width: | Height: | Size: 122 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 426 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 109 KiB |
@@ -0,0 +1,60 @@
|
|||||||
|
/* Service worker for PWA installability and Web Push notifications. */
|
||||||
|
|
||||||
|
self.addEventListener("install", () => {
|
||||||
|
self.skipWaiting();
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener("activate", (event) => {
|
||||||
|
event.waitUntil(self.clients.claim());
|
||||||
|
});
|
||||||
|
|
||||||
|
// No-op fetch handler — required for PWA installability criteria.
|
||||||
|
// We don't cache anything; the app always fetches from the network.
|
||||||
|
self.addEventListener("fetch", () => {});
|
||||||
|
|
||||||
|
self.addEventListener("push", (event) => {
|
||||||
|
let data = {};
|
||||||
|
try {
|
||||||
|
data = event.data ? event.data.json() : {};
|
||||||
|
} catch {
|
||||||
|
data = { title: "New message", body: event.data?.text() || "" };
|
||||||
|
}
|
||||||
|
|
||||||
|
const title = data.title || "New message";
|
||||||
|
const options = {
|
||||||
|
body: data.body || "",
|
||||||
|
icon: "./favicon-256x256.png",
|
||||||
|
badge: "./favicon-96x96.png",
|
||||||
|
tag: data.tag || "meshcore-push",
|
||||||
|
data: { url_hash: data.url_hash || "" },
|
||||||
|
};
|
||||||
|
|
||||||
|
event.waitUntil(self.registration.showNotification(title, options));
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener("notificationclick", (event) => {
|
||||||
|
event.notification.close();
|
||||||
|
const urlHash = event.notification.data?.url_hash || "";
|
||||||
|
// Use the SW registration scope as the base URL so subpath deployments
|
||||||
|
// (e.g. archworks.co/meshcore/) navigate correctly.
|
||||||
|
const base = self.registration.scope;
|
||||||
|
|
||||||
|
event.waitUntil(
|
||||||
|
clients
|
||||||
|
.matchAll({ type: "window", includeUncontrolled: true })
|
||||||
|
.then((windowClients) => {
|
||||||
|
// Focus an existing tab if one is open
|
||||||
|
for (const client of windowClients) {
|
||||||
|
if (client.url.startsWith(base)) {
|
||||||
|
client.focus();
|
||||||
|
if (urlHash) {
|
||||||
|
client.navigate(base + urlHash);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise open a new tab
|
||||||
|
return clients.openWindow(base + (urlHash || ""));
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
@@ -22,6 +22,7 @@ import { toast } from './components/ui/sonner';
|
|||||||
import { AppShell } from './components/AppShell';
|
import { AppShell } from './components/AppShell';
|
||||||
import type { MessageInputHandle } from './components/MessageInput';
|
import type { MessageInputHandle } from './components/MessageInput';
|
||||||
import { DistanceUnitProvider } from './contexts/DistanceUnitContext';
|
import { DistanceUnitProvider } from './contexts/DistanceUnitContext';
|
||||||
|
import { usePush } from './contexts/PushSubscriptionContext';
|
||||||
import { messageContainsMention } from './utils/messageParser';
|
import { messageContainsMention } from './utils/messageParser';
|
||||||
import { getStateKey } from './utils/conversationState';
|
import { getStateKey } from './utils/conversationState';
|
||||||
import type { BulkCreateHashtagChannelsResult, Conversation, Message, RawPacket } from './types';
|
import type { BulkCreateHashtagChannelsResult, Conversation, Message, RawPacket } from './types';
|
||||||
@@ -99,6 +100,7 @@ export function App() {
|
|||||||
toggleConversationNotifications,
|
toggleConversationNotifications,
|
||||||
notifyIncomingMessage,
|
notifyIncomingMessage,
|
||||||
} = useBrowserNotifications();
|
} = useBrowserNotifications();
|
||||||
|
const pushSubscription = usePush();
|
||||||
const { rawPacketStatsSession, recordRawPacketObservation } = useRawPacketStatsSession();
|
const { rawPacketStatsSession, recordRawPacketObservation } = useRawPacketStatsSession();
|
||||||
const {
|
const {
|
||||||
showNewMessage,
|
showNewMessage,
|
||||||
@@ -588,6 +590,7 @@ export function App() {
|
|||||||
onDeleteChannel: handleDeleteChannel,
|
onDeleteChannel: handleDeleteChannel,
|
||||||
onSetChannelFloodScopeOverride: handleSetChannelFloodScopeOverride,
|
onSetChannelFloodScopeOverride: handleSetChannelFloodScopeOverride,
|
||||||
onSetChannelPathHashModeOverride: handleSetChannelPathHashModeOverride,
|
onSetChannelPathHashModeOverride: handleSetChannelPathHashModeOverride,
|
||||||
|
onSelectConversation: handleSelectConversationWithTargetReset,
|
||||||
onOpenContactInfo: handleOpenContactInfo,
|
onOpenContactInfo: handleOpenContactInfo,
|
||||||
onOpenChannelInfo: handleOpenChannelInfo,
|
onOpenChannelInfo: handleOpenChannelInfo,
|
||||||
onSenderClick: handleSenderClick,
|
onSenderClick: handleSenderClick,
|
||||||
@@ -614,6 +617,36 @@ export function App() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
pushSupported: pushSubscription.isSupported,
|
||||||
|
pushSubscribed: pushSubscription.isSubscribed,
|
||||||
|
pushEnabledForConversation:
|
||||||
|
activeConversation?.type === 'contact' || activeConversation?.type === 'channel'
|
||||||
|
? pushSubscription.isConversationPushEnabled(
|
||||||
|
getStateKey(activeConversation.type, activeConversation.id)
|
||||||
|
)
|
||||||
|
: false,
|
||||||
|
onTogglePush: async () => {
|
||||||
|
if (
|
||||||
|
!activeConversation ||
|
||||||
|
(activeConversation.type !== 'contact' && activeConversation.type !== 'channel')
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
const key = getStateKey(activeConversation.type, activeConversation.id);
|
||||||
|
const pushEnabled = pushSubscription.isConversationPushEnabled(key);
|
||||||
|
|
||||||
|
if (!pushEnabled && !pushSubscription.isSubscribed) {
|
||||||
|
const subscriptionId = await pushSubscription.subscribe();
|
||||||
|
if (!subscriptionId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await pushSubscription.toggleConversation(key);
|
||||||
|
},
|
||||||
|
onOpenPushSettings: () => {
|
||||||
|
setSettingsSection('local');
|
||||||
|
if (!showSettings) handleToggleSettingsView();
|
||||||
|
},
|
||||||
trackedTelemetryRepeaters: appSettings?.tracked_telemetry_repeaters ?? [],
|
trackedTelemetryRepeaters: appSettings?.tracked_telemetry_repeaters ?? [],
|
||||||
onToggleTrackedTelemetry: handleToggleTrackedTelemetry,
|
onToggleTrackedTelemetry: handleToggleTrackedTelemetry,
|
||||||
repeaterAutoLoginKey,
|
repeaterAutoLoginKey,
|
||||||
@@ -647,6 +680,7 @@ export function App() {
|
|||||||
onToggleBlockedKey: handleBlockKey,
|
onToggleBlockedKey: handleBlockKey,
|
||||||
onToggleBlockedName: handleBlockName,
|
onToggleBlockedName: handleBlockName,
|
||||||
contacts,
|
contacts,
|
||||||
|
channels,
|
||||||
onBulkDeleteContacts: (deletedKeys: string[]) => {
|
onBulkDeleteContacts: (deletedKeys: string[]) => {
|
||||||
const keySet = new Set(deletedKeys.map((k) => k.toLowerCase()));
|
const keySet = new Set(deletedKeys.map((k) => k.toLowerCase()));
|
||||||
setContacts((prev) => prev.filter((c) => !keySet.has(c.public_key.toLowerCase())));
|
setContacts((prev) => prev.filter((c) => !keySet.has(c.public_key.toLowerCase())));
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ import type {
|
|||||||
RadioTraceResponse,
|
RadioTraceResponse,
|
||||||
RadioDiscoveryTarget,
|
RadioDiscoveryTarget,
|
||||||
PathDiscoveryResponse,
|
PathDiscoveryResponse,
|
||||||
|
PushSubscriptionInfo,
|
||||||
ResendChannelMessageResponse,
|
ResendChannelMessageResponse,
|
||||||
RepeaterAclResponse,
|
RepeaterAclResponse,
|
||||||
RepeaterAdvertIntervalsResponse,
|
RepeaterAdvertIntervalsResponse,
|
||||||
@@ -33,6 +34,7 @@ import type {
|
|||||||
RepeaterRadioSettingsResponse,
|
RepeaterRadioSettingsResponse,
|
||||||
RepeaterStatusResponse,
|
RepeaterStatusResponse,
|
||||||
TelemetryHistoryEntry,
|
TelemetryHistoryEntry,
|
||||||
|
TelemetrySchedule,
|
||||||
TrackedTelemetryResponse,
|
TrackedTelemetryResponse,
|
||||||
StatisticsResponse,
|
StatisticsResponse,
|
||||||
TraceResponse,
|
TraceResponse,
|
||||||
@@ -332,6 +334,8 @@ export const api = {
|
|||||||
body: JSON.stringify({ public_key: publicKey }),
|
body: JSON.stringify({ public_key: publicKey }),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
getTelemetrySchedule: () => fetchJson<TelemetrySchedule>('/settings/tracked-telemetry/schedule'),
|
||||||
|
|
||||||
// Favorites
|
// Favorites
|
||||||
toggleFavorite: (type: 'channel' | 'contact', id: string) =>
|
toggleFavorite: (type: 'channel' | 'contact', id: string) =>
|
||||||
fetchJson<{ type: string; id: string; favorite: boolean }>('/settings/favorites/toggle', {
|
fetchJson<{ type: string; id: string; favorite: boolean }>('/settings/favorites/toggle', {
|
||||||
@@ -438,4 +442,28 @@ export const api = {
|
|||||||
fetchJson<RepeaterLppTelemetryResponse>(`/contacts/${publicKey}/room/lpp-telemetry`, {
|
fetchJson<RepeaterLppTelemetryResponse>(`/contacts/${publicKey}/room/lpp-telemetry`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
// Push Notifications
|
||||||
|
getVapidPublicKey: () => fetchJson<{ public_key: string }>('/push/vapid-public-key'),
|
||||||
|
pushSubscribe: (subscription: {
|
||||||
|
endpoint: string;
|
||||||
|
p256dh: string;
|
||||||
|
auth: string;
|
||||||
|
label?: string;
|
||||||
|
}) =>
|
||||||
|
fetchJson<PushSubscriptionInfo>('/push/subscribe', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify(subscription),
|
||||||
|
}),
|
||||||
|
getPushSubscriptions: () => fetchJson<PushSubscriptionInfo[]>('/push/subscriptions'),
|
||||||
|
deletePushSubscription: (id: string) =>
|
||||||
|
fetchJson<{ deleted: boolean }>(`/push/subscriptions/${id}`, { method: 'DELETE' }),
|
||||||
|
testPushSubscription: (id: string) =>
|
||||||
|
fetchJson<{ status: string }>(`/push/subscriptions/${id}/test`, { method: 'POST' }),
|
||||||
|
getPushConversations: () => fetchJson<string[]>('/push/conversations'),
|
||||||
|
togglePushConversation: (key: string) =>
|
||||||
|
fetchJson<string[]>('/push/conversations/toggle', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({ key }),
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -84,12 +84,12 @@ export function BulkAddChannelResultModal({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<p className="text-sm text-muted-foreground">No new rooms were added.</p>
|
<p className="text-sm text-muted-foreground">No new channels were added.</p>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{result && result.invalid_names.length > 0 && (
|
{result && result.invalid_names.length > 0 && (
|
||||||
<div className="rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
<div className="rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
||||||
Ignored invalid room names: {result.invalid_names.join(', ')}
|
Ignored invalid channel names: {result.invalid_names.join(', ')}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useEffect, useState } from 'react';
|
import { useEffect, useRef, useState } from 'react';
|
||||||
import { Bell, ChevronsLeftRight, Globe2, Info, Route, Star, Trash2 } from 'lucide-react';
|
import { Bell, ChevronsLeftRight, Globe2, Info, Route, Star, Trash2 } from 'lucide-react';
|
||||||
import { toast } from './ui/sonner';
|
import { toast } from './ui/sonner';
|
||||||
import { DirectTraceIcon } from './DirectTraceIcon';
|
import { DirectTraceIcon } from './DirectTraceIcon';
|
||||||
@@ -26,6 +26,11 @@ interface ChatHeaderProps {
|
|||||||
onTrace: () => void;
|
onTrace: () => void;
|
||||||
onPathDiscovery: (publicKey: string) => Promise<PathDiscoveryResponse>;
|
onPathDiscovery: (publicKey: string) => Promise<PathDiscoveryResponse>;
|
||||||
onToggleNotifications: () => void;
|
onToggleNotifications: () => void;
|
||||||
|
pushSupported?: boolean;
|
||||||
|
pushSubscribed?: boolean;
|
||||||
|
pushEnabledForConversation?: boolean;
|
||||||
|
onTogglePush?: () => void;
|
||||||
|
onOpenPushSettings?: () => void;
|
||||||
onToggleFavorite: (type: 'channel' | 'contact', id: string) => void;
|
onToggleFavorite: (type: 'channel' | 'contact', id: string) => void;
|
||||||
onSetChannelFloodScopeOverride?: (key: string, floodScopeOverride: string) => void;
|
onSetChannelFloodScopeOverride?: (key: string, floodScopeOverride: string) => void;
|
||||||
onSetChannelPathHashModeOverride?: (key: string, pathHashModeOverride: number | null) => void;
|
onSetChannelPathHashModeOverride?: (key: string, pathHashModeOverride: number | null) => void;
|
||||||
@@ -46,6 +51,11 @@ export function ChatHeader({
|
|||||||
onTrace,
|
onTrace,
|
||||||
onPathDiscovery,
|
onPathDiscovery,
|
||||||
onToggleNotifications,
|
onToggleNotifications,
|
||||||
|
pushSupported,
|
||||||
|
pushSubscribed,
|
||||||
|
pushEnabledForConversation,
|
||||||
|
onTogglePush,
|
||||||
|
onOpenPushSettings,
|
||||||
onToggleFavorite,
|
onToggleFavorite,
|
||||||
onSetChannelFloodScopeOverride,
|
onSetChannelFloodScopeOverride,
|
||||||
onSetChannelPathHashModeOverride,
|
onSetChannelPathHashModeOverride,
|
||||||
@@ -58,14 +68,29 @@ export function ChatHeader({
|
|||||||
const [pathDiscoveryOpen, setPathDiscoveryOpen] = useState(false);
|
const [pathDiscoveryOpen, setPathDiscoveryOpen] = useState(false);
|
||||||
const [channelOverrideOpen, setChannelOverrideOpen] = useState(false);
|
const [channelOverrideOpen, setChannelOverrideOpen] = useState(false);
|
||||||
const [pathHashModeOverrideOpen, setPathHashModeOverrideOpen] = useState(false);
|
const [pathHashModeOverrideOpen, setPathHashModeOverrideOpen] = useState(false);
|
||||||
|
const [notifDropdownOpen, setNotifDropdownOpen] = useState(false);
|
||||||
|
const notifDropdownRef = useRef<HTMLDivElement>(null);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
setShowKey(false);
|
setShowKey(false);
|
||||||
setPathDiscoveryOpen(false);
|
setPathDiscoveryOpen(false);
|
||||||
setChannelOverrideOpen(false);
|
setChannelOverrideOpen(false);
|
||||||
setPathHashModeOverrideOpen(false);
|
setPathHashModeOverrideOpen(false);
|
||||||
|
setNotifDropdownOpen(false);
|
||||||
}, [conversation.id]);
|
}, [conversation.id]);
|
||||||
|
|
||||||
|
// Close notification dropdown on outside click
|
||||||
|
useEffect(() => {
|
||||||
|
if (!notifDropdownOpen) return;
|
||||||
|
const handler = (e: MouseEvent) => {
|
||||||
|
if (notifDropdownRef.current && !notifDropdownRef.current.contains(e.target as Node)) {
|
||||||
|
setNotifDropdownOpen(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
document.addEventListener('mousedown', handler);
|
||||||
|
return () => document.removeEventListener('mousedown', handler);
|
||||||
|
}, [notifDropdownOpen]);
|
||||||
|
|
||||||
const activeChannel =
|
const activeChannel =
|
||||||
conversation.type === 'channel'
|
conversation.type === 'channel'
|
||||||
? channels.find((channel) => channel.key === conversation.id)
|
? channels.find((channel) => channel.key === conversation.id)
|
||||||
@@ -288,34 +313,94 @@ export function ChatHeader({
|
|||||||
<DirectTraceIcon className="h-4 w-4 text-muted-foreground" />
|
<DirectTraceIcon className="h-4 w-4 text-muted-foreground" />
|
||||||
</button>
|
</button>
|
||||||
)}
|
)}
|
||||||
{notificationsSupported && !activeContactIsRoomServer && (
|
{(notificationsSupported || pushSupported) && !activeContactIsRoomServer && (
|
||||||
<button
|
<div className="relative" ref={notifDropdownRef}>
|
||||||
className="flex items-center gap-1 rounded px-1 py-1 hover:bg-accent text-lg leading-none transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
<button
|
||||||
onClick={onToggleNotifications}
|
className="p-1 rounded hover:bg-accent text-lg leading-none transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||||
title={
|
onClick={() => setNotifDropdownOpen((v) => !v)}
|
||||||
notificationsEnabled
|
title="Notification settings"
|
||||||
? 'Disable desktop notifications for this conversation'
|
aria-label="Notification settings"
|
||||||
: notificationsPermission === 'denied'
|
aria-expanded={notifDropdownOpen}
|
||||||
? 'Notifications blocked by the browser'
|
>
|
||||||
: 'Enable desktop notifications for this conversation'
|
<Bell
|
||||||
}
|
className={cn(
|
||||||
aria-label={
|
'h-4 w-4',
|
||||||
notificationsEnabled
|
notificationsEnabled || pushEnabledForConversation
|
||||||
? 'Disable notifications for this conversation'
|
? 'text-primary'
|
||||||
: 'Enable notifications for this conversation'
|
: 'text-muted-foreground'
|
||||||
}
|
)}
|
||||||
>
|
fill={notificationsEnabled || pushEnabledForConversation ? 'currentColor' : 'none'}
|
||||||
<Bell
|
aria-hidden="true"
|
||||||
className={`h-4 w-4 ${notificationsEnabled ? 'text-status-connected' : 'text-muted-foreground'}`}
|
/>
|
||||||
fill={notificationsEnabled ? 'currentColor' : 'none'}
|
</button>
|
||||||
aria-hidden="true"
|
{notifDropdownOpen && (
|
||||||
/>
|
<div className="absolute right-[-4.5rem] sm:right-0 top-full z-50 mt-1 w-[calc(100vw-2rem)] sm:w-72 max-w-72 rounded-md border border-border bg-popover p-3 shadow-lg space-y-3">
|
||||||
{notificationsEnabled && (
|
{notificationsSupported && (
|
||||||
<span className="hidden md:inline text-[0.6875rem] font-medium text-status-connected">
|
<label className="flex items-start gap-2.5 cursor-pointer group">
|
||||||
Notifications On
|
<input
|
||||||
</span>
|
type="checkbox"
|
||||||
|
className="mt-0.5 accent-primary h-4 w-4 shrink-0"
|
||||||
|
checked={notificationsEnabled}
|
||||||
|
disabled={notificationsPermission === 'denied'}
|
||||||
|
onChange={onToggleNotifications}
|
||||||
|
/>
|
||||||
|
<div className="min-w-0">
|
||||||
|
<span className="text-sm font-medium text-foreground block leading-tight">
|
||||||
|
Desktop notifications (legacy)
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||||
|
{notificationsPermission === 'denied'
|
||||||
|
? 'Blocked by browser — check site permissions'
|
||||||
|
: 'Alerts while this tab is open'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
)}
|
||||||
|
{pushSupported && onTogglePush && (
|
||||||
|
<>
|
||||||
|
<label className="flex items-start gap-2.5 cursor-pointer group">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
className="mt-0.5 accent-primary h-4 w-4 shrink-0"
|
||||||
|
checked={!!pushEnabledForConversation}
|
||||||
|
onChange={onTogglePush}
|
||||||
|
/>
|
||||||
|
<div className="min-w-0">
|
||||||
|
<span className="text-sm font-medium text-foreground block leading-tight">
|
||||||
|
Web Push (beta testing)
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||||
|
{pushSubscribed
|
||||||
|
? 'Alerts even when the browser is closed'
|
||||||
|
: 'Alerts even when the browser is closed. Requires HTTPS.'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
<span className="text-xs text-muted-foreground leading-snug block mt-0.5">
|
||||||
|
All notification types require a trusted HTTPS context. Depending on your
|
||||||
|
browser, a snakeoil certificate may not be sufficient.
|
||||||
|
</span>
|
||||||
|
{onOpenPushSettings && (
|
||||||
|
<p className="text-xs text-muted-foreground leading-snug mt-1.5">
|
||||||
|
Manage Web Push enabled devices in{' '}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
setNotifDropdownOpen(false);
|
||||||
|
onOpenPushSettings();
|
||||||
|
}}
|
||||||
|
className="text-primary hover:underline transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||||
|
>
|
||||||
|
Settings → Local
|
||||||
|
</button>
|
||||||
|
.
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
)}
|
)}
|
||||||
</button>
|
</div>
|
||||||
)}
|
)}
|
||||||
{conversation.type === 'channel' && onSetChannelFloodScopeOverride && (
|
{conversation.type === 'channel' && onSetChannelFloodScopeOverride && (
|
||||||
<button
|
<button
|
||||||
|
|||||||
@@ -43,6 +43,7 @@ interface CommandPaletteProps {
|
|||||||
|
|
||||||
interface Searchable {
|
interface Searchable {
|
||||||
searchText: string;
|
searchText: string;
|
||||||
|
keyText?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface SearchableContact extends Searchable {
|
interface SearchableContact extends Searchable {
|
||||||
@@ -106,7 +107,9 @@ function filterList<T extends Searchable>(items: T[], query: string): T[] {
|
|||||||
if (!query) return items.slice(0, MAX_PER_GROUP);
|
if (!query) return items.slice(0, MAX_PER_GROUP);
|
||||||
const results: T[] = [];
|
const results: T[] = [];
|
||||||
for (const item of items) {
|
for (const item of items) {
|
||||||
if (fuzzyMatch(item.searchText, query)) {
|
const nameMatch = fuzzyMatch(item.searchText, query);
|
||||||
|
const keyMatch = item.keyText ? item.keyText.startsWith(query) : false;
|
||||||
|
if (nameMatch || keyMatch) {
|
||||||
results.push(item);
|
results.push(item);
|
||||||
if (results.length >= MAX_PER_GROUP) break;
|
if (results.length >= MAX_PER_GROUP) break;
|
||||||
}
|
}
|
||||||
@@ -159,7 +162,8 @@ export function CommandPalette({
|
|||||||
const entry: SearchableContact = {
|
const entry: SearchableContact = {
|
||||||
contact: c,
|
contact: c,
|
||||||
displayName,
|
displayName,
|
||||||
searchText: `${displayName} ${c.public_key}`.toLowerCase(),
|
searchText: displayName.toLowerCase(),
|
||||||
|
keyText: c.public_key.toLowerCase(),
|
||||||
};
|
};
|
||||||
if (c.type === CONTACT_TYPE_REPEATER) {
|
if (c.type === CONTACT_TYPE_REPEATER) {
|
||||||
(c.favorite ? fr : rp).push(entry);
|
(c.favorite ? fr : rp).push(entry);
|
||||||
@@ -174,7 +178,8 @@ export function CommandPalette({
|
|||||||
for (const ch of channels) {
|
for (const ch of channels) {
|
||||||
const entry: SearchableChannel = {
|
const entry: SearchableChannel = {
|
||||||
channel: ch,
|
channel: ch,
|
||||||
searchText: `${ch.name} ${ch.key}`.toLowerCase(),
|
searchText: ch.name.toLowerCase(),
|
||||||
|
keyText: ch.key.toLowerCase(),
|
||||||
};
|
};
|
||||||
(ch.favorite ? fch : rch).push(entry);
|
(ch.favorite ? fch : rch).push(entry);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -298,17 +298,16 @@ export function ContactInfoPane({
|
|||||||
|
|
||||||
{isPrefixOnlyResolvedContact && (
|
{isPrefixOnlyResolvedContact && (
|
||||||
<div className="mx-5 mt-4 rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-sm text-destructive">
|
<div className="mx-5 mt-4 rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-sm text-destructive">
|
||||||
We only know a key prefix for this sender, which can happen when a fallback DM
|
We've received a message from this sender but don't have their full
|
||||||
arrives before we hear an advertisement. This contact stays read-only until the full
|
identity yet. This contact stays read-only until their identity is confirmed —
|
||||||
key resolves from a later advertisement.
|
this usually happens automatically when they next advertise.
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{isUnknownFullKeyResolvedContact && (
|
{isUnknownFullKeyResolvedContact && (
|
||||||
<div className="mx-5 mt-4 rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
<div className="mx-5 mt-4 rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
||||||
We know this sender's full key, but we have not yet heard an advertisement that
|
This sender's profile details (name, location) haven't arrived yet. They
|
||||||
fills in their identity details. Those details will appear automatically when an
|
will fill in automatically when the sender's next advertisement is heard.
|
||||||
advertisement arrives.
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
|||||||
@@ -3,10 +3,9 @@ import { useMemo, useState } from 'react';
|
|||||||
import type { Contact, PathDiscoveryResponse, PathDiscoveryRoute } from '../types';
|
import type { Contact, PathDiscoveryResponse, PathDiscoveryRoute } from '../types';
|
||||||
import {
|
import {
|
||||||
findContactsByPrefix,
|
findContactsByPrefix,
|
||||||
|
formatForcedRouteSummary,
|
||||||
|
formatLearnedRouteSummary,
|
||||||
formatRouteLabel,
|
formatRouteLabel,
|
||||||
getDirectContactRoute,
|
|
||||||
getEffectiveContactRoute,
|
|
||||||
hasRoutingOverride,
|
|
||||||
parsePathHops,
|
parsePathHops,
|
||||||
} from '../utils/pathUtils';
|
} from '../utils/pathUtils';
|
||||||
import { Button } from './ui/button';
|
import { Button } from './ui/button';
|
||||||
@@ -99,30 +98,9 @@ export function ContactPathDiscoveryModal({
|
|||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
const [result, setResult] = useState<PathDiscoveryResponse | null>(null);
|
const [result, setResult] = useState<PathDiscoveryResponse | null>(null);
|
||||||
|
|
||||||
const effectiveRoute = useMemo(() => getEffectiveContactRoute(contact), [contact]);
|
const learnedRouteSummary = useMemo(() => formatLearnedRouteSummary(contact), [contact]);
|
||||||
const directRoute = useMemo(() => getDirectContactRoute(contact), [contact]);
|
const forcedRouteSummary = useMemo(() => formatForcedRouteSummary(contact), [contact]);
|
||||||
const hasForcedRoute = hasRoutingOverride(contact);
|
const hasForcedRoute = forcedRouteSummary !== null;
|
||||||
const learnedRouteSummary = useMemo(() => {
|
|
||||||
if (!directRoute) {
|
|
||||||
return 'Flood';
|
|
||||||
}
|
|
||||||
const hops = parsePathHops(directRoute.path, directRoute.path_len);
|
|
||||||
return hops.length > 0
|
|
||||||
? `${formatRouteLabel(directRoute.path_len, true)} (${hops.join(' -> ')})`
|
|
||||||
: formatRouteLabel(directRoute.path_len, true);
|
|
||||||
}, [directRoute]);
|
|
||||||
const forcedRouteSummary = useMemo(() => {
|
|
||||||
if (!hasForcedRoute) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
if (effectiveRoute.pathLen === -1) {
|
|
||||||
return 'Flood';
|
|
||||||
}
|
|
||||||
const hops = parsePathHops(effectiveRoute.path, effectiveRoute.pathLen);
|
|
||||||
return hops.length > 0
|
|
||||||
? `${formatRouteLabel(effectiveRoute.pathLen, true)} (${hops.join(' -> ')})`
|
|
||||||
: formatRouteLabel(effectiveRoute.pathLen, true);
|
|
||||||
}, [effectiveRoute, hasForcedRoute]);
|
|
||||||
|
|
||||||
const forwardChain = result
|
const forwardChain = result
|
||||||
? renderRouteNodes(
|
? renderRouteNodes(
|
||||||
|
|||||||
@@ -3,10 +3,9 @@ import { useEffect, useMemo, useState } from 'react';
|
|||||||
import { api } from '../api';
|
import { api } from '../api';
|
||||||
import type { Contact } from '../types';
|
import type { Contact } from '../types';
|
||||||
import {
|
import {
|
||||||
formatRouteLabel,
|
formatForcedRouteSummary,
|
||||||
|
formatLearnedRouteSummary,
|
||||||
formatRoutingOverrideInput,
|
formatRoutingOverrideInput,
|
||||||
getDirectContactRoute,
|
|
||||||
hasRoutingOverride,
|
|
||||||
} from '../utils/pathUtils';
|
} from '../utils/pathUtils';
|
||||||
import { Button } from './ui/button';
|
import { Button } from './ui/button';
|
||||||
import {
|
import {
|
||||||
@@ -28,18 +27,6 @@ interface ContactRoutingOverrideModalProps {
|
|||||||
onError: (message: string) => void;
|
onError: (message: string) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
function summarizeLearnedRoute(contact: Contact): string {
|
|
||||||
return formatRouteLabel(getDirectContactRoute(contact)?.path_len ?? -1, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
function summarizeForcedRoute(contact: Contact): string | null {
|
|
||||||
if (!hasRoutingOverride(contact)) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
const routeOverrideLen = contact.route_override_len;
|
|
||||||
return routeOverrideLen == null ? null : formatRouteLabel(routeOverrideLen, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function ContactRoutingOverrideModal({
|
export function ContactRoutingOverrideModal({
|
||||||
open,
|
open,
|
||||||
onClose,
|
onClose,
|
||||||
@@ -59,7 +46,8 @@ export function ContactRoutingOverrideModal({
|
|||||||
setError(null);
|
setError(null);
|
||||||
}, [contact, open]);
|
}, [contact, open]);
|
||||||
|
|
||||||
const forcedRouteSummary = useMemo(() => summarizeForcedRoute(contact), [contact]);
|
const learnedRouteSummary = useMemo(() => formatLearnedRouteSummary(contact), [contact]);
|
||||||
|
const forcedRouteSummary = useMemo(() => formatForcedRouteSummary(contact), [contact]);
|
||||||
|
|
||||||
const saveRoute = async (value: string) => {
|
const saveRoute = async (value: string) => {
|
||||||
setSaving(true);
|
setSaving(true);
|
||||||
@@ -98,7 +86,7 @@ export function ContactRoutingOverrideModal({
|
|||||||
<div className="rounded-md border border-border bg-muted/20 p-3 text-sm">
|
<div className="rounded-md border border-border bg-muted/20 p-3 text-sm">
|
||||||
<div className="font-medium">{contact.name || contact.public_key.slice(0, 12)}</div>
|
<div className="font-medium">{contact.name || contact.public_key.slice(0, 12)}</div>
|
||||||
<div className="mt-1 text-muted-foreground">
|
<div className="mt-1 text-muted-foreground">
|
||||||
Current learned route: {summarizeLearnedRoute(contact)}
|
Current learned route: {learnedRouteSummary}
|
||||||
</div>
|
</div>
|
||||||
{forcedRouteSummary && (
|
{forcedRouteSummary && (
|
||||||
<div className="mt-1 text-destructive">
|
<div className="mt-1 text-destructive">
|
||||||
|
|||||||
@@ -20,7 +20,11 @@ import type {
|
|||||||
} from '../types';
|
} from '../types';
|
||||||
import type { RawPacketStatsSessionState } from '../utils/rawPacketStats';
|
import type { RawPacketStatsSessionState } from '../utils/rawPacketStats';
|
||||||
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
|
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
|
||||||
import { isPrefixOnlyContact, isUnknownFullKeyContact } from '../utils/pubkey';
|
import {
|
||||||
|
getContactDisplayName,
|
||||||
|
isPrefixOnlyContact,
|
||||||
|
isUnknownFullKeyContact,
|
||||||
|
} from '../utils/pubkey';
|
||||||
|
|
||||||
const RepeaterDashboard = lazy(() =>
|
const RepeaterDashboard = lazy(() =>
|
||||||
import('./RepeaterDashboard').then((m) => ({ default: m.RepeaterDashboard }))
|
import('./RepeaterDashboard').then((m) => ({ default: m.RepeaterDashboard }))
|
||||||
@@ -65,6 +69,7 @@ interface ConversationPaneProps {
|
|||||||
channelKey: string,
|
channelKey: string,
|
||||||
pathHashModeOverride: number | null
|
pathHashModeOverride: number | null
|
||||||
) => Promise<void>;
|
) => Promise<void>;
|
||||||
|
onSelectConversation: (conversation: Conversation) => void;
|
||||||
onOpenContactInfo: (publicKey: string, fromChannel?: boolean) => void;
|
onOpenContactInfo: (publicKey: string, fromChannel?: boolean) => void;
|
||||||
onOpenChannelInfo: (channelKey: string) => void;
|
onOpenChannelInfo: (channelKey: string) => void;
|
||||||
onSenderClick: (sender: string) => void;
|
onSenderClick: (sender: string) => void;
|
||||||
@@ -77,6 +82,11 @@ interface ConversationPaneProps {
|
|||||||
onDismissUnreadMarker: () => void;
|
onDismissUnreadMarker: () => void;
|
||||||
onSendMessage: (text: string) => Promise<void>;
|
onSendMessage: (text: string) => Promise<void>;
|
||||||
onToggleNotifications: () => void;
|
onToggleNotifications: () => void;
|
||||||
|
pushSupported?: boolean;
|
||||||
|
pushSubscribed?: boolean;
|
||||||
|
pushEnabledForConversation?: boolean;
|
||||||
|
onTogglePush?: () => void;
|
||||||
|
onOpenPushSettings?: () => void;
|
||||||
trackedTelemetryRepeaters: string[];
|
trackedTelemetryRepeaters: string[];
|
||||||
onToggleTrackedTelemetry: (publicKey: string) => Promise<void>;
|
onToggleTrackedTelemetry: (publicKey: string) => Promise<void>;
|
||||||
repeaterAutoLoginKey: string | null;
|
repeaterAutoLoginKey: string | null;
|
||||||
@@ -93,17 +103,17 @@ function ContactResolutionBanner({ variant }: { variant: 'unknown-full-key' | 'p
|
|||||||
if (variant === 'prefix-only') {
|
if (variant === 'prefix-only') {
|
||||||
return (
|
return (
|
||||||
<div className="mx-4 mt-3 rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-sm text-destructive">
|
<div className="mx-4 mt-3 rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-sm text-destructive">
|
||||||
We only know a key prefix for this sender, which can happen when a fallback DM arrives
|
We've received a message from this sender but don't have their full identity yet.
|
||||||
before we learn their full identity. This conversation is read-only until we hear an
|
Sending is disabled until their identity is confirmed — this usually happens
|
||||||
advertisement that resolves the full key.
|
automatically when they next advertise.
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="mx-4 mt-3 rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
<div className="mx-4 mt-3 rounded-md border border-warning/30 bg-warning/10 px-3 py-2 text-sm text-warning">
|
||||||
A full identity profile is not yet available because we have not heard an advertisement from
|
This sender's profile details (name, location) haven't arrived yet. They will fill
|
||||||
this sender. The contact will fill in automatically when an advertisement arrives.
|
in automatically when the sender's next advert is heard.
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -137,6 +147,7 @@ export function ConversationPane({
|
|||||||
onDeleteChannel,
|
onDeleteChannel,
|
||||||
onSetChannelFloodScopeOverride,
|
onSetChannelFloodScopeOverride,
|
||||||
onSetChannelPathHashModeOverride,
|
onSetChannelPathHashModeOverride,
|
||||||
|
onSelectConversation,
|
||||||
onOpenContactInfo,
|
onOpenContactInfo,
|
||||||
onOpenChannelInfo,
|
onOpenChannelInfo,
|
||||||
onSenderClick,
|
onSenderClick,
|
||||||
@@ -149,6 +160,11 @@ export function ConversationPane({
|
|||||||
onDismissUnreadMarker,
|
onDismissUnreadMarker,
|
||||||
onSendMessage,
|
onSendMessage,
|
||||||
onToggleNotifications,
|
onToggleNotifications,
|
||||||
|
pushSupported,
|
||||||
|
pushSubscribed,
|
||||||
|
pushEnabledForConversation,
|
||||||
|
onTogglePush,
|
||||||
|
onOpenPushSettings,
|
||||||
trackedTelemetryRepeaters,
|
trackedTelemetryRepeaters,
|
||||||
onToggleTrackedTelemetry,
|
onToggleTrackedTelemetry,
|
||||||
repeaterAutoLoginKey,
|
repeaterAutoLoginKey,
|
||||||
@@ -197,6 +213,17 @@ export function ConversationPane({
|
|||||||
focusedKey={activeConversation.mapFocusKey}
|
focusedKey={activeConversation.mapFocusKey}
|
||||||
rawPackets={rawPackets}
|
rawPackets={rawPackets}
|
||||||
config={config}
|
config={config}
|
||||||
|
onSelectContact={(contact) =>
|
||||||
|
onSelectConversation({
|
||||||
|
type: 'contact',
|
||||||
|
id: contact.public_key,
|
||||||
|
name: getContactDisplayName(
|
||||||
|
contact.name,
|
||||||
|
contact.public_key,
|
||||||
|
contact.last_advert
|
||||||
|
),
|
||||||
|
})
|
||||||
|
}
|
||||||
/>
|
/>
|
||||||
</Suspense>
|
</Suspense>
|
||||||
</div>
|
</div>
|
||||||
@@ -271,6 +298,11 @@ export function ConversationPane({
|
|||||||
notificationsSupported={notificationsSupported}
|
notificationsSupported={notificationsSupported}
|
||||||
notificationsEnabled={notificationsEnabled}
|
notificationsEnabled={notificationsEnabled}
|
||||||
notificationsPermission={notificationsPermission}
|
notificationsPermission={notificationsPermission}
|
||||||
|
pushSupported={pushSupported}
|
||||||
|
pushSubscribed={pushSubscribed}
|
||||||
|
pushEnabledForConversation={pushEnabledForConversation}
|
||||||
|
onTogglePush={onTogglePush}
|
||||||
|
onOpenPushSettings={onOpenPushSettings}
|
||||||
onTrace={onTrace}
|
onTrace={onTrace}
|
||||||
onPathDiscovery={onPathDiscovery}
|
onPathDiscovery={onPathDiscovery}
|
||||||
onToggleNotifications={onToggleNotifications}
|
onToggleNotifications={onToggleNotifications}
|
||||||
|
|||||||
@@ -1,5 +1,14 @@
|
|||||||
import { Fragment, useEffect, useState, useMemo, useRef, useCallback } from 'react';
|
import { Fragment, useEffect, useState, useMemo, useRef, useCallback } from 'react';
|
||||||
import { MapContainer, TileLayer, CircleMarker, Popup, useMap, Polyline } from 'react-leaflet';
|
import {
|
||||||
|
MapContainer,
|
||||||
|
TileLayer,
|
||||||
|
CircleMarker,
|
||||||
|
Popup,
|
||||||
|
useMap,
|
||||||
|
useMapEvents,
|
||||||
|
Polyline,
|
||||||
|
LayersControl,
|
||||||
|
} from 'react-leaflet';
|
||||||
import type { LatLngBoundsExpression, CircleMarker as LeafletCircleMarker } from 'leaflet';
|
import type { LatLngBoundsExpression, CircleMarker as LeafletCircleMarker } from 'leaflet';
|
||||||
import L from 'leaflet';
|
import L from 'leaflet';
|
||||||
import 'leaflet/dist/leaflet.css';
|
import 'leaflet/dist/leaflet.css';
|
||||||
@@ -21,29 +30,132 @@ interface MapViewProps {
|
|||||||
focusedKey?: string | null;
|
focusedKey?: string | null;
|
||||||
rawPackets?: RawPacket[];
|
rawPackets?: RawPacket[];
|
||||||
config?: RadioConfig | null;
|
config?: RadioConfig | null;
|
||||||
|
/** When provided, the contact name in each popup becomes a clickable link
|
||||||
|
* that opens the conversation for that contact (DM, repeater, or room). */
|
||||||
|
onSelectContact?: (contact: Contact) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Tile layer presets ---
|
// --- Tile layer presets ---
|
||||||
const TILE_LIGHT = {
|
// Every provider here is free and works without an API key. Attribution strings
|
||||||
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
// follow each provider's requirements; do not remove them. If you add a new
|
||||||
attribution: '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a>',
|
// provider, verify its terms of service (especially for Esri / Google-style
|
||||||
background: '#1a1a2e',
|
// satellite tiles) before committing.
|
||||||
};
|
interface TileLayerPreset {
|
||||||
const TILE_DARK = {
|
id: string;
|
||||||
url: 'https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png',
|
label: string;
|
||||||
attribution:
|
url: string;
|
||||||
'© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="https://carto.com/">CARTO</a>',
|
attribution: string;
|
||||||
background: '#0d0d0d',
|
background: string;
|
||||||
};
|
/** Highest zoom the provider publishes tiles at. When the layer is active,
|
||||||
|
* the map's zoom ceiling is tightened to this value via
|
||||||
|
* `MaxZoomByActiveLayer` so the user cannot zoom into a grey void. */
|
||||||
|
maxZoom?: number;
|
||||||
|
}
|
||||||
|
|
||||||
function getSavedDarkMap(): boolean {
|
// Global zoom bounds for the MapContainer itself. These are pinned to the
|
||||||
|
// container so Leaflet's internal tile-range math never has to guess when
|
||||||
|
// layers swap in/out via LayersControl. Without this, an initial-mount race
|
||||||
|
// between MapContainer layout and LayersControl.BaseLayer addition has been
|
||||||
|
// observed to throw "Attempted to load an infinite number of tiles".
|
||||||
|
const MAP_MIN_ZOOM = 2;
|
||||||
|
const MAP_MAX_ZOOM = 19;
|
||||||
|
|
||||||
|
const TILE_LAYERS: readonly TileLayerPreset[] = [
|
||||||
|
{
|
||||||
|
id: 'light',
|
||||||
|
label: 'Light (OpenStreetMap)',
|
||||||
|
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
||||||
|
attribution: '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a>',
|
||||||
|
background: '#1a1a2e',
|
||||||
|
maxZoom: 19,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'dark',
|
||||||
|
label: 'Dark (CARTO)',
|
||||||
|
url: 'https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png',
|
||||||
|
attribution:
|
||||||
|
'© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="https://carto.com/">CARTO</a>',
|
||||||
|
background: '#0d0d0d',
|
||||||
|
maxZoom: 19,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'topographic',
|
||||||
|
label: 'Topographic (OpenTopoMap)',
|
||||||
|
url: 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png',
|
||||||
|
attribution:
|
||||||
|
'Map data: © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, <a href="http://viewfinderpanoramas.org">SRTM</a> | Map style: © <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)',
|
||||||
|
background: '#a3b3bc',
|
||||||
|
maxZoom: 17,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'satellite',
|
||||||
|
label: 'Satellite (Esri)',
|
||||||
|
url: 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
|
||||||
|
attribution:
|
||||||
|
'Tiles © <a href="https://www.esri.com/">Esri</a> — Source: Esri, Maxar, Earthstar Geographics, and the GIS User Community',
|
||||||
|
background: '#1a1f2e',
|
||||||
|
// Esri's tile service advertises LODs up to 23 and returns HTTP 200 for
|
||||||
|
// every tile request, but the underlying imagery is only high-resolution
|
||||||
|
// up to ~18 in most developed areas and shallower in rural regions. We
|
||||||
|
// cap at 18 rather than 19 so users don't zoom into visibly-empty or
|
||||||
|
// severely-upscaled tiles. Remote regions may still be sparse at 18.
|
||||||
|
maxZoom: 18,
|
||||||
|
},
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
const MAP_LAYER_STORAGE_KEY = 'remoteterm-map-layer';
|
||||||
|
const LEGACY_DARK_MAP_STORAGE_KEY = 'remoteterm-dark-map';
|
||||||
|
|
||||||
|
function getSavedLayerId(): string {
|
||||||
try {
|
try {
|
||||||
return localStorage.getItem('remoteterm-dark-map') === 'true';
|
const stored = localStorage.getItem(MAP_LAYER_STORAGE_KEY);
|
||||||
|
if (stored && TILE_LAYERS.some((l) => l.id === stored)) return stored;
|
||||||
|
// Legacy migration: boolean dark-map flag predates multi-layer support.
|
||||||
|
const legacyDark = localStorage.getItem(LEGACY_DARK_MAP_STORAGE_KEY) === 'true';
|
||||||
|
return legacyDark ? 'dark' : 'light';
|
||||||
} catch {
|
} catch {
|
||||||
return false;
|
return 'light';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Leaflet-internal companion component: listens for base-layer changes driven
|
||||||
|
* by Leaflet's own LayersControl UI and pipes the selection back to React.
|
||||||
|
* Kept separate so the persistence/state logic stays out of the render tree.
|
||||||
|
*/
|
||||||
|
function LayerChangeWatcher({ onChange }: { onChange: (name: string) => void }) {
|
||||||
|
useMapEvents({
|
||||||
|
baselayerchange: (event) => {
|
||||||
|
if (event.name) onChange(event.name);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enforces the active layer's zoom ceiling on the underlying Leaflet map.
|
||||||
|
*
|
||||||
|
* Leaflet's `map.getMaxZoom()` prefers `options.maxZoom` (set on MapContainer)
|
||||||
|
* over per-layer `maxZoom`, so a per-TileLayer cap is silently ignored unless
|
||||||
|
* we push it down to the map itself. We do that here whenever the active
|
||||||
|
* layer changes, and clamp the current zoom if the user happened to be zoomed
|
||||||
|
* past the new cap at the moment of the switch.
|
||||||
|
*
|
||||||
|
* The MapContainer's fixed `minZoom`/`maxZoom` remain the absolute hull that
|
||||||
|
* prevents the "Attempted to load an infinite number of tiles" race during
|
||||||
|
* initial mount (see `MAP_MIN_ZOOM`/`MAP_MAX_ZOOM` below).
|
||||||
|
*/
|
||||||
|
function MaxZoomByActiveLayer({ maxZoom }: { maxZoom: number }) {
|
||||||
|
const map = useMap();
|
||||||
|
useEffect(() => {
|
||||||
|
map.setMaxZoom(maxZoom);
|
||||||
|
if (map.getZoom() > maxZoom) {
|
||||||
|
map.setZoom(maxZoom);
|
||||||
|
}
|
||||||
|
}, [map, maxZoom]);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
const MAP_RECENCY_COLORS = {
|
const MAP_RECENCY_COLORS = {
|
||||||
recent: '#06b6d4',
|
recent: '#06b6d4',
|
||||||
today: '#2563eb',
|
today: '#2563eb',
|
||||||
@@ -379,20 +491,43 @@ function ParticleOverlay({ particles }: { particles: MapParticle[] }) {
|
|||||||
|
|
||||||
// --- Main component ---
|
// --- Main component ---
|
||||||
|
|
||||||
export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewProps) {
|
export function MapView({
|
||||||
|
contacts,
|
||||||
|
focusedKey,
|
||||||
|
rawPackets,
|
||||||
|
config,
|
||||||
|
onSelectContact,
|
||||||
|
}: MapViewProps) {
|
||||||
const [sevenDaysAgo] = useState(() => Date.now() / 1000 - 7 * 24 * 60 * 60);
|
const [sevenDaysAgo] = useState(() => Date.now() / 1000 - 7 * 24 * 60 * 60);
|
||||||
const [darkMap, setDarkMap] = useState(getSavedDarkMap);
|
const [selectedLayerId, setSelectedLayerId] = useState<string>(getSavedLayerId);
|
||||||
const tile = darkMap ? TILE_DARK : TILE_LIGHT;
|
const activeLayer = TILE_LAYERS.find((l) => l.id === selectedLayerId) ?? TILE_LAYERS[0];
|
||||||
|
|
||||||
// Sync with settings changes from other components
|
// Sync layer selection across tabs and windows.
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const onStorage = (e: StorageEvent) => {
|
const onStorage = (e: StorageEvent) => {
|
||||||
if (e.key === 'remoteterm-dark-map') setDarkMap(e.newValue === 'true');
|
if (e.key !== MAP_LAYER_STORAGE_KEY) return;
|
||||||
|
const next = e.newValue ?? '';
|
||||||
|
if (TILE_LAYERS.some((l) => l.id === next)) {
|
||||||
|
setSelectedLayerId(next);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
window.addEventListener('storage', onStorage);
|
window.addEventListener('storage', onStorage);
|
||||||
return () => window.removeEventListener('storage', onStorage);
|
return () => window.removeEventListener('storage', onStorage);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
const handleLayerChange = useCallback((layerName: string) => {
|
||||||
|
const match = TILE_LAYERS.find((l) => l.label === layerName);
|
||||||
|
if (!match) return;
|
||||||
|
setSelectedLayerId(match.id);
|
||||||
|
try {
|
||||||
|
localStorage.setItem(MAP_LAYER_STORAGE_KEY, match.id);
|
||||||
|
// Clear the legacy key so a future downgrade-rollback doesn't revert us.
|
||||||
|
localStorage.removeItem(LEGACY_DARK_MAP_STORAGE_KEY);
|
||||||
|
} catch {
|
||||||
|
// localStorage may be disabled; selection stays in memory only.
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
const [showPackets, setShowPackets] = useState(false);
|
const [showPackets, setShowPackets] = useState(false);
|
||||||
const [discoveryMode, setDiscoveryMode] = useState(false);
|
const [discoveryMode, setDiscoveryMode] = useState(false);
|
||||||
const [discoveredKeys, setDiscoveredKeys] = useState<Set<string>>(new Set());
|
const [discoveredKeys, setDiscoveredKeys] = useState<Set<string>>(new Set());
|
||||||
@@ -674,10 +809,12 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex flex-col h-full">
|
<div className="flex flex-col h-full">
|
||||||
{/* Info bar */}
|
{/* Info bar: stacks vertically on narrow viewports (info label, legend
|
||||||
<div className="px-4 py-2 bg-muted/50 text-xs text-muted-foreground flex items-center justify-between">
|
row, controls row) so nothing truncates; flattens to a single row
|
||||||
|
with right-aligned cluster at md and up. */}
|
||||||
|
<div className="px-4 py-2 bg-muted/50 text-xs text-muted-foreground flex flex-col gap-1 md:flex-row md:items-center md:justify-between md:gap-3">
|
||||||
<span>{infoLabel}</span>
|
<span>{infoLabel}</span>
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex flex-wrap items-center gap-x-3 gap-y-1 md:justify-end">
|
||||||
{!showPackets && (
|
{!showPackets && (
|
||||||
<>
|
<>
|
||||||
<span className="flex items-center gap-1">
|
<span className="flex items-center gap-1">
|
||||||
@@ -758,7 +895,7 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
|||||||
/>{' '}
|
/>{' '}
|
||||||
repeater
|
repeater
|
||||||
</span>
|
</span>
|
||||||
<label className="flex items-center gap-1.5 cursor-pointer ml-2">
|
<label className="flex items-center gap-1.5 cursor-pointer">
|
||||||
<input
|
<input
|
||||||
type="checkbox"
|
type="checkbox"
|
||||||
checked={showPackets}
|
checked={showPackets}
|
||||||
@@ -791,10 +928,28 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
|||||||
<MapContainer
|
<MapContainer
|
||||||
center={[20, 0]}
|
center={[20, 0]}
|
||||||
zoom={2}
|
zoom={2}
|
||||||
|
minZoom={MAP_MIN_ZOOM}
|
||||||
|
maxZoom={MAP_MAX_ZOOM}
|
||||||
className="h-full w-full"
|
className="h-full w-full"
|
||||||
style={{ background: tile.background }}
|
style={{ background: activeLayer.background }}
|
||||||
>
|
>
|
||||||
<TileLayer key={tile.url} attribution={tile.attribution} url={tile.url} />
|
<LayersControl position="topright" collapsed={false}>
|
||||||
|
{TILE_LAYERS.map((layer) => (
|
||||||
|
<LayersControl.BaseLayer
|
||||||
|
key={layer.id}
|
||||||
|
name={layer.label}
|
||||||
|
checked={layer.id === selectedLayerId}
|
||||||
|
>
|
||||||
|
<TileLayer
|
||||||
|
url={layer.url}
|
||||||
|
attribution={layer.attribution}
|
||||||
|
maxZoom={layer.maxZoom}
|
||||||
|
/>
|
||||||
|
</LayersControl.BaseLayer>
|
||||||
|
))}
|
||||||
|
</LayersControl>
|
||||||
|
<LayerChangeWatcher onChange={handleLayerChange} />
|
||||||
|
<MaxZoomByActiveLayer maxZoom={activeLayer.maxZoom ?? MAP_MAX_ZOOM} />
|
||||||
<MapBoundsHandler contacts={mappableContacts} focusedContact={focusedContact} />
|
<MapBoundsHandler contacts={mappableContacts} focusedContact={focusedContact} />
|
||||||
|
|
||||||
{/* Faint route lines for active packet paths */}
|
{/* Faint route lines for active packet paths */}
|
||||||
@@ -839,7 +994,21 @@ export function MapView({ contacts, focusedKey, rawPackets, config }: MapViewPro
|
|||||||
🛜
|
🛜
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
{displayName}
|
{onSelectContact ? (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
className="p-0 bg-transparent border-0 font-inherit text-primary underline hover:text-primary/80 cursor-pointer"
|
||||||
|
onClick={(event) => {
|
||||||
|
event.stopPropagation();
|
||||||
|
onSelectContact(contact);
|
||||||
|
}}
|
||||||
|
title={`Open conversation with ${displayName}`}
|
||||||
|
>
|
||||||
|
{displayName}
|
||||||
|
</button>
|
||||||
|
) : (
|
||||||
|
displayName
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
<div className="text-xs text-gray-500 mt-1">Last heard: {lastHeardLabel}</div>
|
<div className="text-xs text-gray-500 mt-1">Last heard: {lastHeardLabel}</div>
|
||||||
<div className="text-xs text-gray-400 mt-1 font-mono">
|
<div className="text-xs text-gray-400 mt-1 font-mono">
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
type ReactNode,
|
type ReactNode,
|
||||||
} from 'react';
|
} from 'react';
|
||||||
import type { Channel, Contact, Message, MessagePath, RadioConfig, RawPacket } from '../types';
|
import type { Channel, Contact, Message, MessagePath, RadioConfig, RawPacket } from '../types';
|
||||||
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_ROOM } from '../types';
|
import { CONTACT_TYPE_ROOM } from '../types';
|
||||||
import { api } from '../api';
|
import { api } from '../api';
|
||||||
import {
|
import {
|
||||||
findLinkedChannelReferences,
|
findLinkedChannelReferences,
|
||||||
@@ -808,12 +808,13 @@ export function MessageList({
|
|||||||
{sortedMessages.map((msg, index) => {
|
{sortedMessages.map((msg, index) => {
|
||||||
// For DMs, look up contact; for channel messages, use parsed sender
|
// For DMs, look up contact; for channel messages, use parsed sender
|
||||||
const contact = msg.type === 'PRIV' ? getContact(msg.conversation_key) : null;
|
const contact = msg.type === 'PRIV' ? getContact(msg.conversation_key) : null;
|
||||||
const isRepeater = contact?.type === CONTACT_TYPE_REPEATER;
|
|
||||||
const isRoomServer = contact?.type === CONTACT_TYPE_ROOM;
|
const isRoomServer = contact?.type === CONTACT_TYPE_ROOM;
|
||||||
|
|
||||||
// Skip sender parsing for repeater messages (CLI responses often have colons)
|
// Only parse "sender: text" prefix for channel messages — DMs never carry
|
||||||
|
// an in-text sender prefix, so parsing them would incorrectly strip
|
||||||
|
// user text that happens to contain a colon (e.g. "TEST1: TEST2").
|
||||||
const { sender, content } =
|
const { sender, content } =
|
||||||
isRepeater || (isRoomServer && msg.type === 'PRIV')
|
msg.type === 'PRIV'
|
||||||
? { sender: null, content: msg.text }
|
? { sender: null, content: msg.text }
|
||||||
: parseSenderFromText(msg.text);
|
: parseSenderFromText(msg.text);
|
||||||
const directSenderName =
|
const directSenderName =
|
||||||
@@ -845,7 +846,8 @@ export function MessageList({
|
|||||||
isCorruptChannelMessage
|
isCorruptChannelMessage
|
||||||
);
|
);
|
||||||
const prevMsg = sortedMessages[index - 1];
|
const prevMsg = sortedMessages[index - 1];
|
||||||
const prevParsedSender = prevMsg ? parseSenderFromText(prevMsg.text).sender : null;
|
const prevParsedSender =
|
||||||
|
prevMsg && prevMsg.type === 'CHAN' ? parseSenderFromText(prevMsg.text).sender : null;
|
||||||
const prevSenderKey = prevMsg
|
const prevSenderKey = prevMsg
|
||||||
? getSenderKey(
|
? getSenderKey(
|
||||||
prevMsg,
|
prevMsg,
|
||||||
|
|||||||
@@ -183,11 +183,11 @@ export function NewMessageModal({
|
|||||||
permitCapitals
|
permitCapitals
|
||||||
);
|
);
|
||||||
if (channelNames.length === 0) {
|
if (channelNames.length === 0) {
|
||||||
setError('Enter at least one valid room name');
|
setError('Enter at least one valid channel name');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (invalidNames.length > 0) {
|
if (invalidNames.length > 0) {
|
||||||
setError(`Invalid room names: ${invalidNames.join(', ')}`);
|
setError(`Invalid channel names: ${invalidNames.join(', ')}`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
await onBulkAddHashtagChannels(channelNames, tryHistorical);
|
await onBulkAddHashtagChannels(channelNames, tryHistorical);
|
||||||
@@ -249,7 +249,7 @@ export function NewMessageModal({
|
|||||||
{tab === 'new-contact' && 'Add a new contact by entering their name and public key'}
|
{tab === 'new-contact' && 'Add a new contact by entering their name and public key'}
|
||||||
{tab === 'new-channel' && 'Create a private channel with a shared encryption key'}
|
{tab === 'new-channel' && 'Create a private channel with a shared encryption key'}
|
||||||
{tab === 'hashtag' && 'Join a public hashtag channel'}
|
{tab === 'hashtag' && 'Join a public hashtag channel'}
|
||||||
{tab === 'bulk-hashtag' && 'Paste multiple hashtag rooms to add them in one batch'}
|
{tab === 'bulk-hashtag' && 'Paste multiple hashtag channels to add them in one batch'}
|
||||||
</DialogDescription>
|
</DialogDescription>
|
||||||
</DialogHeader>
|
</DialogHeader>
|
||||||
|
|
||||||
@@ -377,11 +377,11 @@ export function NewMessageModal({
|
|||||||
aria-label="Bulk channel names"
|
aria-label="Bulk channel names"
|
||||||
value={bulkChannelText}
|
value={bulkChannelText}
|
||||||
onChange={(e) => setBulkChannelText(e.target.value)}
|
onChange={(e) => setBulkChannelText(e.target.value)}
|
||||||
placeholder={'#ops\nmesh-room\nanother-room'}
|
placeholder={'#ops\nmesh-chat\nanother-channel'}
|
||||||
className="min-h-48 w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm outline-none transition-colors placeholder:text-muted-foreground focus-visible:ring-2 focus-visible:ring-ring"
|
className="min-h-48 w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm outline-none transition-colors placeholder:text-muted-foreground focus-visible:ring-2 focus-visible:ring-ring"
|
||||||
/>
|
/>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-xs text-muted-foreground">
|
||||||
Paste room names separated by lines, spaces, or commas. Leading # marks are
|
Paste channel names separated by lines, spaces, or commas. Leading # marks are
|
||||||
stripped automatically.
|
stripped automatically.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { useState, useEffect, type ReactNode } from 'react';
|
|||||||
import type {
|
import type {
|
||||||
AppSettings,
|
AppSettings,
|
||||||
AppSettingsUpdate,
|
AppSettingsUpdate,
|
||||||
|
Channel,
|
||||||
Contact,
|
Contact,
|
||||||
HealthStatus,
|
HealthStatus,
|
||||||
RadioAdvertMode,
|
RadioAdvertMode,
|
||||||
@@ -49,6 +50,7 @@ interface SettingsModalBaseProps {
|
|||||||
onToggleBlockedKey?: (key: string) => void;
|
onToggleBlockedKey?: (key: string) => void;
|
||||||
onToggleBlockedName?: (name: string) => void;
|
onToggleBlockedName?: (name: string) => void;
|
||||||
contacts?: Contact[];
|
contacts?: Contact[];
|
||||||
|
channels?: Channel[];
|
||||||
onBulkDeleteContacts?: (deletedKeys: string[]) => void;
|
onBulkDeleteContacts?: (deletedKeys: string[]) => void;
|
||||||
trackedTelemetryRepeaters?: string[];
|
trackedTelemetryRepeaters?: string[];
|
||||||
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
||||||
@@ -86,6 +88,7 @@ export function SettingsModal(props: SettingsModalProps) {
|
|||||||
onToggleBlockedKey,
|
onToggleBlockedKey,
|
||||||
onToggleBlockedName,
|
onToggleBlockedName,
|
||||||
contacts,
|
contacts,
|
||||||
|
channels,
|
||||||
onBulkDeleteContacts,
|
onBulkDeleteContacts,
|
||||||
trackedTelemetryRepeaters,
|
trackedTelemetryRepeaters,
|
||||||
onToggleTrackedTelemetry,
|
onToggleTrackedTelemetry,
|
||||||
@@ -228,6 +231,8 @@ export function SettingsModal(props: SettingsModalProps) {
|
|||||||
{isSectionVisible('local') && (
|
{isSectionVisible('local') && (
|
||||||
<SettingsLocalSection
|
<SettingsLocalSection
|
||||||
onLocalLabelChange={onLocalLabelChange}
|
onLocalLabelChange={onLocalLabelChange}
|
||||||
|
contacts={contacts}
|
||||||
|
channels={channels}
|
||||||
className={sectionContentClass}
|
className={sectionContentClass}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -265,6 +265,12 @@ export function Sidebar({
|
|||||||
const sortContactsByOrder = useCallback(
|
const sortContactsByOrder = useCallback(
|
||||||
(items: Contact[], order: SortOrder) =>
|
(items: Contact[], order: SortOrder) =>
|
||||||
[...items].sort((a, b) => {
|
[...items].sort((a, b) => {
|
||||||
|
// Unread DM contacts always float to the top
|
||||||
|
const unreadA = unreadCounts[getStateKey('contact', a.public_key)] || 0;
|
||||||
|
const unreadB = unreadCounts[getStateKey('contact', b.public_key)] || 0;
|
||||||
|
if (unreadA > 0 && unreadB === 0) return -1;
|
||||||
|
if (unreadA === 0 && unreadB > 0) return 1;
|
||||||
|
|
||||||
if (order === 'recent') {
|
if (order === 'recent') {
|
||||||
const timeA = getContactRecentTime(a);
|
const timeA = getContactRecentTime(a);
|
||||||
const timeB = getContactRecentTime(b);
|
const timeB = getContactRecentTime(b);
|
||||||
@@ -274,7 +280,7 @@ export function Sidebar({
|
|||||||
}
|
}
|
||||||
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
||||||
}),
|
}),
|
||||||
[getContactRecentTime]
|
[getContactRecentTime, unreadCounts]
|
||||||
);
|
);
|
||||||
|
|
||||||
const sortRepeatersByOrder = useCallback(
|
const sortRepeatersByOrder = useCallback(
|
||||||
@@ -364,7 +370,7 @@ export function Sidebar({
|
|||||||
() =>
|
() =>
|
||||||
query
|
query
|
||||||
? sortedChannels.filter(
|
? sortedChannels.filter(
|
||||||
(c) => c.name.toLowerCase().includes(query) || c.key.toLowerCase().includes(query)
|
(c) => c.name.toLowerCase().includes(query) || c.key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: sortedChannels,
|
: sortedChannels,
|
||||||
[sortedChannels, query]
|
[sortedChannels, query]
|
||||||
@@ -374,7 +380,8 @@ export function Sidebar({
|
|||||||
const visible = sortedNonRepeaterContacts.filter((c) => !isContactBlocked(c));
|
const visible = sortedNonRepeaterContacts.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedNonRepeaterContacts, query, isContactBlocked]);
|
}, [sortedNonRepeaterContacts, query, isContactBlocked]);
|
||||||
@@ -383,7 +390,8 @@ export function Sidebar({
|
|||||||
const visible = sortedRooms.filter((c) => !isContactBlocked(c));
|
const visible = sortedRooms.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedRooms, query, isContactBlocked]);
|
}, [sortedRooms, query, isContactBlocked]);
|
||||||
@@ -392,7 +400,8 @@ export function Sidebar({
|
|||||||
const visible = sortedRepeaters.filter((c) => !isContactBlocked(c));
|
const visible = sortedRepeaters.filter((c) => !isContactBlocked(c));
|
||||||
return query
|
return query
|
||||||
? visible.filter(
|
? visible.filter(
|
||||||
(c) => c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().includes(query)
|
(c) =>
|
||||||
|
c.name?.toLowerCase().includes(query) || c.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: visible;
|
: visible;
|
||||||
}, [sortedRepeaters, query, isContactBlocked]);
|
}, [sortedRepeaters, query, isContactBlocked]);
|
||||||
|
|||||||
@@ -12,13 +12,21 @@ import type { HealthStatus, RadioConfig } from '../types';
|
|||||||
import { api } from '../api';
|
import { api } from '../api';
|
||||||
import { toast } from './ui/sonner';
|
import { toast } from './ui/sonner';
|
||||||
import { handleKeyboardActivate } from '../utils/a11y';
|
import { handleKeyboardActivate } from '../utils/a11y';
|
||||||
import { applyTheme, getSavedTheme, THEME_CHANGE_EVENT } from '../utils/theme';
|
import { applyTheme, getEffectiveTheme, THEME_CHANGE_EVENT } from '../utils/theme';
|
||||||
import {
|
import {
|
||||||
BATTERY_DISPLAY_CHANGE_EVENT,
|
BATTERY_DISPLAY_CHANGE_EVENT,
|
||||||
getShowBatteryPercent,
|
getShowBatteryPercent,
|
||||||
getShowBatteryVoltage,
|
getShowBatteryVoltage,
|
||||||
mvToPercent,
|
mvToPercent,
|
||||||
} from '../utils/batteryDisplay';
|
} from '../utils/batteryDisplay';
|
||||||
|
import {
|
||||||
|
STATUS_DOT_PULSE_CHANGE_EVENT,
|
||||||
|
STATUS_DOT_PULSE_DURATION_MS,
|
||||||
|
STATUS_DOT_PULSE_PACKET_EVENT,
|
||||||
|
getStatusDotPulseEnabled,
|
||||||
|
pulseColorFor,
|
||||||
|
type StatusDotPulseKind,
|
||||||
|
} from '../utils/statusDotPulse';
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
|
|
||||||
interface StatusBarProps {
|
interface StatusBarProps {
|
||||||
@@ -84,17 +92,71 @@ export function StatusBar({
|
|||||||
? 'Radio OK'
|
? 'Radio OK'
|
||||||
: 'Radio Disconnected';
|
: 'Radio Disconnected';
|
||||||
const [reconnecting, setReconnecting] = useState(false);
|
const [reconnecting, setReconnecting] = useState(false);
|
||||||
const [currentTheme, setCurrentTheme] = useState(getSavedTheme);
|
// Track the *effective* theme (follow-os is resolved to original/light) so the
|
||||||
|
// toggle icon and action match what the user currently sees rendered.
|
||||||
|
const [currentTheme, setCurrentTheme] = useState(getEffectiveTheme);
|
||||||
|
const [pulseEnabled, setPulseEnabled] = useState(getStatusDotPulseEnabled);
|
||||||
|
const [pulseKind, setPulseKind] = useState<StatusDotPulseKind | null>(null);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleThemeChange = (event: Event) => {
|
const handler = () => setPulseEnabled(getStatusDotPulseEnabled());
|
||||||
const themeId = (event as CustomEvent<string>).detail;
|
window.addEventListener(STATUS_DOT_PULSE_CHANGE_EVENT, handler);
|
||||||
setCurrentTheme(typeof themeId === 'string' && themeId ? themeId : getSavedTheme());
|
return () => window.removeEventListener(STATUS_DOT_PULSE_CHANGE_EVENT, handler);
|
||||||
};
|
}, []);
|
||||||
|
|
||||||
window.addEventListener(THEME_CHANGE_EVENT, handleThemeChange as EventListener);
|
useEffect(() => {
|
||||||
|
if (!pulseEnabled) {
|
||||||
|
setPulseKind(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let timer: number | null = null;
|
||||||
|
const handler = (event: Event) => {
|
||||||
|
const kind = (event as CustomEvent<StatusDotPulseKind>).detail;
|
||||||
|
setPulseKind(kind);
|
||||||
|
if (timer !== null) {
|
||||||
|
window.clearTimeout(timer);
|
||||||
|
}
|
||||||
|
timer = window.setTimeout(() => {
|
||||||
|
setPulseKind(null);
|
||||||
|
timer = null;
|
||||||
|
}, STATUS_DOT_PULSE_DURATION_MS);
|
||||||
|
};
|
||||||
|
window.addEventListener(STATUS_DOT_PULSE_PACKET_EVENT, handler);
|
||||||
return () => {
|
return () => {
|
||||||
window.removeEventListener(THEME_CHANGE_EVENT, handleThemeChange as EventListener);
|
window.removeEventListener(STATUS_DOT_PULSE_PACKET_EVENT, handler);
|
||||||
|
if (timer !== null) {
|
||||||
|
window.clearTimeout(timer);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [pulseEnabled]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const syncEffective = () => setCurrentTheme(getEffectiveTheme());
|
||||||
|
window.addEventListener(THEME_CHANGE_EVENT, syncEffective);
|
||||||
|
|
||||||
|
// When saved theme is "follow-os", OS appearance changes alter the effective
|
||||||
|
// theme without firing a THEME_CHANGE_EVENT, so also watch matchMedia.
|
||||||
|
const mql =
|
||||||
|
typeof window.matchMedia === 'function'
|
||||||
|
? window.matchMedia('(prefers-color-scheme: light)')
|
||||||
|
: null;
|
||||||
|
if (mql) {
|
||||||
|
if (typeof mql.addEventListener === 'function') {
|
||||||
|
mql.addEventListener('change', syncEffective);
|
||||||
|
} else if (typeof (mql as MediaQueryList).addListener === 'function') {
|
||||||
|
(mql as MediaQueryList).addListener(syncEffective);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
window.removeEventListener(THEME_CHANGE_EVENT, syncEffective);
|
||||||
|
if (mql) {
|
||||||
|
if (typeof mql.removeEventListener === 'function') {
|
||||||
|
mql.removeEventListener('change', syncEffective);
|
||||||
|
} else if (typeof (mql as MediaQueryList).removeListener === 'function') {
|
||||||
|
(mql as MediaQueryList).removeListener(syncEffective);
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@@ -154,9 +216,12 @@ export function StatusBar({
|
|||||||
radioState === 'initializing' || radioState === 'connecting'
|
radioState === 'initializing' || radioState === 'connecting'
|
||||||
? 'bg-warning'
|
? 'bg-warning'
|
||||||
: connected
|
: connected
|
||||||
? 'bg-status-connected shadow-[0_0_6px_hsl(var(--status-connected)/0.5)]'
|
? pulseKind
|
||||||
|
? ''
|
||||||
|
: 'bg-status-connected shadow-[0_0_6px_hsl(var(--status-connected)/0.5)]'
|
||||||
: 'bg-status-disconnected'
|
: 'bg-status-disconnected'
|
||||||
)}
|
)}
|
||||||
|
style={connected && pulseKind ? { backgroundColor: pulseColorFor(pulseKind) } : undefined}
|
||||||
aria-hidden="true"
|
aria-hidden="true"
|
||||||
/>
|
/>
|
||||||
<span className="hidden lg:inline text-muted-foreground">{statusLabel}</span>
|
<span className="hidden lg:inline text-muted-foreground">{statusLabel}</span>
|
||||||
|
|||||||
@@ -224,8 +224,8 @@ export function TracePane({ contacts, config, onRunTracePath }: TracePaneProps)
|
|||||||
const matching = query
|
const matching = query
|
||||||
? repeaters.filter(
|
? repeaters.filter(
|
||||||
(contact) =>
|
(contact) =>
|
||||||
contact.public_key.toLowerCase().includes(query) ||
|
(contact.name ?? '').toLowerCase().includes(query) ||
|
||||||
(contact.name ?? '').toLowerCase().includes(query)
|
contact.public_key.toLowerCase().startsWith(query)
|
||||||
)
|
)
|
||||||
: repeaters;
|
: repeaters;
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
import { RepeaterPane, NotFetched, LppSensorRow } from './repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
import type { RepeaterLppTelemetryResponse, PaneState } from '../../types';
|
||||||
|
|
||||||
export function LppTelemetryPane({
|
export function LppTelemetryPane({
|
||||||
@@ -12,6 +13,7 @@ export function LppTelemetryPane({
|
|||||||
onRefresh: () => void;
|
onRefresh: () => void;
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
}) {
|
}) {
|
||||||
|
const { distanceUnit } = useDistanceUnit();
|
||||||
return (
|
return (
|
||||||
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
<RepeaterPane title="LPP Sensors" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||||
{!data ? (
|
{!data ? (
|
||||||
@@ -21,7 +23,7 @@ export function LppTelemetryPane({
|
|||||||
) : (
|
) : (
|
||||||
<div className="space-y-0.5">
|
<div className="space-y-0.5">
|
||||||
{data.sensors.map((sensor, i) => (
|
{data.sensors.map((sensor, i) => (
|
||||||
<LppSensorRow key={i} sensor={sensor} />
|
<LppSensorRow key={i} sensor={sensor} unitPref={distanceUnit} />
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -1,6 +1,15 @@
|
|||||||
import { RepeaterPane, NotFetched, KvRow } from './repeaterPaneShared';
|
import { RepeaterPane, NotFetched, KvRow } from './repeaterPaneShared';
|
||||||
import type { RepeaterOwnerInfoResponse, PaneState } from '../../types';
|
import type { RepeaterOwnerInfoResponse, PaneState } from '../../types';
|
||||||
|
|
||||||
|
function LabeledBlock({ label, value }: { label: string; value: string }) {
|
||||||
|
return (
|
||||||
|
<div className="py-0.5">
|
||||||
|
<span className="text-sm text-muted-foreground whitespace-nowrap">{label}</span>
|
||||||
|
<p className="text-sm font-medium mt-0.5 break-words">{value}</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export function OwnerInfoPane({
|
export function OwnerInfoPane({
|
||||||
data,
|
data,
|
||||||
state,
|
state,
|
||||||
@@ -17,8 +26,8 @@ export function OwnerInfoPane({
|
|||||||
{!data ? (
|
{!data ? (
|
||||||
<NotFetched />
|
<NotFetched />
|
||||||
) : (
|
) : (
|
||||||
<div className="break-all">
|
<div className="space-y-1">
|
||||||
<KvRow label="Owner Info" value={data.owner_info ?? '—'} />
|
<LabeledBlock label="Owner Info" value={data.owner_info ?? '—'} />
|
||||||
<KvRow label="Guest Password" value={data.guest_password ?? '—'} />
|
<KvRow label="Guest Password" value={data.guest_password ?? '—'} />
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -11,19 +11,37 @@ import {
|
|||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
import type { TelemetryHistoryEntry, Contact } from '../../types';
|
import { lppDisplayUnit } from './repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
|
import type { TelemetryHistoryEntry, TelemetryLppSensor, Contact } from '../../types';
|
||||||
|
|
||||||
const MAX_TRACKED = 8;
|
const MAX_TRACKED = 8;
|
||||||
|
|
||||||
type Metric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
type BuiltinMetric = 'battery_volts' | 'noise_floor_dbm' | 'packets' | 'uptime_seconds';
|
||||||
|
|
||||||
const METRIC_CONFIG: Record<Metric, { label: string; unit: string; color: string }> = {
|
interface MetricConfig {
|
||||||
|
label: string;
|
||||||
|
unit: string;
|
||||||
|
color: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const BUILTIN_METRIC_CONFIG: Record<BuiltinMetric, MetricConfig> = {
|
||||||
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
battery_volts: { label: 'Voltage', unit: 'V', color: '#22c55e' },
|
||||||
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
noise_floor_dbm: { label: 'Noise Floor', unit: 'dBm', color: '#8b5cf6' },
|
||||||
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
packets: { label: 'Packets', unit: '', color: '#0ea5e9' },
|
||||||
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
uptime_seconds: { label: 'Uptime', unit: 's', color: '#f59e0b' },
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const BUILTIN_METRICS: BuiltinMetric[] = Object.keys(BUILTIN_METRIC_CONFIG) as BuiltinMetric[];
|
||||||
|
|
||||||
|
// Stable color rotation for dynamic LPP sensors
|
||||||
|
const LPP_COLORS = ['#ec4899', '#14b8a6', '#f97316', '#6366f1', '#84cc16', '#e11d48'];
|
||||||
|
|
||||||
|
/** Build a flat data key for an LPP sensor: lpp_{type_name}_ch{channel} */
|
||||||
|
function lppKey(s: TelemetryLppSensor): string {
|
||||||
|
return `lpp_${s.type_name}_ch${s.channel}`;
|
||||||
|
}
|
||||||
|
|
||||||
const TOOLTIP_STYLE = {
|
const TOOLTIP_STYLE = {
|
||||||
contentStyle: {
|
contentStyle: {
|
||||||
backgroundColor: 'hsl(var(--popover))',
|
backgroundColor: 'hsl(var(--popover))',
|
||||||
@@ -66,18 +84,62 @@ export function TelemetryHistoryPane({
|
|||||||
trackedTelemetryRepeaters,
|
trackedTelemetryRepeaters,
|
||||||
onToggleTrackedTelemetry,
|
onToggleTrackedTelemetry,
|
||||||
}: TelemetryHistoryPaneProps) {
|
}: TelemetryHistoryPaneProps) {
|
||||||
const [metric, setMetric] = useState<Metric>('battery_volts');
|
const { distanceUnit } = useDistanceUnit();
|
||||||
|
const [metric, setMetric] = useState<string>('battery_volts');
|
||||||
const [toggling, setToggling] = useState(false);
|
const [toggling, setToggling] = useState(false);
|
||||||
|
|
||||||
const isTracked = trackedTelemetryRepeaters.includes(publicKey);
|
const isTracked = trackedTelemetryRepeaters.includes(publicKey);
|
||||||
const slotsFull = trackedTelemetryRepeaters.length >= MAX_TRACKED && !isTracked;
|
const slotsFull = trackedTelemetryRepeaters.length >= MAX_TRACKED && !isTracked;
|
||||||
|
|
||||||
const config = METRIC_CONFIG[metric];
|
// Discover unique LPP sensors across all history entries
|
||||||
|
const lppMetrics = useMemo(() => {
|
||||||
|
const seen = new Map<string, { type_name: string; channel: number }>();
|
||||||
|
for (const e of entries) {
|
||||||
|
for (const s of e.data.lpp_sensors ?? []) {
|
||||||
|
const k = lppKey(s);
|
||||||
|
if (!seen.has(k)) seen.set(k, { type_name: s.type_name, channel: s.channel });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const result: { key: string; config: MetricConfig; type_name: string; channel: number }[] = [];
|
||||||
|
let colorIdx = 0;
|
||||||
|
for (const [k, info] of seen) {
|
||||||
|
const label =
|
||||||
|
info.type_name.charAt(0).toUpperCase() +
|
||||||
|
info.type_name.slice(1).replace(/_/g, ' ') +
|
||||||
|
` Ch${info.channel}`;
|
||||||
|
const { unit } = lppDisplayUnit(info.type_name, 0, distanceUnit);
|
||||||
|
result.push({
|
||||||
|
key: k,
|
||||||
|
config: { label, unit, color: LPP_COLORS[colorIdx % LPP_COLORS.length] },
|
||||||
|
type_name: info.type_name,
|
||||||
|
channel: info.channel,
|
||||||
|
});
|
||||||
|
colorIdx++;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}, [entries, distanceUnit]);
|
||||||
|
|
||||||
|
const allMetricKeys = useMemo(
|
||||||
|
() => [...BUILTIN_METRICS, ...lppMetrics.map((m) => m.key)],
|
||||||
|
[lppMetrics]
|
||||||
|
);
|
||||||
|
|
||||||
|
// If the selected metric disappears (e.g. different repeater), reset to default
|
||||||
|
const activeMetric = allMetricKeys.includes(metric) ? metric : 'battery_volts';
|
||||||
|
|
||||||
|
const isBuiltin = BUILTIN_METRICS.includes(activeMetric as BuiltinMetric);
|
||||||
|
const activeConfig: MetricConfig = isBuiltin
|
||||||
|
? BUILTIN_METRIC_CONFIG[activeMetric as BuiltinMetric]
|
||||||
|
: (lppMetrics.find((m) => m.key === activeMetric)?.config ?? {
|
||||||
|
label: activeMetric,
|
||||||
|
unit: '',
|
||||||
|
color: '#888',
|
||||||
|
});
|
||||||
|
|
||||||
const chartData = useMemo(() => {
|
const chartData = useMemo(() => {
|
||||||
return entries.map((e) => {
|
return entries.map((e) => {
|
||||||
const d = e.data;
|
const d = e.data;
|
||||||
return {
|
const point: Record<string, number | undefined> = {
|
||||||
timestamp: e.timestamp,
|
timestamp: e.timestamp,
|
||||||
battery_volts: d.battery_volts,
|
battery_volts: d.battery_volts,
|
||||||
noise_floor_dbm: d.noise_floor_dbm,
|
noise_floor_dbm: d.noise_floor_dbm,
|
||||||
@@ -85,19 +147,27 @@ export function TelemetryHistoryPane({
|
|||||||
packets_sent: d.packets_sent,
|
packets_sent: d.packets_sent,
|
||||||
uptime_seconds: d.uptime_seconds,
|
uptime_seconds: d.uptime_seconds,
|
||||||
};
|
};
|
||||||
|
// Flatten LPP sensors into the point, converting units as needed
|
||||||
|
for (const s of d.lpp_sensors ?? []) {
|
||||||
|
if (typeof s.value === 'number') {
|
||||||
|
point[lppKey(s)] = lppDisplayUnit(s.type_name, s.value, distanceUnit).value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return point;
|
||||||
});
|
});
|
||||||
}, [entries]);
|
}, [entries, distanceUnit]);
|
||||||
|
|
||||||
const dataKeys = metric === 'packets' ? ['packets_received', 'packets_sent'] : [metric];
|
const dataKeys =
|
||||||
|
activeMetric === 'packets' ? ['packets_received', 'packets_sent'] : [activeMetric];
|
||||||
|
|
||||||
const yDomain = useMemo<[number, number] | undefined>(() => {
|
const yDomain = useMemo<[number, number] | undefined>(() => {
|
||||||
if (metric !== 'battery_volts' || chartData.length === 0) return undefined;
|
if (activeMetric !== 'battery_volts' || chartData.length === 0) return undefined;
|
||||||
const values = chartData.map((d) => d.battery_volts).filter((v) => v != null) as number[];
|
const values = chartData.map((d) => d.battery_volts).filter((v) => v != null) as number[];
|
||||||
if (values.length === 0) return [3, 5];
|
if (values.length === 0) return [3, 5];
|
||||||
const lo = Math.min(...values);
|
const lo = Math.min(...values);
|
||||||
const hi = Math.max(...values);
|
const hi = Math.max(...values);
|
||||||
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
return [Math.min(3, Math.floor(lo) - 1), Math.max(5, Math.ceil(hi) + 1)];
|
||||||
}, [metric, chartData]);
|
}, [activeMetric, chartData]);
|
||||||
|
|
||||||
const handleToggle = async () => {
|
const handleToggle = async () => {
|
||||||
setToggling(true);
|
setToggling(true);
|
||||||
@@ -181,20 +251,35 @@ export function TelemetryHistoryPane({
|
|||||||
<Separator className="mb-3" />
|
<Separator className="mb-3" />
|
||||||
|
|
||||||
{/* Metric selector */}
|
{/* Metric selector */}
|
||||||
<div className="flex gap-1 mb-2">
|
<div className="flex flex-wrap gap-1 mb-2">
|
||||||
{(Object.keys(METRIC_CONFIG) as Metric[]).map((m) => (
|
{BUILTIN_METRICS.map((m) => (
|
||||||
<button
|
<button
|
||||||
key={m}
|
key={m}
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => setMetric(m)}
|
onClick={() => setMetric(m)}
|
||||||
className={cn(
|
className={cn(
|
||||||
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||||
metric === m
|
activeMetric === m
|
||||||
? 'bg-primary text-primary-foreground'
|
? 'bg-primary text-primary-foreground'
|
||||||
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{METRIC_CONFIG[m].label}
|
{BUILTIN_METRIC_CONFIG[m].label}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
{lppMetrics.map((m) => (
|
||||||
|
<button
|
||||||
|
key={m.key}
|
||||||
|
type="button"
|
||||||
|
onClick={() => setMetric(m.key)}
|
||||||
|
className={cn(
|
||||||
|
'text-[0.6875rem] px-2 py-0.5 rounded transition-colors',
|
||||||
|
activeMetric === m.key
|
||||||
|
? 'bg-primary text-primary-foreground'
|
||||||
|
: 'text-muted-foreground hover:text-foreground hover:bg-accent'
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{m.config.label}
|
||||||
</button>
|
</button>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
@@ -221,7 +306,9 @@ export function TelemetryHistoryPane({
|
|||||||
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
tick={{ fontSize: 10, fill: 'hsl(var(--muted-foreground))' }}
|
||||||
tickLine={false}
|
tickLine={false}
|
||||||
axisLine={false}
|
axisLine={false}
|
||||||
tickFormatter={(v) => (metric === 'uptime_seconds' ? formatUptime(v) : `${v}`)}
|
tickFormatter={(v) =>
|
||||||
|
activeMetric === 'uptime_seconds' ? formatUptime(v) : `${v}`
|
||||||
|
}
|
||||||
/>
|
/>
|
||||||
<RechartsTooltip
|
<RechartsTooltip
|
||||||
{...TOOLTIP_STYLE}
|
{...TOOLTIP_STYLE}
|
||||||
@@ -234,15 +321,20 @@ export function TelemetryHistoryPane({
|
|||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
formatter={(value: any, name: any) => {
|
formatter={(value: any, name: any) => {
|
||||||
const numVal = typeof value === 'number' ? value : Number(value);
|
const numVal = typeof value === 'number' ? value : Number(value);
|
||||||
const display = metric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
const display =
|
||||||
|
activeMetric === 'uptime_seconds' ? formatUptime(numVal) : `${value}`;
|
||||||
const suffix =
|
const suffix =
|
||||||
metric === 'uptime_seconds' ? '' : config.unit ? ` ${config.unit}` : '';
|
activeMetric === 'uptime_seconds'
|
||||||
|
? ''
|
||||||
|
: activeConfig.unit
|
||||||
|
? ` ${activeConfig.unit}`
|
||||||
|
: '';
|
||||||
const label =
|
const label =
|
||||||
metric === 'packets'
|
activeMetric === 'packets'
|
||||||
? name === 'packets_received'
|
? name === 'packets_received'
|
||||||
? 'Received'
|
? 'Received'
|
||||||
: 'Sent'
|
: 'Sent'
|
||||||
: config.label;
|
: activeConfig.label;
|
||||||
return [`${display}${suffix}`, label];
|
return [`${display}${suffix}`, label];
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
@@ -251,19 +343,41 @@ export function TelemetryHistoryPane({
|
|||||||
key={key}
|
key={key}
|
||||||
type="linear"
|
type="linear"
|
||||||
dataKey={key}
|
dataKey={key}
|
||||||
stroke={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
stroke={
|
||||||
fill={metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color}
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color
|
||||||
|
}
|
||||||
|
fill={
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color
|
||||||
|
}
|
||||||
fillOpacity={0.15}
|
fillOpacity={0.15}
|
||||||
strokeWidth={1.5}
|
strokeWidth={1.5}
|
||||||
dot={{
|
dot={{
|
||||||
r: 4,
|
r: 4,
|
||||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
fill:
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color,
|
||||||
strokeWidth: 1.5,
|
strokeWidth: 1.5,
|
||||||
stroke: 'hsl(var(--popover))',
|
stroke: 'hsl(var(--popover))',
|
||||||
}}
|
}}
|
||||||
activeDot={{
|
activeDot={{
|
||||||
r: 6,
|
r: 6,
|
||||||
fill: metric === 'packets' ? (i === 0 ? '#0ea5e9' : '#f43f5e') : config.color,
|
fill:
|
||||||
|
activeMetric === 'packets'
|
||||||
|
? i === 0
|
||||||
|
? '#0ea5e9'
|
||||||
|
: '#f43f5e'
|
||||||
|
: activeConfig.color,
|
||||||
strokeWidth: 2,
|
strokeWidth: 2,
|
||||||
stroke: 'hsl(var(--popover))',
|
stroke: 'hsl(var(--popover))',
|
||||||
}}
|
}}
|
||||||
|
|||||||
@@ -1,7 +1,23 @@
|
|||||||
|
import type { ReactNode } from 'react';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
import { RepeaterPane, NotFetched, KvRow, formatDuration } from './repeaterPaneShared';
|
import { RepeaterPane, NotFetched, KvRow, formatDuration } from './repeaterPaneShared';
|
||||||
import type { RepeaterStatusResponse, PaneState } from '../../types';
|
import type { RepeaterStatusResponse, PaneState } from '../../types';
|
||||||
|
|
||||||
|
function Secondary({ children }: { children: ReactNode }) {
|
||||||
|
return <span className="ml-1.5 font-normal text-muted-foreground">{children}</span>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatAirtimePercent(airtimeSec: number, uptimeSec: number): string | null {
|
||||||
|
if (uptimeSec <= 0) return null;
|
||||||
|
return `${((airtimeSec / uptimeSec) * 100).toFixed(2)}%`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatPerMinute(count: number, uptimeSec: number): string | null {
|
||||||
|
if (uptimeSec <= 0) return null;
|
||||||
|
const rate = (count * 60) / uptimeSec;
|
||||||
|
return rate >= 10 ? rate.toFixed(0) : rate.toFixed(1);
|
||||||
|
}
|
||||||
|
|
||||||
export function TelemetryPane({
|
export function TelemetryPane({
|
||||||
data,
|
data,
|
||||||
state,
|
state,
|
||||||
@@ -13,6 +29,11 @@ export function TelemetryPane({
|
|||||||
onRefresh: () => void;
|
onRefresh: () => void;
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
}) {
|
}) {
|
||||||
|
const txPct = data ? formatAirtimePercent(data.airtime_seconds, data.uptime_seconds) : null;
|
||||||
|
const rxPct = data ? formatAirtimePercent(data.rx_airtime_seconds, data.uptime_seconds) : null;
|
||||||
|
const rxPerMin = data ? formatPerMinute(data.packets_received, data.uptime_seconds) : null;
|
||||||
|
const txPerMin = data ? formatPerMinute(data.packets_sent, data.uptime_seconds) : null;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<RepeaterPane title="Telemetry" state={state} onRefresh={onRefresh} disabled={disabled}>
|
<RepeaterPane title="Telemetry" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||||
{!data ? (
|
{!data ? (
|
||||||
@@ -21,8 +42,24 @@ export function TelemetryPane({
|
|||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<KvRow label="Battery" value={`${data.battery_volts.toFixed(3)}V`} />
|
<KvRow label="Battery" value={`${data.battery_volts.toFixed(3)}V`} />
|
||||||
<KvRow label="Uptime" value={formatDuration(data.uptime_seconds)} />
|
<KvRow label="Uptime" value={formatDuration(data.uptime_seconds)} />
|
||||||
<KvRow label="TX Airtime" value={formatDuration(data.airtime_seconds)} />
|
<KvRow
|
||||||
<KvRow label="RX Airtime" value={formatDuration(data.rx_airtime_seconds)} />
|
label="TX Airtime"
|
||||||
|
value={
|
||||||
|
<>
|
||||||
|
{formatDuration(data.airtime_seconds)}
|
||||||
|
{txPct && <Secondary>({txPct})</Secondary>}
|
||||||
|
</>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<KvRow
|
||||||
|
label="RX Airtime"
|
||||||
|
value={
|
||||||
|
<>
|
||||||
|
{formatDuration(data.rx_airtime_seconds)}
|
||||||
|
{rxPct && <Secondary>({rxPct})</Secondary>}
|
||||||
|
</>
|
||||||
|
}
|
||||||
|
/>
|
||||||
<Separator className="my-1" />
|
<Separator className="my-1" />
|
||||||
<KvRow label="Noise Floor" value={`${data.noise_floor_dbm} dBm`} />
|
<KvRow label="Noise Floor" value={`${data.noise_floor_dbm} dBm`} />
|
||||||
<KvRow label="Last RSSI" value={`${data.last_rssi_dbm} dBm`} />
|
<KvRow label="Last RSSI" value={`${data.last_rssi_dbm} dBm`} />
|
||||||
@@ -30,7 +67,17 @@ export function TelemetryPane({
|
|||||||
<Separator className="my-1" />
|
<Separator className="my-1" />
|
||||||
<KvRow
|
<KvRow
|
||||||
label="Packets"
|
label="Packets"
|
||||||
value={`${data.packets_received.toLocaleString()} rx / ${data.packets_sent.toLocaleString()} tx`}
|
value={
|
||||||
|
<>
|
||||||
|
{data.packets_received.toLocaleString()} rx / {data.packets_sent.toLocaleString()}{' '}
|
||||||
|
tx
|
||||||
|
{rxPerMin && txPerMin && (
|
||||||
|
<Secondary>
|
||||||
|
(avg {rxPerMin} rx/min / {txPerMin} tx/min)
|
||||||
|
</Secondary>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
}
|
||||||
/>
|
/>
|
||||||
<KvRow
|
<KvRow
|
||||||
label="Flood"
|
label="Flood"
|
||||||
|
|||||||
@@ -223,11 +223,26 @@ export const LPP_UNIT_MAP: Record<string, string> = {
|
|||||||
colour: '',
|
colour: '',
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the display unit and converted value for an LPP sensor,
|
||||||
|
* respecting the user's unit preference for temperature.
|
||||||
|
*/
|
||||||
|
export function lppDisplayUnit(
|
||||||
|
typeName: string,
|
||||||
|
value: number,
|
||||||
|
unitPref: 'metric' | 'imperial' | string
|
||||||
|
): { unit: string; value: number } {
|
||||||
|
if (typeName === 'temperature' && unitPref === 'imperial') {
|
||||||
|
return { unit: '°F', value: (value * 9) / 5 + 32 };
|
||||||
|
}
|
||||||
|
return { unit: LPP_UNIT_MAP[typeName] ?? '', value };
|
||||||
|
}
|
||||||
|
|
||||||
export function formatLppLabel(typeName: string): string {
|
export function formatLppLabel(typeName: string): string {
|
||||||
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
return typeName.charAt(0).toUpperCase() + typeName.slice(1).replace(/_/g, ' ');
|
||||||
}
|
}
|
||||||
|
|
||||||
export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
export function LppSensorRow({ sensor, unitPref }: { sensor: LppSensor; unitPref?: string }) {
|
||||||
const label = formatLppLabel(sensor.type_name);
|
const label = formatLppLabel(sensor.type_name);
|
||||||
|
|
||||||
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
if (typeof sensor.value === 'object' && sensor.value !== null) {
|
||||||
@@ -248,10 +263,10 @@ export function LppSensorRow({ sensor }: { sensor: LppSensor }) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const unit = LPP_UNIT_MAP[sensor.type_name] ?? '';
|
const display = lppDisplayUnit(sensor.type_name, sensor.value as number, unitPref ?? 'metric');
|
||||||
const formatted =
|
const formatted =
|
||||||
typeof sensor.value === 'number'
|
typeof sensor.value === 'number'
|
||||||
? `${sensor.value % 1 === 0 ? sensor.value : sensor.value.toFixed(2)}${unit ? ` ${unit}` : ''}`
|
? `${display.value % 1 === 0 ? display.value : display.value.toFixed(2)}${display.unit ? ` ${display.unit}` : ''}`
|
||||||
: String(sensor.value);
|
: String(sensor.value);
|
||||||
|
|
||||||
return <KvRow label={label} value={formatted} />;
|
return <KvRow label={label} value={formatted} />;
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ import { Separator } from '../ui/separator';
|
|||||||
import { toast } from '../ui/sonner';
|
import { toast } from '../ui/sonner';
|
||||||
import { api } from '../../api';
|
import { api } from '../../api';
|
||||||
import { formatTime } from '../../utils/messageParser';
|
import { formatTime } from '../../utils/messageParser';
|
||||||
|
import { lppDisplayUnit } from '../repeater/repeaterPaneShared';
|
||||||
|
import { useDistanceUnit } from '../../contexts/DistanceUnitContext';
|
||||||
import { BulkDeleteContactsModal } from './BulkDeleteContactsModal';
|
import { BulkDeleteContactsModal } from './BulkDeleteContactsModal';
|
||||||
import type {
|
import type {
|
||||||
AppSettings,
|
AppSettings,
|
||||||
@@ -13,6 +15,7 @@ import type {
|
|||||||
Contact,
|
Contact,
|
||||||
HealthStatus,
|
HealthStatus,
|
||||||
TelemetryHistoryEntry,
|
TelemetryHistoryEntry,
|
||||||
|
TelemetrySchedule,
|
||||||
} from '../../types';
|
} from '../../types';
|
||||||
|
|
||||||
export function SettingsDatabaseSection({
|
export function SettingsDatabaseSection({
|
||||||
@@ -44,6 +47,7 @@ export function SettingsDatabaseSection({
|
|||||||
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
onToggleTrackedTelemetry?: (publicKey: string) => Promise<void>;
|
||||||
className?: string;
|
className?: string;
|
||||||
}) {
|
}) {
|
||||||
|
const { distanceUnit } = useDistanceUnit();
|
||||||
const [retentionDays, setRetentionDays] = useState('14');
|
const [retentionDays, setRetentionDays] = useState('14');
|
||||||
const [cleaning, setCleaning] = useState(false);
|
const [cleaning, setCleaning] = useState(false);
|
||||||
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
|
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
|
||||||
@@ -51,19 +55,45 @@ export function SettingsDatabaseSection({
|
|||||||
const [discoveryBlockedTypes, setDiscoveryBlockedTypes] = useState<number[]>([]);
|
const [discoveryBlockedTypes, setDiscoveryBlockedTypes] = useState<number[]>([]);
|
||||||
const [bulkDeleteOpen, setBulkDeleteOpen] = useState(false);
|
const [bulkDeleteOpen, setBulkDeleteOpen] = useState(false);
|
||||||
|
|
||||||
const [busy, setBusy] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
const [latestTelemetry, setLatestTelemetry] = useState<
|
const [latestTelemetry, setLatestTelemetry] = useState<
|
||||||
Record<string, TelemetryHistoryEntry | null>
|
Record<string, TelemetryHistoryEntry | null>
|
||||||
>({});
|
>({});
|
||||||
const telemetryFetchedRef = useRef(false);
|
const telemetryFetchedRef = useRef(false);
|
||||||
|
|
||||||
|
const [schedule, setSchedule] = useState<TelemetrySchedule | null>(null);
|
||||||
|
const [intervalDraft, setIntervalDraft] = useState<number>(appSettings.telemetry_interval_hours);
|
||||||
|
|
||||||
|
// Serialization chain for every auto-persisted control on this page.
|
||||||
|
// Without this, rapid successive toggles (or mixed dropdown + checkbox
|
||||||
|
// interactions) can dispatch overlapping PATCHes that land out of order
|
||||||
|
// on HTTP/2 — a stale write then wins, reverting the user's last click.
|
||||||
|
// Each call awaits the previous one before sending its request, so the
|
||||||
|
// server sees updates in the order the user made them.
|
||||||
|
const saveChainRef = useRef<Promise<void>>(Promise.resolve());
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
setAutoDecryptOnAdvert(appSettings.auto_decrypt_dm_on_advert);
|
setAutoDecryptOnAdvert(appSettings.auto_decrypt_dm_on_advert);
|
||||||
setDiscoveryBlockedTypes(appSettings.discovery_blocked_types ?? []);
|
setDiscoveryBlockedTypes(appSettings.discovery_blocked_types ?? []);
|
||||||
|
setIntervalDraft(appSettings.telemetry_interval_hours);
|
||||||
}, [appSettings]);
|
}, [appSettings]);
|
||||||
|
|
||||||
|
// Re-fetch the scheduler derivation whenever the tracked list changes or
|
||||||
|
// the stored preference changes. Cheap: single GET, no radio lock.
|
||||||
|
useEffect(() => {
|
||||||
|
let cancelled = false;
|
||||||
|
api
|
||||||
|
.getTelemetrySchedule()
|
||||||
|
.then((s) => {
|
||||||
|
if (!cancelled) setSchedule(s);
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
// Non-critical: dropdown falls back to the unfiltered menu.
|
||||||
|
});
|
||||||
|
return () => {
|
||||||
|
cancelled = true;
|
||||||
|
};
|
||||||
|
}, [trackedTelemetryRepeaters.length, appSettings.telemetry_interval_hours]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (trackedTelemetryRepeaters.length === 0 || telemetryFetchedRef.current) return;
|
if (trackedTelemetryRepeaters.length === 0 || telemetryFetchedRef.current) return;
|
||||||
telemetryFetchedRef.current = true;
|
telemetryFetchedRef.current = true;
|
||||||
@@ -129,35 +159,33 @@ export function SettingsDatabaseSection({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleSave = async () => {
|
/**
|
||||||
setBusy(true);
|
* Apply an AppSettings PATCH after any already-queued saves finish, and
|
||||||
setError(null);
|
* revert local state if the save fails. Every auto-persist control on
|
||||||
|
* this page routes through here so the user-visible order of clicks is
|
||||||
try {
|
* the order the backend sees, regardless of network reordering.
|
||||||
const update: AppSettingsUpdate = { auto_decrypt_dm_on_advert: autoDecryptOnAdvert };
|
*/
|
||||||
const currentBlocked = appSettings.discovery_blocked_types ?? [];
|
const persistAppSettings = (update: AppSettingsUpdate, revert: () => void): Promise<void> => {
|
||||||
if (
|
const chained = saveChainRef.current.then(async () => {
|
||||||
discoveryBlockedTypes.length !== currentBlocked.length ||
|
try {
|
||||||
discoveryBlockedTypes.some((t) => !currentBlocked.includes(t))
|
await onSaveAppSettings(update);
|
||||||
) {
|
} catch (err) {
|
||||||
update.discovery_blocked_types = discoveryBlockedTypes;
|
console.error('Failed to save database settings:', err);
|
||||||
|
revert();
|
||||||
|
toast.error('Failed to save setting', {
|
||||||
|
description: err instanceof Error ? err.message : 'Unknown error',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
await onSaveAppSettings(update);
|
});
|
||||||
toast.success('Database settings saved');
|
saveChainRef.current = chained;
|
||||||
} catch (err) {
|
return chained;
|
||||||
console.error('Failed to save database settings:', err);
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to save');
|
|
||||||
toast.error('Failed to save settings');
|
|
||||||
} finally {
|
|
||||||
setBusy(false);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className={className}>
|
<div className={className}>
|
||||||
{/* ── Database Overview ── */}
|
{/* ── Database Overview ── */}
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label className="text-base">Database Overview</Label>
|
<h3 className="text-base font-semibold tracking-tight">Database Overview</h3>
|
||||||
<div className="rounded-md border border-border bg-muted/30 p-3 space-y-2">
|
<div className="rounded-md border border-border bg-muted/30 p-3 space-y-2">
|
||||||
<div className="flex justify-between items-center">
|
<div className="flex justify-between items-center">
|
||||||
<span className="text-sm">Database size</span>
|
<span className="text-sm">Database size</span>
|
||||||
@@ -184,11 +212,11 @@ export function SettingsDatabaseSection({
|
|||||||
|
|
||||||
{/* ── Storage Cleanup ── */}
|
{/* ── Storage Cleanup ── */}
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
<Label className="text-base">Storage Cleanup</Label>
|
<h3 className="text-base font-semibold tracking-tight">Storage Cleanup</h3>
|
||||||
|
|
||||||
<div className="rounded-md border border-border p-3 space-y-2">
|
<div className="rounded-md border border-border p-3 space-y-2">
|
||||||
<Label className="text-sm">Delete Undecrypted Packets</Label>
|
<h3 className="text-sm font-semibold">Delete Undecrypted Packets</h3>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Permanently deletes stored raw packets that have not yet been decrypted. These are
|
Permanently deletes stored raw packets that have not yet been decrypted. These are
|
||||||
retained in case you later obtain the correct key — once deleted, these messages can
|
retained in case you later obtain the correct key — once deleted, these messages can
|
||||||
never be recovered.
|
never be recovered.
|
||||||
@@ -220,8 +248,8 @@ export function SettingsDatabaseSection({
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="rounded-md border border-border p-3 space-y-2">
|
<div className="rounded-md border border-border p-3 space-y-2">
|
||||||
<Label className="text-sm">Purge Archival Raw Packets</Label>
|
<h3 className="text-sm font-semibold">Purge Archival Raw Packets</h3>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Deletes the raw packet bytes behind messages that are already decrypted and visible in
|
Deletes the raw packet bytes behind messages that are already decrypted and visible in
|
||||||
chat. This frees space but removes packet-analysis availability for those messages. It
|
chat. This frees space but removes packet-analysis availability for those messages. It
|
||||||
does not affect displayed messages or future decryption.
|
does not affect displayed messages or future decryption.
|
||||||
@@ -241,17 +269,24 @@ export function SettingsDatabaseSection({
|
|||||||
|
|
||||||
{/* ── DM Decryption ── */}
|
{/* ── DM Decryption ── */}
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label className="text-base">DM Decryption</Label>
|
<h3 className="text-base font-semibold tracking-tight">DM Decryption</h3>
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<label className="flex items-center gap-3 cursor-pointer">
|
||||||
<input
|
<input
|
||||||
type="checkbox"
|
type="checkbox"
|
||||||
checked={autoDecryptOnAdvert}
|
checked={autoDecryptOnAdvert}
|
||||||
onChange={(e) => setAutoDecryptOnAdvert(e.target.checked)}
|
onChange={(e) => {
|
||||||
|
const next = e.target.checked;
|
||||||
|
const prev = autoDecryptOnAdvert;
|
||||||
|
setAutoDecryptOnAdvert(next);
|
||||||
|
void persistAppSettings({ auto_decrypt_dm_on_advert: next }, () =>
|
||||||
|
setAutoDecryptOnAdvert(prev)
|
||||||
|
);
|
||||||
|
}}
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
className="w-4 h-4 rounded border-input accent-primary"
|
||||||
/>
|
/>
|
||||||
<span className="text-sm">Auto-decrypt historical DMs when new contact advertises</span>
|
<span className="text-sm">Auto-decrypt historical DMs when new contact advertises</span>
|
||||||
</label>
|
</label>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
When enabled, the server will automatically try to decrypt stored DM packets when a new
|
When enabled, the server will automatically try to decrypt stored DM packets when a new
|
||||||
contact sends an advertisement. This may cause brief delays on large packet backlogs.
|
contact sends an advertisement. This may cause brief delays on large packet backlogs.
|
||||||
</p>
|
</p>
|
||||||
@@ -261,12 +296,63 @@ export function SettingsDatabaseSection({
|
|||||||
|
|
||||||
{/* ── Tracked Repeater Telemetry ── */}
|
{/* ── Tracked Repeater Telemetry ── */}
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label className="text-base">Tracked Repeater Telemetry</Label>
|
<h3 className="text-base font-semibold tracking-tight">Tracked Repeater Telemetry</h3>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Repeaters opted into automatic telemetry collection are polled every 8 hours. Up to 8
|
Repeaters opted into automatic telemetry collection are polled on a scheduled interval. To
|
||||||
repeaters may be tracked at a time ({trackedTelemetryRepeaters.length} / 8 slots used).
|
limit mesh traffic, the app caps telemetry at 24 checks per day across all tracked
|
||||||
|
repeaters — so fewer tracked repeaters allows shorter intervals, and more tracked
|
||||||
|
repeaters forces longer ones. Up to {schedule?.max_tracked ?? 8} repeaters may be tracked
|
||||||
|
at once ({trackedTelemetryRepeaters.length} / {schedule?.max_tracked ?? 8} slots used).
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
|
{/* Interval picker. Legal options depend on current tracked count;
|
||||||
|
we list only those. If the saved preference is no longer legal,
|
||||||
|
the effective interval is shown below so the user knows what the
|
||||||
|
scheduler is actually using. */}
|
||||||
|
<div className="space-y-1.5">
|
||||||
|
<Label htmlFor="telemetry-interval" className="text-sm">
|
||||||
|
Collection interval
|
||||||
|
</Label>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<select
|
||||||
|
id="telemetry-interval"
|
||||||
|
value={intervalDraft}
|
||||||
|
onChange={(e) => {
|
||||||
|
const nextValue = Number(e.target.value);
|
||||||
|
if (!Number.isFinite(nextValue) || nextValue === intervalDraft) return;
|
||||||
|
const prevValue = intervalDraft;
|
||||||
|
setIntervalDraft(nextValue);
|
||||||
|
void persistAppSettings({ telemetry_interval_hours: nextValue }, () =>
|
||||||
|
setIntervalDraft(prevValue)
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
className="h-9 px-3 rounded-md border border-input bg-background text-sm ring-offset-background focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2"
|
||||||
|
>
|
||||||
|
{(schedule?.options ?? [1, 2, 3, 4, 6, 8, 12, 24]).map((hrs) => (
|
||||||
|
<option key={hrs} value={hrs}>
|
||||||
|
Every {hrs} hour{hrs === 1 ? '' : 's'} ({Math.floor(24 / hrs)} check
|
||||||
|
{Math.floor(24 / hrs) === 1 ? '' : 's'}/day)
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
{schedule && schedule.effective_hours !== schedule.preferred_hours && (
|
||||||
|
<p className="text-xs text-warning">
|
||||||
|
Saved preference is {schedule.preferred_hours} hour
|
||||||
|
{schedule.preferred_hours === 1 ? '' : 's'}, but the scheduler is using{' '}
|
||||||
|
{schedule.effective_hours} hours because {schedule.tracked_count} repeater
|
||||||
|
{schedule.tracked_count === 1 ? '' : 's'}{' '}
|
||||||
|
{schedule.tracked_count === 1 ? 'is' : 'are'} tracked. Your preference will be
|
||||||
|
restored if you drop back to a supported count.
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
{schedule?.next_run_at != null && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Next run at {formatTime(schedule.next_run_at)} (UTC top of hour).
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
{trackedTelemetryRepeaters.length === 0 ? (
|
{trackedTelemetryRepeaters.length === 0 ? (
|
||||||
<p className="text-sm text-muted-foreground italic">
|
<p className="text-sm text-muted-foreground italic">
|
||||||
No repeaters are being tracked. Enable tracking from a repeater's dashboard.
|
No repeaters are being tracked. Enable tracking from a repeater's dashboard.
|
||||||
@@ -308,6 +394,22 @@ export function SettingsDatabaseSection({
|
|||||||
<span>
|
<span>
|
||||||
tx {d.packets_sent != null ? d.packets_sent.toLocaleString() : '?'}
|
tx {d.packets_sent != null ? d.packets_sent.toLocaleString() : '?'}
|
||||||
</span>
|
</span>
|
||||||
|
{d.lpp_sensors?.map((s) => {
|
||||||
|
const display = lppDisplayUnit(s.type_name, s.value, distanceUnit);
|
||||||
|
const val =
|
||||||
|
typeof display.value === 'number'
|
||||||
|
? display.value % 1 === 0
|
||||||
|
? display.value
|
||||||
|
: display.value.toFixed(1)
|
||||||
|
: display.value;
|
||||||
|
const label = s.type_name.charAt(0).toUpperCase() + s.type_name.slice(1);
|
||||||
|
return (
|
||||||
|
<span key={`${s.type_name}-${s.channel}`}>
|
||||||
|
{label} {val}
|
||||||
|
{display.unit ? ` ${display.unit}` : ''}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
})}
|
||||||
<span className="ml-auto">checked {formatTime(snap.timestamp)}</span>
|
<span className="ml-auto">checked {formatTime(snap.timestamp)}</span>
|
||||||
</div>
|
</div>
|
||||||
) : snap === null ? (
|
) : snap === null ? (
|
||||||
@@ -322,155 +424,146 @@ export function SettingsDatabaseSection({
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{error && (
|
|
||||||
<div className="text-sm text-destructive" role="alert">
|
|
||||||
{error}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<Button onClick={handleSave} disabled={busy} className="w-full">
|
|
||||||
{busy ? 'Saving...' : 'Save Settings'}
|
|
||||||
</Button>
|
|
||||||
|
|
||||||
<Separator />
|
<Separator />
|
||||||
|
|
||||||
{/* ── Contact Management ── */}
|
{/* ── Contact Management ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-5">
|
||||||
<Label className="text-base">Contact Management</Label>
|
<h3 className="text-base font-semibold tracking-tight">Contact Management</h3>
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Block discovery of new node types */}
|
{/* Block discovery of new node types */}
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label>Block Discovery of New Node Types</Label>
|
<h4 className="text-sm font-semibold">Block Discovery of New Node Types</h4>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Checked types will be ignored when heard via advertisement. Existing contacts of these
|
Checked types will be ignored when heard via advertisement. Existing contacts of these
|
||||||
types are still updated. This does not affect contacts added manually or via DM.
|
types are still updated. This does not affect contacts added manually or via DM.
|
||||||
</p>
|
|
||||||
<div className="space-y-1.5">
|
|
||||||
{(
|
|
||||||
[
|
|
||||||
[1, 'Block clients'],
|
|
||||||
[2, 'Block repeaters'],
|
|
||||||
[3, 'Block room servers'],
|
|
||||||
[4, 'Block sensors'],
|
|
||||||
] as const
|
|
||||||
).map(([typeCode, label]) => {
|
|
||||||
const checked = discoveryBlockedTypes.includes(typeCode);
|
|
||||||
return (
|
|
||||||
<label key={typeCode} className="flex items-center gap-2 text-sm cursor-pointer">
|
|
||||||
<input
|
|
||||||
type="checkbox"
|
|
||||||
checked={checked}
|
|
||||||
onChange={() =>
|
|
||||||
setDiscoveryBlockedTypes((prev) =>
|
|
||||||
checked ? prev.filter((t) => t !== typeCode) : [...prev, typeCode]
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="rounded border-input"
|
|
||||||
/>
|
|
||||||
{label}
|
|
||||||
</label>
|
|
||||||
);
|
|
||||||
})}
|
|
||||||
</div>
|
|
||||||
{discoveryBlockedTypes.length > 0 && (
|
|
||||||
<p className="text-xs text-warning">
|
|
||||||
New{' '}
|
|
||||||
{discoveryBlockedTypes
|
|
||||||
.map((t) =>
|
|
||||||
t === 1 ? 'clients' : t === 2 ? 'repeaters' : t === 3 ? 'room servers' : 'sensors'
|
|
||||||
)
|
|
||||||
.join(', ')}{' '}
|
|
||||||
heard via advertisement will not be added to your contact list.
|
|
||||||
</p>
|
</p>
|
||||||
)}
|
<div className="space-y-1.5">
|
||||||
</div>
|
{(
|
||||||
|
[
|
||||||
<Separator />
|
[1, 'Block clients'],
|
||||||
|
[2, 'Block repeaters'],
|
||||||
{/* Blocked contacts list */}
|
[3, 'Block room servers'],
|
||||||
<div className="space-y-3">
|
[4, 'Block sensors'],
|
||||||
<Label>Blocked Contacts</Label>
|
] as const
|
||||||
<p className="text-xs text-muted-foreground">
|
).map(([typeCode, label]) => {
|
||||||
Blocked contacts are hidden from the sidebar. Blocking only hides messages from the UI —
|
const checked = discoveryBlockedTypes.includes(typeCode);
|
||||||
MQTT forwarding and bot responses are not affected. Messages are still stored and will
|
return (
|
||||||
reappear if unblocked.
|
<label key={typeCode} className="flex items-center gap-2 text-sm cursor-pointer">
|
||||||
</p>
|
<input
|
||||||
|
type="checkbox"
|
||||||
{blockedKeys.length === 0 && blockedNames.length === 0 ? (
|
checked={checked}
|
||||||
<p className="text-sm text-muted-foreground italic">
|
onChange={() => {
|
||||||
No blocked contacts. Block contacts from their info pane, viewed by clicking their
|
const prev = discoveryBlockedTypes;
|
||||||
avatar in any channel, or their name within the top status bar with the conversation
|
const next = checked
|
||||||
open.
|
? prev.filter((t) => t !== typeCode)
|
||||||
</p>
|
: [...prev, typeCode];
|
||||||
) : (
|
setDiscoveryBlockedTypes(next);
|
||||||
<div className="space-y-2">
|
void persistAppSettings({ discovery_blocked_types: next }, () =>
|
||||||
{blockedKeys.length > 0 && (
|
setDiscoveryBlockedTypes(prev)
|
||||||
<div>
|
);
|
||||||
<span className="text-xs text-muted-foreground font-medium">Blocked Keys</span>
|
}}
|
||||||
<div className="mt-1 space-y-1">
|
className="rounded border-input"
|
||||||
{blockedKeys.map((key) => (
|
/>
|
||||||
<div key={key} className="flex items-center justify-between gap-2">
|
{label}
|
||||||
<span className="text-xs font-mono truncate flex-1">{key}</span>
|
</label>
|
||||||
{onToggleBlockedKey && (
|
);
|
||||||
<Button
|
})}
|
||||||
variant="ghost"
|
|
||||||
size="sm"
|
|
||||||
onClick={() => onToggleBlockedKey(key)}
|
|
||||||
className="h-7 text-xs flex-shrink-0"
|
|
||||||
>
|
|
||||||
Unblock
|
|
||||||
</Button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{blockedNames.length > 0 && (
|
|
||||||
<div>
|
|
||||||
<span className="text-xs text-muted-foreground font-medium">Blocked Names</span>
|
|
||||||
<div className="mt-1 space-y-1">
|
|
||||||
{blockedNames.map((name) => (
|
|
||||||
<div key={name} className="flex items-center justify-between gap-2">
|
|
||||||
<span className="text-sm truncate flex-1">{name}</span>
|
|
||||||
{onToggleBlockedName && (
|
|
||||||
<Button
|
|
||||||
variant="ghost"
|
|
||||||
size="sm"
|
|
||||||
onClick={() => onToggleBlockedName(name)}
|
|
||||||
className="h-7 text-xs flex-shrink-0"
|
|
||||||
>
|
|
||||||
Unblock
|
|
||||||
</Button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
{discoveryBlockedTypes.length > 0 && (
|
||||||
</div>
|
<p className="text-xs text-warning">
|
||||||
|
New{' '}
|
||||||
|
{discoveryBlockedTypes
|
||||||
|
.map((t) =>
|
||||||
|
t === 1 ? 'clients' : t === 2 ? 'repeaters' : t === 3 ? 'room servers' : 'sensors'
|
||||||
|
)
|
||||||
|
.join(', ')}{' '}
|
||||||
|
heard via advertisement will not be added to your contact list.
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
<Separator />
|
{/* Blocked contacts list */}
|
||||||
|
<div className="space-y-3">
|
||||||
|
<h4 className="text-sm font-semibold">Blocked Contacts</h4>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Blocked contacts are hidden from the sidebar. Blocking only hides messages from the UI —
|
||||||
|
MQTT forwarding and bot responses are not affected. Messages are still stored and will
|
||||||
|
reappear if unblocked.
|
||||||
|
</p>
|
||||||
|
|
||||||
{/* Bulk delete */}
|
{blockedKeys.length === 0 && blockedNames.length === 0 ? (
|
||||||
<div className="space-y-3">
|
<p className="text-sm text-muted-foreground italic">
|
||||||
<Label>Bulk Delete Contacts</Label>
|
No blocked contacts. Block contacts from their info pane, viewed by clicking their
|
||||||
<p className="text-xs text-muted-foreground">
|
avatar in any channel, or their name within the top status bar with the conversation
|
||||||
Remove multiple contacts or repeaters at once. Useful for cleaning up spam or unwanted
|
open.
|
||||||
nodes. Message history will be preserved.
|
</p>
|
||||||
</p>
|
) : (
|
||||||
<Button variant="outline" className="w-full" onClick={() => setBulkDeleteOpen(true)}>
|
<div className="space-y-2">
|
||||||
Open Bulk Delete
|
{blockedKeys.length > 0 && (
|
||||||
</Button>
|
<div>
|
||||||
<BulkDeleteContactsModal
|
<span className="text-xs text-muted-foreground font-medium">Blocked Keys</span>
|
||||||
open={bulkDeleteOpen}
|
<div className="mt-1 space-y-1">
|
||||||
onClose={() => setBulkDeleteOpen(false)}
|
{blockedKeys.map((key) => (
|
||||||
contacts={contacts}
|
<div key={key} className="flex items-center justify-between gap-2">
|
||||||
onDeleted={(keys) => onBulkDeleteContacts?.(keys)}
|
<span className="text-xs font-mono truncate flex-1">{key}</span>
|
||||||
/>
|
{onToggleBlockedKey && (
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onToggleBlockedKey(key)}
|
||||||
|
className="h-7 text-xs flex-shrink-0"
|
||||||
|
>
|
||||||
|
Unblock
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{blockedNames.length > 0 && (
|
||||||
|
<div>
|
||||||
|
<span className="text-xs text-muted-foreground font-medium">Blocked Names</span>
|
||||||
|
<div className="mt-1 space-y-1">
|
||||||
|
{blockedNames.map((name) => (
|
||||||
|
<div key={name} className="flex items-center justify-between gap-2">
|
||||||
|
<span className="text-sm truncate flex-1">{name}</span>
|
||||||
|
{onToggleBlockedName && (
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onToggleBlockedName(name)}
|
||||||
|
className="h-7 text-xs flex-shrink-0"
|
||||||
|
>
|
||||||
|
Unblock
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Bulk delete */}
|
||||||
|
<div className="space-y-3">
|
||||||
|
<h4 className="text-sm font-semibold">Bulk Delete Contacts</h4>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Remove multiple contacts or repeaters at once. Useful for cleaning up spam or unwanted
|
||||||
|
nodes. Message history will be preserved.
|
||||||
|
</p>
|
||||||
|
<Button variant="outline" className="w-full" onClick={() => setBulkDeleteOpen(true)}>
|
||||||
|
Open Bulk Delete
|
||||||
|
</Button>
|
||||||
|
<BulkDeleteContactsModal
|
||||||
|
open={bulkDeleteOpen}
|
||||||
|
onClose={() => setBulkDeleteOpen(false)}
|
||||||
|
contacts={contacts}
|
||||||
|
onDeleted={(keys) => onBulkDeleteContacts?.(keys)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,11 @@
|
|||||||
import { useState } from 'react';
|
import { useState, useEffect } from 'react';
|
||||||
import { ChevronRight, Logs, MessageSquare, Send, Settings } from 'lucide-react';
|
import { ChevronRight, Logs, MessageSquare, Send, Settings, X } from 'lucide-react';
|
||||||
|
import { toast } from '../ui/sonner';
|
||||||
|
import { usePush } from '../../contexts/PushSubscriptionContext';
|
||||||
|
import type { Channel, Contact } from '../../types';
|
||||||
|
import { getContactDisplayName } from '../../utils/pubkey';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
|
import { Checkbox } from '../ui/checkbox';
|
||||||
import { Input } from '../ui/input';
|
import { Input } from '../ui/input';
|
||||||
import { Label } from '../ui/label';
|
import { Label } from '../ui/label';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
@@ -35,30 +40,198 @@ import {
|
|||||||
getShowBatteryVoltage,
|
getShowBatteryVoltage,
|
||||||
setShowBatteryVoltage as saveBatteryVoltage,
|
setShowBatteryVoltage as saveBatteryVoltage,
|
||||||
} from '../../utils/batteryDisplay';
|
} from '../../utils/batteryDisplay';
|
||||||
|
import {
|
||||||
|
STATUS_DOT_PULSE_CHANGE_EVENT,
|
||||||
|
getStatusDotPulseEnabled,
|
||||||
|
setStatusDotPulseEnabled as saveStatusDotPulse,
|
||||||
|
} from '../../utils/statusDotPulse';
|
||||||
|
|
||||||
|
/** Resolve a state key like "contact-abc123" or "channel-def456" to a display name. */
|
||||||
|
function resolveConversationName(
|
||||||
|
stateKey: string,
|
||||||
|
contacts: Contact[],
|
||||||
|
channels: Channel[]
|
||||||
|
): string {
|
||||||
|
if (stateKey.startsWith('contact-')) {
|
||||||
|
const pubkey = stateKey.slice('contact-'.length);
|
||||||
|
const contact = contacts.find((c) => c.public_key === pubkey);
|
||||||
|
return contact ? getContactDisplayName(contact.name, contact.public_key) : pubkey.slice(0, 12);
|
||||||
|
}
|
||||||
|
if (stateKey.startsWith('channel-')) {
|
||||||
|
const key = stateKey.slice('channel-'.length);
|
||||||
|
const channel = channels.find((c) => c.key === key);
|
||||||
|
if (channel?.name) return channel.name.startsWith('#') ? channel.name : `#${channel.name}`;
|
||||||
|
return `#${key.slice(0, 12)}`;
|
||||||
|
}
|
||||||
|
return stateKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
function PushDeviceManagement({
|
||||||
|
contacts = [],
|
||||||
|
channels = [],
|
||||||
|
}: {
|
||||||
|
contacts?: Contact[];
|
||||||
|
channels?: Channel[];
|
||||||
|
}) {
|
||||||
|
const {
|
||||||
|
isSupported,
|
||||||
|
allSubscriptions,
|
||||||
|
pushConversations,
|
||||||
|
loading,
|
||||||
|
subscribe,
|
||||||
|
currentSubscriptionId,
|
||||||
|
toggleConversation,
|
||||||
|
deleteSubscription,
|
||||||
|
testPush,
|
||||||
|
refreshSubscriptions,
|
||||||
|
} = usePush();
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
refreshSubscriptions();
|
||||||
|
}, [refreshSubscriptions]);
|
||||||
|
|
||||||
|
if (!isSupported) {
|
||||||
|
return (
|
||||||
|
<div className="space-y-3">
|
||||||
|
<h3 className="text-base font-semibold tracking-tight">Web Push Notifications</h3>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
{window.isSecureContext
|
||||||
|
? 'Push notifications are not supported by this browser.'
|
||||||
|
: 'Web Push requires HTTPS. Access RemoteTerm over HTTPS (self-signed certificates work) to enable push notifications.'}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div className="space-y-1">
|
||||||
|
<h3 className="text-base font-semibold tracking-tight">Web Push Notifications</h3>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Receive notifications even when the browser is closed. Use the bell icon in any
|
||||||
|
conversation header to enable push for that contact or channel, or subscribe this browser
|
||||||
|
to receive notifications for all push-enabled conversations.
|
||||||
|
</p>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
The set of channels or DMs that trigger push notifications are global per-install (i.e.
|
||||||
|
all devices that register for Web Push will have the same set of channels/DMs that trigger
|
||||||
|
notifications). Subscribing or unsubscribing a particular browser only controls whether
|
||||||
|
that browser receives notifications for the configured set of channels/DMs.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!currentSubscriptionId && (
|
||||||
|
<Button variant="outline" size="sm" onClick={() => void subscribe()} disabled={loading}>
|
||||||
|
{loading ? 'Subscribing...' : 'Subscribe This Browser'}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{pushConversations.length > 0 && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
Push-enabled conversations
|
||||||
|
</span>
|
||||||
|
<div className="flex flex-wrap gap-1.5">
|
||||||
|
{pushConversations.map((key) => (
|
||||||
|
<span
|
||||||
|
key={key}
|
||||||
|
className="inline-flex items-center gap-1 rounded-full bg-muted px-2.5 py-1 text-sm"
|
||||||
|
>
|
||||||
|
{resolveConversationName(key, contacts, channels)}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => void toggleConversation(key)}
|
||||||
|
className="rounded-full p-0.5 hover:bg-accent transition-colors"
|
||||||
|
title="Remove"
|
||||||
|
aria-label={`Remove ${resolveConversationName(key, contacts, channels)} from push`}
|
||||||
|
>
|
||||||
|
<X className="h-3.5 w-3.5" />
|
||||||
|
</button>
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{allSubscriptions.length > 0 && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<span className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
|
Registered Devices
|
||||||
|
</span>
|
||||||
|
<div className="mt-2 space-y-2">
|
||||||
|
{allSubscriptions.map((sub) => (
|
||||||
|
<div
|
||||||
|
key={sub.id}
|
||||||
|
className="flex items-center justify-between gap-3 rounded-md border border-border px-3 py-2"
|
||||||
|
>
|
||||||
|
<div className="min-w-0 flex-1">
|
||||||
|
<div className="flex items-center gap-2 overflow-hidden">
|
||||||
|
<span className="truncate text-sm font-medium">
|
||||||
|
{sub.label || 'Unknown device'}
|
||||||
|
</span>
|
||||||
|
{sub.id === currentSubscriptionId && (
|
||||||
|
<span className="shrink-0 rounded bg-primary/10 px-1.5 py-0.5 text-[0.625rem] font-medium text-primary">
|
||||||
|
Current device
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<span className="text-xs text-muted-foreground">
|
||||||
|
{sub.last_success_at
|
||||||
|
? `Last push: ${new Date(sub.last_success_at * 1000).toLocaleDateString()}`
|
||||||
|
: 'Never pushed'}
|
||||||
|
{sub.failure_count > 0 && ` · ${sub.failure_count} failures`}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-1">
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
className="h-8 text-sm"
|
||||||
|
onClick={() => void testPush(sub.id)}
|
||||||
|
>
|
||||||
|
Test
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
className="h-8 text-sm text-destructive hover:text-destructive"
|
||||||
|
onClick={() => {
|
||||||
|
void deleteSubscription(sub.id).then(() => toast.success('Device removed'));
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Unsubscribe this device
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export function SettingsLocalSection({
|
export function SettingsLocalSection({
|
||||||
onLocalLabelChange,
|
onLocalLabelChange,
|
||||||
|
contacts,
|
||||||
|
channels,
|
||||||
className,
|
className,
|
||||||
}: {
|
}: {
|
||||||
onLocalLabelChange?: (label: LocalLabel) => void;
|
onLocalLabelChange?: (label: LocalLabel) => void;
|
||||||
|
contacts?: Contact[];
|
||||||
|
channels?: Channel[];
|
||||||
className?: string;
|
className?: string;
|
||||||
}) {
|
}) {
|
||||||
const { distanceUnit, setDistanceUnit } = useDistanceUnit();
|
const { distanceUnit, setDistanceUnit } = useDistanceUnit();
|
||||||
const [reopenLastConversation, setReopenLastConversation] = useState(
|
const [reopenLastConversation, setReopenLastConversation] = useState(
|
||||||
getReopenLastConversationEnabled
|
getReopenLastConversationEnabled
|
||||||
);
|
);
|
||||||
const [darkMap, setDarkMap] = useState(() => {
|
|
||||||
try {
|
|
||||||
return localStorage.getItem('remoteterm-dark-map') === 'true';
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
const [localLabelText, setLocalLabelText] = useState(() => getLocalLabel().text);
|
const [localLabelText, setLocalLabelText] = useState(() => getLocalLabel().text);
|
||||||
const [localLabelColor, setLocalLabelColor] = useState(() => getLocalLabel().color);
|
const [localLabelColor, setLocalLabelColor] = useState(() => getLocalLabel().color);
|
||||||
const [autoFocusInput, setAutoFocusInput] = useState(getAutoFocusInputEnabled);
|
const [autoFocusInput, setAutoFocusInput] = useState(getAutoFocusInputEnabled);
|
||||||
const [batteryPercent, setBatteryPercent] = useState(getShowBatteryPercent);
|
const [batteryPercent, setBatteryPercent] = useState(getShowBatteryPercent);
|
||||||
const [batteryVoltage, setBatteryVoltage] = useState(getShowBatteryVoltage);
|
const [batteryVoltage, setBatteryVoltage] = useState(getShowBatteryVoltage);
|
||||||
|
const [statusDotPulse, setStatusDotPulse] = useState(getStatusDotPulseEnabled);
|
||||||
const [fontScale, setFontScale] = useState(getSavedFontScale);
|
const [fontScale, setFontScale] = useState(getSavedFontScale);
|
||||||
const [fontScaleSlider, setFontScaleSlider] = useState(getSavedFontScale);
|
const [fontScaleSlider, setFontScaleSlider] = useState(getSavedFontScale);
|
||||||
const [fontScaleInput, setFontScaleInput] = useState(() => String(getSavedFontScale()));
|
const [fontScaleInput, setFontScaleInput] = useState(() => String(getSavedFontScale()));
|
||||||
@@ -93,12 +266,12 @@ export function SettingsLocalSection({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className={className}>
|
<div className={className}>
|
||||||
<p className="text-sm text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
These settings apply only to this device/browser.
|
These settings apply only to this device/browser.
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<div className="space-y-1">
|
<div className="space-y-1">
|
||||||
<Label>Color Scheme</Label>
|
<h3 className="text-base font-semibold tracking-tight">Color Scheme</h3>
|
||||||
<ThemeSelector />
|
<ThemeSelector />
|
||||||
<ThemePreview className="mt-6" />
|
<ThemePreview className="mt-6" />
|
||||||
</div>
|
</div>
|
||||||
@@ -106,7 +279,7 @@ export function SettingsLocalSection({
|
|||||||
<Separator />
|
<Separator />
|
||||||
|
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label>Local Label</Label>
|
<h3 className="text-base font-semibold tracking-tight">Local Label</h3>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<Input
|
<Input
|
||||||
value={localLabelText}
|
value={localLabelText}
|
||||||
@@ -133,7 +306,7 @@ export function SettingsLocalSection({
|
|||||||
className="w-10 h-9 rounded border border-input cursor-pointer bg-transparent p-0.5"
|
className="w-10 h-9 rounded border border-input cursor-pointer bg-transparent p-0.5"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Display a colored banner at the top of the page to identify this instance.
|
Display a colored banner at the top of the page to identify this instance.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
@@ -158,7 +331,7 @@ export function SettingsLocalSection({
|
|||||||
</option>
|
</option>
|
||||||
))}
|
))}
|
||||||
</select>
|
</select>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Controls how distances are shown throughout the app.
|
Controls how distances are shown throughout the app.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
@@ -166,86 +339,107 @@ export function SettingsLocalSection({
|
|||||||
<Separator />
|
<Separator />
|
||||||
|
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label>UI Tweaks</Label>
|
<h3 className="text-base font-semibold tracking-tight">UI Tweaks</h3>
|
||||||
|
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<div className="space-y-2">
|
||||||
<input
|
<div className="flex items-start gap-3 rounded-md border border-border/60 p-3">
|
||||||
type="checkbox"
|
<Checkbox
|
||||||
checked={reopenLastConversation}
|
id="reopen-last"
|
||||||
onChange={(e) => handleToggleReopenLastConversation(e.target.checked)}
|
checked={reopenLastConversation}
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
onCheckedChange={(checked) => handleToggleReopenLastConversation(checked === true)}
|
||||||
/>
|
className="mt-0.5"
|
||||||
<span className="text-sm">Reopen to last viewed channel/conversation</span>
|
/>
|
||||||
</label>
|
<div className="space-y-1">
|
||||||
|
<Label htmlFor="reopen-last">Reopen Last Conversation</Label>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Automatically reopen to the last-open channel or contact when the app loads to the
|
||||||
|
bare URL.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<div className="flex items-start gap-3 rounded-md border border-border/60 p-3">
|
||||||
<input
|
<Checkbox
|
||||||
type="checkbox"
|
id="auto-focus-input"
|
||||||
checked={darkMap}
|
checked={autoFocusInput}
|
||||||
onChange={(e) => {
|
onCheckedChange={(checked) => {
|
||||||
const v = e.target.checked;
|
const v = checked === true;
|
||||||
setDarkMap(v);
|
setAutoFocusInput(v);
|
||||||
try {
|
setAutoFocusInputEnabled(v);
|
||||||
localStorage.setItem('remoteterm-dark-map', String(v));
|
}}
|
||||||
} catch {
|
className="mt-0.5"
|
||||||
// localStorage may be disabled
|
/>
|
||||||
}
|
<div className="space-y-1">
|
||||||
}}
|
<Label htmlFor="auto-focus-input">Auto-Focus Message Input</Label>
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
/>
|
Place the cursor in the message input when switching conversations. Desktop only.
|
||||||
<span className="text-sm">Dark mode map tiles</span>
|
</p>
|
||||||
</label>
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<div className="flex items-start gap-3 rounded-md border border-border/60 p-3">
|
||||||
<input
|
<Checkbox
|
||||||
type="checkbox"
|
id="battery-percent"
|
||||||
checked={autoFocusInput}
|
checked={batteryPercent}
|
||||||
onChange={(e) => {
|
onCheckedChange={(checked) => {
|
||||||
const v = e.target.checked;
|
const v = checked === true;
|
||||||
setAutoFocusInput(v);
|
setBatteryPercent(v);
|
||||||
setAutoFocusInputEnabled(v);
|
saveBatteryPercent(v);
|
||||||
}}
|
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
}}
|
||||||
/>
|
className="mt-0.5"
|
||||||
<span className="text-sm">Auto-focus input on conversation load (desktop only)</span>
|
/>
|
||||||
</label>
|
<div className="space-y-1">
|
||||||
|
<Label htmlFor="battery-percent">Show Battery Percentage</Label>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Display the radio's battery percentage in the status bar. Data updates every 60
|
||||||
|
seconds and may take up to a minute to appear after connecting.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<div className="flex items-start gap-3 rounded-md border border-border/60 p-3">
|
||||||
<input
|
<Checkbox
|
||||||
type="checkbox"
|
id="battery-voltage"
|
||||||
checked={batteryPercent}
|
checked={batteryVoltage}
|
||||||
onChange={(e) => {
|
onCheckedChange={(checked) => {
|
||||||
const v = e.target.checked;
|
const v = checked === true;
|
||||||
setBatteryPercent(v);
|
setBatteryVoltage(v);
|
||||||
saveBatteryPercent(v);
|
saveBatteryVoltage(v);
|
||||||
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
|
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
|
||||||
}}
|
}}
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
className="mt-0.5"
|
||||||
/>
|
/>
|
||||||
<span className="text-sm">Show battery percentage in status bar</span>
|
<div className="space-y-1">
|
||||||
</label>
|
<Label htmlFor="battery-voltage">Show Battery Voltage</Label>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Display the radio's battery voltage in the status bar (in mV). Data updates
|
||||||
|
every 60 seconds and may take up to a minute to appear after connecting.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<label className="flex items-center gap-3 cursor-pointer">
|
<div className="flex items-start gap-3 rounded-md border border-border/60 p-3">
|
||||||
<input
|
<Checkbox
|
||||||
type="checkbox"
|
id="status-dot-pulse"
|
||||||
checked={batteryVoltage}
|
checked={statusDotPulse}
|
||||||
onChange={(e) => {
|
onCheckedChange={(checked) => {
|
||||||
const v = e.target.checked;
|
const v = checked === true;
|
||||||
setBatteryVoltage(v);
|
setStatusDotPulse(v);
|
||||||
saveBatteryVoltage(v);
|
saveStatusDotPulse(v);
|
||||||
window.dispatchEvent(new Event(BATTERY_DISPLAY_CHANGE_EVENT));
|
window.dispatchEvent(new Event(STATUS_DOT_PULSE_CHANGE_EVENT));
|
||||||
}}
|
}}
|
||||||
className="w-4 h-4 rounded border-input accent-primary"
|
className="mt-0.5"
|
||||||
/>
|
/>
|
||||||
<span className="text-sm">Show battery voltage in status bar</span>
|
<div className="space-y-1">
|
||||||
</label>
|
<Label htmlFor="status-dot-pulse">Status Dot Glitters</Label>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
{(batteryPercent || batteryVoltage) && (
|
Flash the connection status dot in color as packets arrive: blue for channel, purple
|
||||||
<p className="text-xs text-muted-foreground ml-7">
|
for DM, cyan for advert, dark green for other.
|
||||||
Battery data updates every 60 seconds and may take up to a minute to appear after
|
</p>
|
||||||
connecting.
|
</div>
|
||||||
</p>
|
</div>
|
||||||
)}
|
</div>
|
||||||
|
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label htmlFor="font-scale-input">Relative Font Size</Label>
|
<Label htmlFor="font-scale-input">Relative Font Size</Label>
|
||||||
@@ -318,12 +512,16 @@ export function SettingsLocalSection({
|
|||||||
Reset
|
Reset
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Scales the app's typography for this browser only. The slider moves in 5% steps;
|
Scales the app's typography for this browser only. The slider moves in 5% steps;
|
||||||
the number field accepts any value from 25% to 400%.
|
the number field accepts any value from 25% to 400%.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<Separator />
|
||||||
|
|
||||||
|
<PushDeviceManagement contacts={contacts} channels={channels} />
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -427,15 +625,15 @@ function ThemePreview({ className }: { className?: string }) {
|
|||||||
desc="Sheet / dialog title"
|
desc="Sheet / dialog title"
|
||||||
/>
|
/>
|
||||||
<PreviewTextRow
|
<PreviewTextRow
|
||||||
classes="text-base font-semibold"
|
classes="text-base font-semibold tracking-tight"
|
||||||
label="text-base font-semibold"
|
label="text-base font-semibold tracking-tight"
|
||||||
desc="Section title"
|
desc="Section / group title"
|
||||||
/>
|
/>
|
||||||
<PreviewTextRow classes="text-sm" label="text-sm" desc="Body text, form labels" />
|
<PreviewTextRow classes="text-sm" label="text-sm" desc="Body text, form labels" />
|
||||||
<PreviewTextRow
|
<PreviewTextRow
|
||||||
classes="text-xs text-muted-foreground"
|
classes="text-[0.8125rem] text-muted-foreground"
|
||||||
label="text-xs text-muted-foreground"
|
label="text-[0.8125rem] text-muted-foreground"
|
||||||
desc="Helper text"
|
desc="Helper / description text"
|
||||||
/>
|
/>
|
||||||
<PreviewTextRow
|
<PreviewTextRow
|
||||||
classes="text-[0.6875rem] text-muted-foreground"
|
classes="text-[0.6875rem] text-muted-foreground"
|
||||||
@@ -444,7 +642,7 @@ function ThemePreview({ className }: { className?: string }) {
|
|||||||
/>
|
/>
|
||||||
<div>
|
<div>
|
||||||
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
<p className="text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium">
|
||||||
Section Label
|
Metadata Label
|
||||||
</p>
|
</p>
|
||||||
<p className="text-[0.625rem] text-muted-foreground/60 mt-0.5">
|
<p className="text-[0.625rem] text-muted-foreground/60 mt-0.5">
|
||||||
text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium
|
text-[0.625rem] uppercase tracking-wider text-muted-foreground font-medium
|
||||||
|
|||||||
@@ -392,7 +392,7 @@ export function SettingsRadioSection({
|
|||||||
<div className={className}>
|
<div className={className}>
|
||||||
{/* ── Connection ── */}
|
{/* ── Connection ── */}
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<Label className="text-base">Connection</Label>
|
<h3 className="text-base font-semibold tracking-tight">Connection</h3>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<div
|
<div
|
||||||
className={`w-2 h-2 rounded-full ${
|
className={`w-2 h-2 rounded-full ${
|
||||||
@@ -423,7 +423,7 @@ export function SettingsRadioSection({
|
|||||||
>
|
>
|
||||||
{connectionBusy ? `${connectionActionLabel}...` : connectionActionLabel}
|
{connectionBusy ? `${connectionActionLabel}...` : connectionActionLabel}
|
||||||
</Button>
|
</Button>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Disconnect pauses automatic reconnect attempts so another device can use the radio.
|
Disconnect pauses automatic reconnect attempts so another device can use the radio.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
@@ -432,7 +432,7 @@ export function SettingsRadioSection({
|
|||||||
|
|
||||||
{/* ── Identity ── */}
|
{/* ── Identity ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<Label className="text-base">Identity</Label>
|
<h3 className="text-base font-semibold tracking-tight">Identity</h3>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
@@ -477,7 +477,7 @@ export function SettingsRadioSection({
|
|||||||
|
|
||||||
{/* ── Radio Parameters ── */}
|
{/* ── Radio Parameters ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<Label className="text-base">Radio Parameters</Label>
|
<h3 className="text-base font-semibold tracking-tight">Radio Parameters</h3>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
@@ -590,7 +590,7 @@ export function SettingsRadioSection({
|
|||||||
{/* ── Location ── */}
|
{/* ── Location ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<Label className="text-base">Location</Label>
|
<h3 className="text-base font-semibold tracking-tight">Location</h3>
|
||||||
<Button
|
<Button
|
||||||
type="button"
|
type="button"
|
||||||
variant="outline"
|
variant="outline"
|
||||||
@@ -645,7 +645,7 @@ export function SettingsRadioSection({
|
|||||||
<option value="off">Off</option>
|
<option value="off">Off</option>
|
||||||
<option value="current">Include Node Location</option>
|
<option value="current">Include Node Location</option>
|
||||||
</select>
|
</select>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Companion-radio firmware does not distinguish between saved coordinates and live GPS
|
Companion-radio firmware does not distinguish between saved coordinates and live GPS
|
||||||
here. When enabled, adverts include the node's current location state. That may be
|
here. When enabled, adverts include the node's current location state. That may be
|
||||||
the last coordinates you set from RemoteTerm or live GPS coordinates if the node itself
|
the last coordinates you set from RemoteTerm or live GPS coordinates if the node itself
|
||||||
@@ -668,13 +668,13 @@ export function SettingsRadioSection({
|
|||||||
variant="outline"
|
variant="outline"
|
||||||
className="flex-1"
|
className="flex-1"
|
||||||
>
|
>
|
||||||
{busy && !rebooting ? 'Saving...' : 'Save'}
|
{busy && !rebooting ? 'Saving...' : 'Save Radio Config'}
|
||||||
</Button>
|
</Button>
|
||||||
<Button onClick={handleSaveAndReboot} disabled={busy || rebooting} className="flex-1">
|
<Button onClick={handleSaveAndReboot} disabled={busy || rebooting} className="flex-1">
|
||||||
{rebooting ? 'Rebooting...' : 'Save & Reboot'}
|
{rebooting ? 'Rebooting...' : 'Save Radio Config & Reboot'}
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Some settings may require a reboot to take effect on some radios.
|
Some settings may require a reboot to take effect on some radios.
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
@@ -682,7 +682,7 @@ export function SettingsRadioSection({
|
|||||||
|
|
||||||
{/* ── Messaging ── */}
|
{/* ── Messaging ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<Label className="text-base">Messaging</Label>
|
<h3 className="text-base font-semibold tracking-tight">Messaging</h3>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
@@ -695,7 +695,7 @@ export function SettingsRadioSection({
|
|||||||
/>
|
/>
|
||||||
<div className="space-y-1">
|
<div className="space-y-1">
|
||||||
<Label htmlFor="multi-acks-enabled">Extra Direct ACK Transmission</Label>
|
<Label htmlFor="multi-acks-enabled">Extra Direct ACK Transmission</Label>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
When enabled, the radio sends one extra direct ACK transmission before the normal ACK
|
When enabled, the radio sends one extra direct ACK transmission before the normal ACK
|
||||||
for received direct messages. This is a firmware-level receive behavior, not a
|
for received direct messages. This is a firmware-level receive behavior, not a
|
||||||
RemoteTerm retry setting.
|
RemoteTerm retry setting.
|
||||||
@@ -714,7 +714,7 @@ export function SettingsRadioSection({
|
|||||||
/>
|
/>
|
||||||
<div className="space-y-1">
|
<div className="space-y-1">
|
||||||
<Label htmlFor="auto-resend-channel">Auto-Resend Unheard Channel Messages</Label>
|
<Label htmlFor="auto-resend-channel">Auto-Resend Unheard Channel Messages</Label>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
When enabled, outgoing channel messages that receive no echo within 2 seconds are
|
When enabled, outgoing channel messages that receive no echo within 2 seconds are
|
||||||
automatically resent once (byte-perfect, within the 30-second dedup window). Repeaters
|
automatically resent once (byte-perfect, within the 30-second dedup window). Repeaters
|
||||||
that already heard the original will ignore the duplicate. This functionality will NOT
|
that already heard the original will ignore the duplicate. This functionality will NOT
|
||||||
@@ -732,7 +732,7 @@ export function SettingsRadioSection({
|
|||||||
onChange={(e) => setFloodScope(e.target.value)}
|
onChange={(e) => setFloodScope(e.target.value)}
|
||||||
placeholder="MyRegion"
|
placeholder="MyRegion"
|
||||||
/>
|
/>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Tag outgoing flood messages with a region name (e.g. MyRegion). Repeaters configured for
|
Tag outgoing flood messages with a region name (e.g. MyRegion). Repeaters configured for
|
||||||
that region can forward the traffic, while repeaters configured to deny other regions may
|
that region can forward the traffic, while repeaters configured to deny other regions may
|
||||||
drop it. Leave empty to disable.
|
drop it. Leave empty to disable.
|
||||||
@@ -749,7 +749,7 @@ export function SettingsRadioSection({
|
|||||||
value={maxRadioContacts}
|
value={maxRadioContacts}
|
||||||
onChange={(e) => setMaxRadioContacts(e.target.value)}
|
onChange={(e) => setMaxRadioContacts(e.target.value)}
|
||||||
/>
|
/>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Configured radio contact capacity. Favorites reload first, then background maintenance
|
Configured radio contact capacity. Favorites reload first, then background maintenance
|
||||||
refills to about 80% of this value and offloads once occupancy reaches about 95%.
|
refills to about 80% of this value and offloads once occupancy reaches about 95%.
|
||||||
</p>
|
</p>
|
||||||
@@ -769,140 +769,143 @@ export function SettingsRadioSection({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<Button onClick={handleSaveFloodSettings} disabled={floodBusy} className="w-full">
|
<Button onClick={handleSaveFloodSettings} disabled={floodBusy} className="w-full">
|
||||||
{floodBusy ? 'Saving...' : 'Save Settings'}
|
{floodBusy ? 'Saving...' : 'Save Messaging Settings'}
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
<Separator />
|
<Separator />
|
||||||
|
|
||||||
{/* ── Advertising & Discovery ── */}
|
{/* ── Advertising & Discovery ── */}
|
||||||
<div className="space-y-2">
|
<div className="space-y-5">
|
||||||
<Label className="text-base">Advertising & Discovery</Label>
|
<h3 className="text-base font-semibold tracking-tight">Advertising & Discovery</h3>
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<Label htmlFor="advert-interval">Periodic Advertising Interval</Label>
|
<Label htmlFor="advert-interval">Periodic Advertising Interval</Label>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<Input
|
<Input
|
||||||
id="advert-interval"
|
id="advert-interval"
|
||||||
type="number"
|
type="number"
|
||||||
min="0"
|
min="0"
|
||||||
value={advertIntervalHours}
|
value={advertIntervalHours}
|
||||||
onChange={(e) => setAdvertIntervalHours(e.target.value)}
|
onChange={(e) => setAdvertIntervalHours(e.target.value)}
|
||||||
className="w-28"
|
className="w-28"
|
||||||
/>
|
/>
|
||||||
<span className="text-sm text-muted-foreground">hours (0 = off)</span>
|
<span className="text-sm text-muted-foreground">hours (0 = off)</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
How often to automatically advertise presence. Set to 0 to disable. Minimum: 1 hour.
|
||||||
|
Recommended: 24 hours or higher.
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs text-muted-foreground">
|
|
||||||
How often to automatically advertise presence. Set to 0 to disable. Minimum: 1 hour.
|
|
||||||
Recommended: 24 hours or higher.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<Label>Send Advertisement</Label>
|
<h4 className="text-sm font-semibold">Send Advertisement</h4>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
Flood adverts propagate through repeaters. Zero-hop adverts are local-only and use less
|
Flood adverts propagate through repeaters. Zero-hop adverts are local-only and use less
|
||||||
airtime.
|
airtime.
|
||||||
</p>
|
</p>
|
||||||
<div className="grid grid-cols-1 gap-2 sm:grid-cols-2">
|
<div className="grid grid-cols-1 gap-2 sm:grid-cols-2">
|
||||||
<Button
|
|
||||||
onClick={() => handleAdvertise('flood')}
|
|
||||||
disabled={advertisingMode !== null || !health?.radio_connected}
|
|
||||||
className="w-full bg-warning hover:bg-warning/90 text-warning-foreground"
|
|
||||||
>
|
|
||||||
{advertisingMode === 'flood' ? 'Sending...' : 'Send Flood Advertisement'}
|
|
||||||
</Button>
|
|
||||||
<Button
|
|
||||||
onClick={() => handleAdvertise('zero_hop')}
|
|
||||||
disabled={advertisingMode !== null || !health?.radio_connected}
|
|
||||||
className="w-full"
|
|
||||||
>
|
|
||||||
{advertisingMode === 'zero_hop' ? 'Sending...' : 'Send Zero-Hop Advertisement'}
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
{!health?.radio_connected && (
|
|
||||||
<p className="text-sm text-destructive">Radio not connected</p>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="space-y-3">
|
|
||||||
<Label>Mesh Discovery</Label>
|
|
||||||
<p className="text-xs text-muted-foreground">
|
|
||||||
Discover nearby node types that currently respond to mesh discovery requests: repeaters
|
|
||||||
and sensors.
|
|
||||||
</p>
|
|
||||||
<div className="grid grid-cols-1 gap-2 sm:grid-cols-3">
|
|
||||||
{[
|
|
||||||
{ target: 'repeaters', label: 'Discover Repeaters' },
|
|
||||||
{ target: 'sensors', label: 'Discover Sensors' },
|
|
||||||
{ target: 'all', label: 'Discover Both' },
|
|
||||||
].map(({ target, label }) => (
|
|
||||||
<Button
|
<Button
|
||||||
key={target}
|
onClick={() => handleAdvertise('flood')}
|
||||||
type="button"
|
disabled={advertisingMode !== null || !health?.radio_connected}
|
||||||
variant="outline"
|
className="w-full bg-warning hover:bg-warning/90 text-warning-foreground"
|
||||||
onClick={() => handleDiscover(target as RadioDiscoveryTarget)}
|
>
|
||||||
disabled={meshDiscoveryLoadingTarget !== null || !health?.radio_connected}
|
{advertisingMode === 'flood' ? 'Sending...' : 'Send Flood Advertisement'}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
onClick={() => handleAdvertise('zero_hop')}
|
||||||
|
disabled={advertisingMode !== null || !health?.radio_connected}
|
||||||
className="w-full"
|
className="w-full"
|
||||||
>
|
>
|
||||||
{meshDiscoveryLoadingTarget === target ? 'Listening...' : label}
|
{advertisingMode === 'zero_hop' ? 'Sending...' : 'Send Zero-Hop Advertisement'}
|
||||||
</Button>
|
</Button>
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
{!health?.radio_connected && (
|
|
||||||
<p className="text-sm text-destructive">Radio not connected</p>
|
|
||||||
)}
|
|
||||||
{discoverError && (
|
|
||||||
<p className="text-sm text-destructive" role="alert">
|
|
||||||
{discoverError}
|
|
||||||
</p>
|
|
||||||
)}
|
|
||||||
{meshDiscovery && (
|
|
||||||
<div className="space-y-2 rounded-md border border-input bg-muted/20 p-3">
|
|
||||||
<div className="flex items-center justify-between gap-4">
|
|
||||||
<p className="text-sm font-medium">
|
|
||||||
Last sweep: {meshDiscovery.results.length} node
|
|
||||||
{meshDiscovery.results.length === 1 ? '' : 's'}
|
|
||||||
</p>
|
|
||||||
<p className="text-xs text-muted-foreground">
|
|
||||||
{meshDiscovery.duration_seconds.toFixed(0)}s listen window
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
{meshDiscovery.results.length === 0 ? (
|
|
||||||
<p className="text-sm text-muted-foreground">
|
|
||||||
No supported nodes responded during the last discovery sweep.
|
|
||||||
</p>
|
|
||||||
) : (
|
|
||||||
<div className="space-y-2">
|
|
||||||
{meshDiscovery.results.map((result) => (
|
|
||||||
<div
|
|
||||||
key={result.public_key}
|
|
||||||
className="rounded-md border border-input bg-background px-3 py-2"
|
|
||||||
>
|
|
||||||
<div className="flex items-center justify-between gap-3">
|
|
||||||
<span className="text-sm font-medium">
|
|
||||||
{result.name ?? <span className="capitalize">{result.node_type}</span>}
|
|
||||||
</span>
|
|
||||||
<span className="text-xs text-muted-foreground">
|
|
||||||
heard {result.heard_count} time{result.heard_count === 1 ? '' : 's'}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
{result.name && (
|
|
||||||
<p className="text-xs capitalize text-muted-foreground">{result.node_type}</p>
|
|
||||||
)}
|
|
||||||
<p className="mt-1 break-all font-mono text-xs text-muted-foreground">
|
|
||||||
{result.public_key}
|
|
||||||
</p>
|
|
||||||
<p className="mt-1 text-xs text-muted-foreground">
|
|
||||||
Heard here: {result.local_snr ?? 'n/a'} dB SNR / {result.local_rssi ?? 'n/a'}{' '}
|
|
||||||
dBm RSSI. Remote heard us: {result.remote_snr ?? 'n/a'} dB SNR.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
{!health?.radio_connected && (
|
||||||
|
<p className="text-sm text-destructive">Radio not connected</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="space-y-3">
|
||||||
|
<h4 className="text-sm font-semibold">Mesh Discovery</h4>
|
||||||
|
<p className="text-[0.8125rem] text-muted-foreground">
|
||||||
|
Discover nearby node types that currently respond to mesh discovery requests: repeaters
|
||||||
|
and sensors.
|
||||||
|
</p>
|
||||||
|
<div className="grid grid-cols-1 gap-2 sm:grid-cols-3">
|
||||||
|
{[
|
||||||
|
{ target: 'repeaters', label: 'Discover Repeaters' },
|
||||||
|
{ target: 'sensors', label: 'Discover Sensors' },
|
||||||
|
{ target: 'all', label: 'Discover Both' },
|
||||||
|
].map(({ target, label }) => (
|
||||||
|
<Button
|
||||||
|
key={target}
|
||||||
|
type="button"
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => handleDiscover(target as RadioDiscoveryTarget)}
|
||||||
|
disabled={meshDiscoveryLoadingTarget !== null || !health?.radio_connected}
|
||||||
|
className="w-full"
|
||||||
|
>
|
||||||
|
{meshDiscoveryLoadingTarget === target ? 'Listening...' : label}
|
||||||
|
</Button>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
{!health?.radio_connected && (
|
||||||
|
<p className="text-sm text-destructive">Radio not connected</p>
|
||||||
|
)}
|
||||||
|
{discoverError && (
|
||||||
|
<p className="text-sm text-destructive" role="alert">
|
||||||
|
{discoverError}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
{meshDiscovery && (
|
||||||
|
<div className="space-y-2 rounded-md border border-input bg-muted/20 p-3">
|
||||||
|
<div className="flex items-center justify-between gap-4">
|
||||||
|
<p className="text-sm font-medium">
|
||||||
|
Last sweep: {meshDiscovery.results.length} node
|
||||||
|
{meshDiscovery.results.length === 1 ? '' : 's'}
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{meshDiscovery.duration_seconds.toFixed(0)}s listen window
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{meshDiscovery.results.length === 0 ? (
|
||||||
|
<p className="text-sm text-muted-foreground">
|
||||||
|
No supported nodes responded during the last discovery sweep.
|
||||||
|
</p>
|
||||||
|
) : (
|
||||||
|
<div className="space-y-2">
|
||||||
|
{meshDiscovery.results.map((result) => (
|
||||||
|
<div
|
||||||
|
key={result.public_key}
|
||||||
|
className="rounded-md border border-input bg-background px-3 py-2"
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between gap-3">
|
||||||
|
<span className="text-sm font-medium">
|
||||||
|
{result.name ?? <span className="capitalize">{result.node_type}</span>}
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-muted-foreground">
|
||||||
|
heard {result.heard_count} time{result.heard_count === 1 ? '' : 's'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{result.name && (
|
||||||
|
<p className="text-xs capitalize text-muted-foreground">
|
||||||
|
{result.node_type}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
<p className="mt-1 break-all font-mono text-xs text-muted-foreground">
|
||||||
|
{result.public_key}
|
||||||
|
</p>
|
||||||
|
<p className="mt-1 text-xs text-muted-foreground">
|
||||||
|
Heard here: {result.local_snr ?? 'n/a'} dB SNR /{' '}
|
||||||
|
{result.local_rssi ?? 'n/a'} dBm RSSI. Remote heard us:{' '}
|
||||||
|
{result.remote_snr ?? 'n/a'} dB SNR.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -226,7 +226,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
<div className="space-y-6">
|
<div className="space-y-6">
|
||||||
{/* Network */}
|
{/* Network */}
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Network</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Network</h3>
|
||||||
<div className="grid grid-cols-3 gap-3">
|
<div className="grid grid-cols-3 gap-3">
|
||||||
<div className="text-center p-3 bg-muted/50 rounded-md">
|
<div className="text-center p-3 bg-muted/50 rounded-md">
|
||||||
<div className="text-2xl font-bold">{stats.contact_count}</div>
|
<div className="text-2xl font-bold">{stats.contact_count}</div>
|
||||||
@@ -247,7 +247,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
|
|
||||||
{/* Messages */}
|
{/* Messages */}
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Messages</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Messages</h3>
|
||||||
<div className="grid grid-cols-3 gap-3">
|
<div className="grid grid-cols-3 gap-3">
|
||||||
<div className="text-center p-3 bg-muted/50 rounded-md">
|
<div className="text-center p-3 bg-muted/50 rounded-md">
|
||||||
<div className="text-2xl font-bold">{stats.total_dms}</div>
|
<div className="text-2xl font-bold">{stats.total_dms}</div>
|
||||||
@@ -268,7 +268,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
|
|
||||||
{/* Activity */}
|
{/* Activity */}
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Activity</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Activity</h3>
|
||||||
<table className="w-full text-sm">
|
<table className="w-full text-sm">
|
||||||
<thead>
|
<thead>
|
||||||
<tr className="text-muted-foreground">
|
<tr className="text-muted-foreground">
|
||||||
@@ -305,7 +305,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
|
|
||||||
{/* Packets */}
|
{/* Packets */}
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Packets</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Packets</h3>
|
||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<div className="flex justify-between items-center">
|
<div className="flex justify-between items-center">
|
||||||
<span className="text-sm text-muted-foreground">Total stored</span>
|
<span className="text-sm text-muted-foreground">Total stored</span>
|
||||||
@@ -327,7 +327,9 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
<>
|
<>
|
||||||
<Separator />
|
<Separator />
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Packets per Hour (72h)</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">
|
||||||
|
Packets per Hour (72h)
|
||||||
|
</h3>
|
||||||
<PacketsPerHourChart buckets={stats.packets_per_hour_72h} />
|
<PacketsPerHourChart buckets={stats.packets_per_hour_72h} />
|
||||||
</div>
|
</div>
|
||||||
</>
|
</>
|
||||||
@@ -337,7 +339,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
|
|
||||||
{/* Path Hash Width */}
|
{/* Path Hash Width */}
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Path Hash Width (24h)</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Path Hash Width (24h)</h3>
|
||||||
<div className="mb-2 text-xs text-muted-foreground">
|
<div className="mb-2 text-xs text-muted-foreground">
|
||||||
Parsed stored raw packets from the last 24 hours:{' '}
|
Parsed stored raw packets from the last 24 hours:{' '}
|
||||||
{stats.path_hash_width_24h.total_packets}
|
{stats.path_hash_width_24h.total_packets}
|
||||||
@@ -407,7 +409,9 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
<>
|
<>
|
||||||
<Separator />
|
<Separator />
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Busiest Channels (24h)</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">
|
||||||
|
Busiest Channels (24h)
|
||||||
|
</h3>
|
||||||
<ResponsiveContainer
|
<ResponsiveContainer
|
||||||
width="100%"
|
width="100%"
|
||||||
height={stats.busiest_channels_24h.length * 28 + 8}
|
height={stats.busiest_channels_24h.length * 28 + 8}
|
||||||
@@ -451,7 +455,7 @@ export function SettingsStatisticsSection({ className }: { className?: string })
|
|||||||
<>
|
<>
|
||||||
<Separator />
|
<Separator />
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-sm font-medium mb-2">Noise Floor (24h)</h4>
|
<h3 className="text-base font-semibold tracking-tight mb-2">Noise Floor (24h)</h3>
|
||||||
{stats.noise_floor_24h.latest_noise_floor_dbm != null && (
|
{stats.noise_floor_24h.latest_noise_floor_dbm != null && (
|
||||||
<div className="mb-2 text-xs text-muted-foreground">
|
<div className="mb-2 text-xs text-muted-foreground">
|
||||||
Latest reading: {stats.noise_floor_24h.latest_noise_floor_dbm} dBm
|
Latest reading: {stats.noise_floor_24h.latest_noise_floor_dbm} dBm
|
||||||
|
|||||||
@@ -56,15 +56,68 @@ interface SheetContentProps
|
|||||||
hideCloseButton?: boolean;
|
hideCloseButton?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Safe-area insets for each sheet side. Sheets are position:fixed and escape
|
||||||
|
// body padding, so without this they render under the iOS status bar/home
|
||||||
|
// indicator when the app is installed as a PWA.
|
||||||
|
//
|
||||||
|
// NOTE: these inline styles override the matching sides of the `p-6` default
|
||||||
|
// in sheetVariants. All current consumers pass `p-0`; future sheets that want
|
||||||
|
// the default padding should compose explicit per-side padding in their own
|
||||||
|
// className rather than relying on the `p-6` shorthand being preserved.
|
||||||
|
type SheetSide = Exclude<VariantProps<typeof sheetVariants>['side'], null | undefined>;
|
||||||
|
|
||||||
|
const sheetSafeAreaStyles: Record<SheetSide, React.CSSProperties> = {
|
||||||
|
top: {
|
||||||
|
paddingTop: 'var(--safe-area-top)',
|
||||||
|
paddingLeft: 'var(--safe-area-left)',
|
||||||
|
paddingRight: 'var(--safe-area-right)',
|
||||||
|
},
|
||||||
|
bottom: {
|
||||||
|
paddingBottom: 'var(--safe-area-bottom)',
|
||||||
|
paddingLeft: 'var(--safe-area-left)',
|
||||||
|
paddingRight: 'var(--safe-area-right)',
|
||||||
|
},
|
||||||
|
left: {
|
||||||
|
paddingTop: 'var(--safe-area-top)',
|
||||||
|
paddingLeft: 'var(--safe-area-left)',
|
||||||
|
paddingBottom: 'var(--safe-area-bottom)',
|
||||||
|
},
|
||||||
|
right: {
|
||||||
|
paddingTop: 'var(--safe-area-top)',
|
||||||
|
paddingRight: 'var(--safe-area-right)',
|
||||||
|
paddingBottom: 'var(--safe-area-bottom)',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
const SheetContent = React.forwardRef<
|
const SheetContent = React.forwardRef<
|
||||||
React.ElementRef<typeof SheetPrimitive.Content>,
|
React.ElementRef<typeof SheetPrimitive.Content>,
|
||||||
SheetContentProps
|
SheetContentProps
|
||||||
>(({ side = 'right', className, children, hideCloseButton = false, ...props }, ref) => (
|
>(({ side = 'right', className, children, hideCloseButton = false, style, ...props }, ref) => (
|
||||||
<SheetPortal>
|
<SheetPortal>
|
||||||
<SheetOverlay />
|
<SheetOverlay />
|
||||||
<SheetPrimitive.Content ref={ref} className={cn(sheetVariants({ side }), className)} {...props}>
|
<SheetPrimitive.Content
|
||||||
|
ref={ref}
|
||||||
|
className={cn(sheetVariants({ side }), className)}
|
||||||
|
style={{ ...sheetSafeAreaStyles[side as SheetSide], ...style }}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
{!hideCloseButton && (
|
{!hideCloseButton && (
|
||||||
<SheetPrimitive.Close className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary">
|
<SheetPrimitive.Close
|
||||||
|
// Absolute positioning is measured from the containing block's
|
||||||
|
// padding edge, so the safe-area padding on SheetContent does not
|
||||||
|
// push this button down. We offset `top` by safe-area-top manually
|
||||||
|
// for sheets that pin to the viewport top (top/left/right). Bottom
|
||||||
|
// sheets start mid-viewport, so no adjustment is needed there.
|
||||||
|
style={
|
||||||
|
side === 'bottom'
|
||||||
|
? undefined
|
||||||
|
: {
|
||||||
|
top: 'calc(var(--safe-area-top) + 1rem)',
|
||||||
|
right: 'calc(var(--safe-area-right) + 1rem)',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary"
|
||||||
|
>
|
||||||
<X className="h-4 w-4" />
|
<X className="h-4 w-4" />
|
||||||
<span className="sr-only">Close</span>
|
<span className="sr-only">Close</span>
|
||||||
</SheetPrimitive.Close>
|
</SheetPrimitive.Close>
|
||||||
|
|||||||
@@ -0,0 +1,35 @@
|
|||||||
|
import { createContext, useContext, type ReactNode } from 'react';
|
||||||
|
import { usePushSubscription, type PushSubscriptionState } from '../hooks/usePushSubscription';
|
||||||
|
|
||||||
|
const noopAsync = async () => {};
|
||||||
|
const noopAsyncNull = async () => null;
|
||||||
|
|
||||||
|
const defaultState: PushSubscriptionState = {
|
||||||
|
isSupported: false,
|
||||||
|
isSubscribed: false,
|
||||||
|
currentSubscriptionId: null,
|
||||||
|
allSubscriptions: [],
|
||||||
|
pushConversations: [],
|
||||||
|
loading: false,
|
||||||
|
subscribe: noopAsyncNull,
|
||||||
|
unsubscribe: noopAsync,
|
||||||
|
toggleConversation: noopAsync,
|
||||||
|
isConversationPushEnabled: () => false,
|
||||||
|
deleteSubscription: noopAsync,
|
||||||
|
testPush: noopAsync,
|
||||||
|
refreshSubscriptions: async () => [],
|
||||||
|
refreshConversations: noopAsync,
|
||||||
|
};
|
||||||
|
|
||||||
|
const PushSubscriptionContext = createContext<PushSubscriptionState>(defaultState);
|
||||||
|
|
||||||
|
export function PushSubscriptionProvider({ children }: { children: ReactNode }) {
|
||||||
|
const push = usePushSubscription();
|
||||||
|
return (
|
||||||
|
<PushSubscriptionContext.Provider value={push}>{children}</PushSubscriptionContext.Provider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function usePush(): PushSubscriptionState {
|
||||||
|
return useContext(PushSubscriptionContext);
|
||||||
|
}
|
||||||
@@ -0,0 +1,277 @@
|
|||||||
|
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||||
|
import { toast } from '../components/ui/sonner';
|
||||||
|
import { api } from '../api';
|
||||||
|
import type { PushSubscriptionInfo } from '../types';
|
||||||
|
|
||||||
|
function generateLabel(): string {
|
||||||
|
const ua = navigator.userAgent;
|
||||||
|
if (/Firefox/i.test(ua)) {
|
||||||
|
if (/Android/i.test(ua)) return 'Firefox on Android';
|
||||||
|
if (/Mac/i.test(ua)) return 'Firefox on macOS';
|
||||||
|
if (/Windows/i.test(ua)) return 'Firefox on Windows';
|
||||||
|
if (/Linux/i.test(ua)) return 'Firefox on Linux';
|
||||||
|
return 'Firefox';
|
||||||
|
}
|
||||||
|
if (/Chrome/i.test(ua) && !/Edg/i.test(ua)) {
|
||||||
|
if (/Android/i.test(ua)) return 'Chrome on Android';
|
||||||
|
if (/CrOS/i.test(ua)) return 'Chrome on ChromeOS';
|
||||||
|
if (/Mac/i.test(ua)) return 'Chrome on macOS';
|
||||||
|
if (/Windows/i.test(ua)) return 'Chrome on Windows';
|
||||||
|
if (/Linux/i.test(ua)) return 'Chrome on Linux';
|
||||||
|
return 'Chrome';
|
||||||
|
}
|
||||||
|
if (/Edg/i.test(ua)) return 'Edge';
|
||||||
|
if (/Safari/i.test(ua)) {
|
||||||
|
if (/iPhone|iPad/i.test(ua)) return 'Safari on iOS';
|
||||||
|
return 'Safari on macOS';
|
||||||
|
}
|
||||||
|
return 'Browser';
|
||||||
|
}
|
||||||
|
|
||||||
|
function urlBase64ToUint8Array(base64String: string): Uint8Array {
|
||||||
|
const padding = '='.repeat((4 - (base64String.length % 4)) % 4);
|
||||||
|
const base64 = (base64String + padding).replace(/-/g, '+').replace(/_/g, '/');
|
||||||
|
const raw = atob(base64);
|
||||||
|
const arr = new Uint8Array(raw.length);
|
||||||
|
for (let i = 0; i < raw.length; i++) arr[i] = raw.charCodeAt(i);
|
||||||
|
return arr;
|
||||||
|
}
|
||||||
|
|
||||||
|
function uint8ArraysEqual(a: Uint8Array | null, b: Uint8Array): boolean {
|
||||||
|
if (!a || a.length !== b.length) return false;
|
||||||
|
for (let i = 0; i < a.length; i++) {
|
||||||
|
if (a[i] !== b[i]) return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getApplicationServerKeyBytes(
|
||||||
|
key: ArrayBuffer | ArrayBufferView | null | undefined
|
||||||
|
): Uint8Array | null {
|
||||||
|
if (!key) return null;
|
||||||
|
if (ArrayBuffer.isView(key)) {
|
||||||
|
return new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
|
||||||
|
}
|
||||||
|
return new Uint8Array(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PushSubscriptionState {
|
||||||
|
isSupported: boolean;
|
||||||
|
isSubscribed: boolean;
|
||||||
|
currentSubscriptionId: string | null;
|
||||||
|
allSubscriptions: PushSubscriptionInfo[];
|
||||||
|
/** Global list of push-enabled conversation state keys (device-independent). */
|
||||||
|
pushConversations: string[];
|
||||||
|
loading: boolean;
|
||||||
|
subscribe: () => Promise<string | null>;
|
||||||
|
unsubscribe: () => Promise<void>;
|
||||||
|
/** Toggle a conversation in the global push list (device-independent). */
|
||||||
|
toggleConversation: (conversationKey: string) => Promise<void>;
|
||||||
|
isConversationPushEnabled: (conversationKey: string) => boolean;
|
||||||
|
deleteSubscription: (subscriptionId: string) => Promise<void>;
|
||||||
|
testPush: (subscriptionId: string) => Promise<void>;
|
||||||
|
refreshSubscriptions: () => Promise<PushSubscriptionInfo[]>;
|
||||||
|
refreshConversations: () => Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function usePushSubscription(): PushSubscriptionState {
|
||||||
|
const [isSupported, setIsSupported] = useState(false);
|
||||||
|
const [currentSubscriptionId, setCurrentSubscriptionId] = useState<string | null>(null);
|
||||||
|
const [allSubscriptions, setAllSubscriptions] = useState<PushSubscriptionInfo[]>([]);
|
||||||
|
const [pushConversations, setPushConversations] = useState<string[]>([]);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const vapidKeyRef = useRef<string | null>(null);
|
||||||
|
|
||||||
|
const reconcileCurrentSubscription = useCallback(
|
||||||
|
(subs: PushSubscriptionInfo[], endpoint: string | null) => {
|
||||||
|
setAllSubscriptions(subs);
|
||||||
|
if (!endpoint) {
|
||||||
|
setCurrentSubscriptionId(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const match = subs.find((sub) => sub.endpoint === endpoint);
|
||||||
|
setCurrentSubscriptionId(match?.id ?? null);
|
||||||
|
},
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const supported =
|
||||||
|
window.isSecureContext &&
|
||||||
|
'serviceWorker' in navigator &&
|
||||||
|
'PushManager' in window &&
|
||||||
|
'Notification' in window;
|
||||||
|
setIsSupported(supported);
|
||||||
|
|
||||||
|
if (supported) {
|
||||||
|
// Always load all registered devices so Settings can manage them even
|
||||||
|
// when this particular browser isn't subscribed.
|
||||||
|
const subsPromise = api.getPushSubscriptions().catch(() => [] as PushSubscriptionInfo[]);
|
||||||
|
|
||||||
|
// Check if THIS browser has an active push subscription and match it
|
||||||
|
// to a backend record.
|
||||||
|
navigator.serviceWorker.ready
|
||||||
|
.then((reg) => reg.pushManager.getSubscription())
|
||||||
|
.then(async (sub) => {
|
||||||
|
const existing = await subsPromise;
|
||||||
|
reconcileCurrentSubscription(existing, sub?.endpoint ?? null);
|
||||||
|
})
|
||||||
|
.catch(() => {});
|
||||||
|
|
||||||
|
// Load global conversation list
|
||||||
|
api
|
||||||
|
.getPushConversations()
|
||||||
|
.then(setPushConversations)
|
||||||
|
.catch(() => {});
|
||||||
|
}
|
||||||
|
}, [reconcileCurrentSubscription]);
|
||||||
|
|
||||||
|
const refreshSubscriptions = useCallback(async () => {
|
||||||
|
try {
|
||||||
|
const subs = await api.getPushSubscriptions();
|
||||||
|
const reg = await navigator.serviceWorker.ready;
|
||||||
|
const sub = await reg.pushManager.getSubscription();
|
||||||
|
reconcileCurrentSubscription(subs, sub?.endpoint ?? null);
|
||||||
|
return subs;
|
||||||
|
} catch {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}, [reconcileCurrentSubscription]);
|
||||||
|
|
||||||
|
const refreshConversations = useCallback(async () => {
|
||||||
|
try {
|
||||||
|
const convos = await api.getPushConversations();
|
||||||
|
setPushConversations(convos);
|
||||||
|
} catch {
|
||||||
|
// best effort
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const subscribe = useCallback(async (): Promise<string | null> => {
|
||||||
|
if (!isSupported) return null;
|
||||||
|
setLoading(true);
|
||||||
|
try {
|
||||||
|
const resp = await api.getVapidPublicKey();
|
||||||
|
vapidKeyRef.current = resp.public_key;
|
||||||
|
const vapidKeyBytes = urlBase64ToUint8Array(resp.public_key);
|
||||||
|
|
||||||
|
const reg = await navigator.serviceWorker.ready;
|
||||||
|
let pushSub = await reg.pushManager.getSubscription();
|
||||||
|
const existingKeyBytes = getApplicationServerKeyBytes(pushSub?.options?.applicationServerKey);
|
||||||
|
const requiresRecreate =
|
||||||
|
pushSub !== null && !uint8ArraysEqual(existingKeyBytes, vapidKeyBytes);
|
||||||
|
|
||||||
|
if (requiresRecreate) {
|
||||||
|
await pushSub!.unsubscribe();
|
||||||
|
pushSub = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!pushSub) {
|
||||||
|
pushSub = await reg.pushManager.subscribe({
|
||||||
|
userVisibleOnly: true,
|
||||||
|
applicationServerKey: vapidKeyBytes.buffer as ArrayBuffer,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const json = pushSub.toJSON();
|
||||||
|
const result = await api.pushSubscribe({
|
||||||
|
endpoint: json.endpoint!,
|
||||||
|
p256dh: json.keys!.p256dh!,
|
||||||
|
auth: json.keys!.auth!,
|
||||||
|
label: generateLabel(),
|
||||||
|
});
|
||||||
|
|
||||||
|
setCurrentSubscriptionId(result.id);
|
||||||
|
await refreshSubscriptions();
|
||||||
|
return result.id;
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Push subscribe failed:', err);
|
||||||
|
toast.error('Failed to enable push notifications', {
|
||||||
|
description: err instanceof Error ? err.message : 'Check that notifications are allowed',
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [isSupported, refreshSubscriptions]);
|
||||||
|
|
||||||
|
const unsubscribe = useCallback(async () => {
|
||||||
|
setLoading(true);
|
||||||
|
try {
|
||||||
|
const reg = await navigator.serviceWorker.ready;
|
||||||
|
const pushSub = await reg.pushManager.getSubscription();
|
||||||
|
if (pushSub) await pushSub.unsubscribe();
|
||||||
|
|
||||||
|
if (currentSubscriptionId) {
|
||||||
|
await api.deletePushSubscription(currentSubscriptionId).catch(() => {});
|
||||||
|
}
|
||||||
|
|
||||||
|
setCurrentSubscriptionId(null);
|
||||||
|
await refreshSubscriptions();
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Push unsubscribe failed:', err);
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [currentSubscriptionId, refreshSubscriptions]);
|
||||||
|
|
||||||
|
const toggleConversation = useCallback(async (conversationKey: string) => {
|
||||||
|
try {
|
||||||
|
const updated = await api.togglePushConversation(conversationKey);
|
||||||
|
setPushConversations(updated);
|
||||||
|
} catch {
|
||||||
|
toast.error('Failed to update push preferences');
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const isConversationPushEnabled = useCallback(
|
||||||
|
(conversationKey: string): boolean => {
|
||||||
|
return pushConversations.includes(conversationKey);
|
||||||
|
},
|
||||||
|
[pushConversations]
|
||||||
|
);
|
||||||
|
|
||||||
|
const deleteSubscription = useCallback(
|
||||||
|
async (subscriptionId: string) => {
|
||||||
|
await api.deletePushSubscription(subscriptionId);
|
||||||
|
if (subscriptionId === currentSubscriptionId) {
|
||||||
|
setCurrentSubscriptionId(null);
|
||||||
|
try {
|
||||||
|
const reg = await navigator.serviceWorker.ready;
|
||||||
|
const pushSub = await reg.pushManager.getSubscription();
|
||||||
|
if (pushSub) await pushSub.unsubscribe();
|
||||||
|
} catch {
|
||||||
|
// best effort
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await refreshSubscriptions();
|
||||||
|
},
|
||||||
|
[currentSubscriptionId, refreshSubscriptions]
|
||||||
|
);
|
||||||
|
|
||||||
|
const testPush = useCallback(async (subscriptionId: string) => {
|
||||||
|
try {
|
||||||
|
await api.testPushSubscription(subscriptionId);
|
||||||
|
toast.success('Test notification sent');
|
||||||
|
} catch {
|
||||||
|
toast.error('Test notification failed');
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return {
|
||||||
|
isSupported,
|
||||||
|
isSubscribed: !!currentSubscriptionId,
|
||||||
|
currentSubscriptionId,
|
||||||
|
allSubscriptions,
|
||||||
|
pushConversations,
|
||||||
|
loading,
|
||||||
|
subscribe,
|
||||||
|
unsubscribe,
|
||||||
|
toggleConversation,
|
||||||
|
isConversationPushEnabled,
|
||||||
|
deleteSubscription,
|
||||||
|
testPush,
|
||||||
|
refreshSubscriptions,
|
||||||
|
refreshConversations,
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -12,6 +12,7 @@ import { getStateKey } from '../utils/conversationState';
|
|||||||
import { mergeContactIntoList } from '../utils/contactMerge';
|
import { mergeContactIntoList } from '../utils/contactMerge';
|
||||||
import { getContactDisplayName } from '../utils/pubkey';
|
import { getContactDisplayName } from '../utils/pubkey';
|
||||||
import { appendRawPacketUnique } from '../utils/rawPacketIdentity';
|
import { appendRawPacketUnique } from '../utils/rawPacketIdentity';
|
||||||
|
import { emitStatusDotPulse } from '../utils/statusDotPulse';
|
||||||
import type {
|
import type {
|
||||||
Channel,
|
Channel,
|
||||||
Contact,
|
Contact,
|
||||||
@@ -253,6 +254,7 @@ export function useRealtimeAppState({
|
|||||||
},
|
},
|
||||||
onRawPacket: (packet: RawPacket) => {
|
onRawPacket: (packet: RawPacket) => {
|
||||||
recordRawPacketObservation?.(packet);
|
recordRawPacketObservation?.(packet);
|
||||||
|
emitStatusDotPulse(packet.payload_type);
|
||||||
setRawPackets((prev) => appendRawPacketUnique(prev, packet, maxRawPackets));
|
setRawPackets((prev) => appendRawPacketUnique(prev, packet, maxRawPackets));
|
||||||
},
|
},
|
||||||
onMessageAcked: (
|
onMessageAcked: (
|
||||||
|
|||||||
+12
-2
@@ -4,15 +4,25 @@ import { App } from './App';
|
|||||||
import './index.css';
|
import './index.css';
|
||||||
import './themes.css';
|
import './themes.css';
|
||||||
import './styles.css';
|
import './styles.css';
|
||||||
import { getSavedTheme, applyTheme } from './utils/theme';
|
import { getSavedTheme, applyTheme, initFollowOSListener } from './utils/theme';
|
||||||
import { applyFontScale, getSavedFontScale } from './utils/fontScale';
|
import { applyFontScale, getSavedFontScale } from './utils/fontScale';
|
||||||
|
import { PushSubscriptionProvider } from './contexts/PushSubscriptionContext';
|
||||||
|
|
||||||
// Apply saved theme before first render
|
// Apply saved theme before first render
|
||||||
applyTheme(getSavedTheme());
|
applyTheme(getSavedTheme());
|
||||||
|
// Re-apply when the OS color-scheme preference changes, if on "Follow OS".
|
||||||
|
initFollowOSListener();
|
||||||
applyFontScale(getSavedFontScale());
|
applyFontScale(getSavedFontScale());
|
||||||
|
|
||||||
createRoot(document.getElementById('root')!).render(
|
createRoot(document.getElementById('root')!).render(
|
||||||
<StrictMode>
|
<StrictMode>
|
||||||
<App />
|
<PushSubscriptionProvider>
|
||||||
|
<App />
|
||||||
|
</PushSubscriptionProvider>
|
||||||
</StrictMode>
|
</StrictMode>
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Register service worker for Web Push (requires secure context)
|
||||||
|
if ('serviceWorker' in navigator && window.isSecureContext) {
|
||||||
|
navigator.serviceWorker.register('./sw.js').catch(() => {});
|
||||||
|
}
|
||||||
|
|||||||
@@ -29,6 +29,13 @@ const mocks = vi.hoisted(() => ({
|
|||||||
success: vi.fn(),
|
success: vi.fn(),
|
||||||
error: vi.fn(),
|
error: vi.fn(),
|
||||||
},
|
},
|
||||||
|
push: {
|
||||||
|
isSupported: false,
|
||||||
|
isSubscribed: false,
|
||||||
|
subscribe: vi.fn<() => Promise<string | null>>(async () => null),
|
||||||
|
toggleConversation: vi.fn(async () => {}),
|
||||||
|
isConversationPushEnabled: vi.fn(() => false),
|
||||||
|
},
|
||||||
hookFns: {
|
hookFns: {
|
||||||
fetchOlderMessages: vi.fn(async () => {}),
|
fetchOlderMessages: vi.fn(async () => {}),
|
||||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||||
@@ -51,6 +58,25 @@ vi.mock('../useWebSocket', () => ({
|
|||||||
useWebSocket: vi.fn(),
|
useWebSocket: vi.fn(),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
vi.mock('../contexts/PushSubscriptionContext', () => ({
|
||||||
|
usePush: () => ({
|
||||||
|
isSupported: mocks.push.isSupported,
|
||||||
|
isSubscribed: mocks.push.isSubscribed,
|
||||||
|
currentSubscriptionId: mocks.push.isSubscribed ? 'sub-1' : null,
|
||||||
|
allSubscriptions: [],
|
||||||
|
pushConversations: [],
|
||||||
|
loading: false,
|
||||||
|
subscribe: mocks.push.subscribe,
|
||||||
|
unsubscribe: vi.fn(async () => {}),
|
||||||
|
toggleConversation: mocks.push.toggleConversation,
|
||||||
|
isConversationPushEnabled: mocks.push.isConversationPushEnabled,
|
||||||
|
deleteSubscription: vi.fn(async () => {}),
|
||||||
|
testPush: vi.fn(async () => {}),
|
||||||
|
refreshSubscriptions: vi.fn(async () => []),
|
||||||
|
refreshConversations: vi.fn(async () => {}),
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
vi.mock('../hooks', async (importOriginal) => {
|
vi.mock('../hooks', async (importOriginal) => {
|
||||||
const actual = await importOriginal<typeof import('../hooks')>();
|
const actual = await importOriginal<typeof import('../hooks')>();
|
||||||
return {
|
return {
|
||||||
@@ -209,6 +235,10 @@ const publicChannel = {
|
|||||||
describe('App favorite toggle flow', () => {
|
describe('App favorite toggle flow', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
mocks.push.isSupported = false;
|
||||||
|
mocks.push.isSubscribed = false;
|
||||||
|
mocks.push.subscribe.mockResolvedValue(null);
|
||||||
|
mocks.push.isConversationPushEnabled.mockReturnValue(false);
|
||||||
|
|
||||||
mocks.api.getRadioConfig.mockResolvedValue(baseConfig);
|
mocks.api.getRadioConfig.mockResolvedValue(baseConfig);
|
||||||
mocks.api.getSettings.mockResolvedValue({ ...baseSettings });
|
mocks.api.getSettings.mockResolvedValue({ ...baseSettings });
|
||||||
@@ -313,4 +343,44 @@ describe('App favorite toggle flow', () => {
|
|||||||
expect(screen.queryByTestId('settings-modal-section')).not.toBeInTheDocument();
|
expect(screen.queryByTestId('settings-modal-section')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('subscribes this browser before enabling web push for a conversation', async () => {
|
||||||
|
mocks.push.isSupported = true;
|
||||||
|
mocks.push.isSubscribed = false;
|
||||||
|
mocks.push.subscribe.mockResolvedValue('sub-1');
|
||||||
|
|
||||||
|
render(<App />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByRole('button', { name: 'Notification settings' })).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||||
|
fireEvent.click(screen.getByRole('checkbox', { name: /web push/i }));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mocks.push.subscribe).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.push.toggleConversation).toHaveBeenCalledWith(`channel-${publicChannel.key}`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not enable web push when subscription setup fails', async () => {
|
||||||
|
mocks.push.isSupported = true;
|
||||||
|
mocks.push.isSubscribed = false;
|
||||||
|
mocks.push.subscribe.mockResolvedValue(null);
|
||||||
|
|
||||||
|
render(<App />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByRole('button', { name: 'Notification settings' })).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||||
|
fireEvent.click(screen.getByRole('checkbox', { name: /web push/i }));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mocks.push.subscribe).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
expect(mocks.push.toggleConversation).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -43,6 +43,6 @@ describe('BulkAddChannelResultModal', () => {
|
|||||||
expect(opsLink.getAttribute('href')).toContain('#channel/');
|
expect(opsLink.getAttribute('href')).toContain('#channel/');
|
||||||
expect(meshLink.getAttribute('href')).toContain('#channel/');
|
expect(meshLink.getAttribute('href')).toContain('#channel/');
|
||||||
expect(screen.queryByRole('link', { name: /bad_room/i })).toBeNull();
|
expect(screen.queryByRole('link', { name: /bad_room/i })).toBeNull();
|
||||||
expect(screen.getByText(/Ignored invalid room names: bad_room/)).toBeTruthy();
|
expect(screen.getByText(/Ignored invalid channel names: bad_room/)).toBeTruthy();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -150,7 +150,7 @@ describe('ChatHeader key visibility', () => {
|
|||||||
expect(screen.getAllByText('#Esperance')).toHaveLength(2);
|
expect(screen.getAllByText('#Esperance')).toHaveLength(2);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows enabled notification state and toggles when clicked', () => {
|
it('shows filled bell when notifications are enabled and toggles via dropdown', () => {
|
||||||
const conversation: Conversation = { type: 'contact', id: '11'.repeat(32), name: 'Alice' };
|
const conversation: Conversation = { type: 'contact', id: '11'.repeat(32), name: 'Alice' };
|
||||||
const onToggleNotifications = vi.fn();
|
const onToggleNotifications = vi.fn();
|
||||||
|
|
||||||
@@ -164,12 +164,40 @@ describe('ChatHeader key visibility', () => {
|
|||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
|
||||||
fireEvent.click(screen.getByText('Notifications On'));
|
// Bell button should be present; open the dropdown
|
||||||
|
const bellBtn = screen.getByRole('button', { name: 'Notification settings' });
|
||||||
|
fireEvent.click(bellBtn);
|
||||||
|
|
||||||
expect(screen.getByText('Notifications On')).toBeInTheDocument();
|
// Desktop notifications checkbox should be checked
|
||||||
|
const checkbox = screen.getByRole('checkbox', { name: /desktop notifications/i });
|
||||||
|
expect(checkbox).toBeChecked();
|
||||||
|
|
||||||
|
// Toggling calls the handler
|
||||||
|
fireEvent.click(checkbox);
|
||||||
expect(onToggleNotifications).toHaveBeenCalledTimes(1);
|
expect(onToggleNotifications).toHaveBeenCalledTimes(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('keeps desktop notifications available when web push is also supported', () => {
|
||||||
|
const conversation: Conversation = { type: 'contact', id: '13'.repeat(32), name: 'Alice' };
|
||||||
|
|
||||||
|
render(
|
||||||
|
<ChatHeader
|
||||||
|
{...baseProps}
|
||||||
|
conversation={conversation}
|
||||||
|
channels={[]}
|
||||||
|
pushSupported
|
||||||
|
pushSubscribed
|
||||||
|
pushEnabledForConversation
|
||||||
|
onTogglePush={vi.fn()}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: 'Notification settings' }));
|
||||||
|
|
||||||
|
expect(screen.getByRole('checkbox', { name: /desktop notifications/i })).toBeInTheDocument();
|
||||||
|
expect(screen.getByRole('checkbox', { name: /web push/i })).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('hides trace and notification controls for room-server contacts', () => {
|
it('hides trace and notification controls for room-server contacts', () => {
|
||||||
const pubKey = '41'.repeat(32);
|
const pubKey = '41'.repeat(32);
|
||||||
const contact: Contact = {
|
const contact: Contact = {
|
||||||
@@ -198,9 +226,7 @@ describe('ChatHeader key visibility', () => {
|
|||||||
|
|
||||||
expect(screen.queryByRole('button', { name: 'Path Discovery' })).not.toBeInTheDocument();
|
expect(screen.queryByRole('button', { name: 'Path Discovery' })).not.toBeInTheDocument();
|
||||||
expect(screen.queryByRole('button', { name: 'Direct Trace' })).not.toBeInTheDocument();
|
expect(screen.queryByRole('button', { name: 'Direct Trace' })).not.toBeInTheDocument();
|
||||||
expect(
|
expect(screen.queryByRole('button', { name: 'Notification settings' })).not.toBeInTheDocument();
|
||||||
screen.queryByRole('button', { name: 'Enable notifications for this conversation' })
|
|
||||||
).not.toBeInTheDocument();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('hides the delete button for the canonical Public channel', () => {
|
it('hides the delete button for the canonical Public channel', () => {
|
||||||
|
|||||||
@@ -145,6 +145,7 @@ function createProps(overrides: Partial<React.ComponentProps<typeof Conversation
|
|||||||
onDeleteContact: vi.fn(async () => {}),
|
onDeleteContact: vi.fn(async () => {}),
|
||||||
onDeleteChannel: vi.fn(async () => {}),
|
onDeleteChannel: vi.fn(async () => {}),
|
||||||
onSetChannelFloodScopeOverride: vi.fn(async () => {}),
|
onSetChannelFloodScopeOverride: vi.fn(async () => {}),
|
||||||
|
onSelectConversation: vi.fn(),
|
||||||
onOpenContactInfo: vi.fn(),
|
onOpenContactInfo: vi.fn(),
|
||||||
onOpenChannelInfo: vi.fn(),
|
onOpenChannelInfo: vi.fn(),
|
||||||
onSenderClick: vi.fn(),
|
onSenderClick: vi.fn(),
|
||||||
@@ -378,7 +379,7 @@ describe('ConversationPane', () => {
|
|||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(screen.getByText(/A full identity profile is not yet available/i)).toBeInTheDocument();
|
expect(screen.getByText(/profile details.*haven't arrived yet/i)).toBeInTheDocument();
|
||||||
expect(screen.getByTestId('message-input')).toBeInTheDocument();
|
expect(screen.getByTestId('message-input')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -415,7 +416,9 @@ describe('ConversationPane', () => {
|
|||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(screen.getByText(/This conversation is read-only/i)).toBeInTheDocument();
|
expect(
|
||||||
|
screen.getByText(/Sending is disabled until their identity is confirmed/i)
|
||||||
|
).toBeInTheDocument();
|
||||||
expect(screen.queryByTestId('message-input')).not.toBeInTheDocument();
|
expect(screen.queryByTestId('message-input')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ vi.mock('../api', () => ({
|
|||||||
deleteFanoutConfig: vi.fn(),
|
deleteFanoutConfig: vi.fn(),
|
||||||
getChannels: vi.fn(),
|
getChannels: vi.fn(),
|
||||||
getContacts: vi.fn(),
|
getContacts: vi.fn(),
|
||||||
|
getSettings: vi.fn(),
|
||||||
getRadioConfig: vi.fn(),
|
getRadioConfig: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
@@ -97,6 +98,20 @@ beforeEach(() => {
|
|||||||
mockedApi.getFanoutConfigs.mockResolvedValue([]);
|
mockedApi.getFanoutConfigs.mockResolvedValue([]);
|
||||||
mockedApi.getChannels.mockResolvedValue([]);
|
mockedApi.getChannels.mockResolvedValue([]);
|
||||||
mockedApi.getContacts.mockResolvedValue([]);
|
mockedApi.getContacts.mockResolvedValue([]);
|
||||||
|
mockedApi.getSettings.mockResolvedValue({
|
||||||
|
max_radio_contacts: 200,
|
||||||
|
auto_decrypt_dm_on_advert: true,
|
||||||
|
last_message_times: {},
|
||||||
|
advert_interval: 0,
|
||||||
|
last_advert_time: 0,
|
||||||
|
flood_scope: '',
|
||||||
|
blocked_keys: [],
|
||||||
|
blocked_names: [],
|
||||||
|
discovery_blocked_types: [],
|
||||||
|
tracked_telemetry_repeaters: [],
|
||||||
|
auto_resend_channel: false,
|
||||||
|
telemetry_interval_hours: 8,
|
||||||
|
});
|
||||||
mockedApi.getRadioConfig.mockResolvedValue({
|
mockedApi.getRadioConfig.mockResolvedValue({
|
||||||
public_key: 'aa'.repeat(32),
|
public_key: 'aa'.repeat(32),
|
||||||
name: 'TestNode',
|
name: 'TestNode',
|
||||||
@@ -118,7 +133,7 @@ describe('SettingsFanoutSection', () => {
|
|||||||
const optionButtons = within(dialog)
|
const optionButtons = within(dialog)
|
||||||
.getAllByRole('button')
|
.getAllByRole('button')
|
||||||
.filter((button) => button.hasAttribute('aria-pressed'));
|
.filter((button) => button.hasAttribute('aria-pressed'));
|
||||||
expect(optionButtons).toHaveLength(10);
|
expect(optionButtons).toHaveLength(11);
|
||||||
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Close' })).toBeInTheDocument();
|
||||||
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
expect(within(dialog).getByRole('button', { name: 'Create' })).toBeInTheDocument();
|
||||||
expect(
|
expect(
|
||||||
@@ -975,6 +990,90 @@ describe('SettingsFanoutSection', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows Home Assistant topic summary with device-key-derived node ids', async () => {
|
||||||
|
mockedApi.getContacts.mockResolvedValue([
|
||||||
|
{
|
||||||
|
public_key: 'bb'.repeat(32),
|
||||||
|
name: 'Alice',
|
||||||
|
type: 1,
|
||||||
|
flags: 0,
|
||||||
|
direct_path: null,
|
||||||
|
direct_path_len: -1,
|
||||||
|
direct_path_hash_mode: -1,
|
||||||
|
direct_path_updated_at: null,
|
||||||
|
route_override_path: null,
|
||||||
|
route_override_len: null,
|
||||||
|
route_override_hash_mode: null,
|
||||||
|
last_advert: null,
|
||||||
|
lat: null,
|
||||||
|
lon: null,
|
||||||
|
last_seen: null,
|
||||||
|
on_radio: false,
|
||||||
|
last_contacted: null,
|
||||||
|
first_seen: null,
|
||||||
|
last_read_at: null,
|
||||||
|
favorite: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
public_key: 'cc'.repeat(32),
|
||||||
|
name: 'Repeater One',
|
||||||
|
type: 2,
|
||||||
|
flags: 0,
|
||||||
|
direct_path: null,
|
||||||
|
direct_path_len: -1,
|
||||||
|
direct_path_hash_mode: -1,
|
||||||
|
direct_path_updated_at: null,
|
||||||
|
route_override_path: null,
|
||||||
|
route_override_len: null,
|
||||||
|
route_override_hash_mode: null,
|
||||||
|
last_advert: null,
|
||||||
|
lat: null,
|
||||||
|
lon: null,
|
||||||
|
last_seen: null,
|
||||||
|
on_radio: false,
|
||||||
|
last_contacted: null,
|
||||||
|
first_seen: null,
|
||||||
|
last_read_at: null,
|
||||||
|
favorite: false,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
mockedApi.getSettings.mockResolvedValue({
|
||||||
|
max_radio_contacts: 200,
|
||||||
|
auto_decrypt_dm_on_advert: true,
|
||||||
|
last_message_times: {},
|
||||||
|
advert_interval: 0,
|
||||||
|
last_advert_time: 0,
|
||||||
|
flood_scope: '',
|
||||||
|
blocked_keys: [],
|
||||||
|
blocked_names: [],
|
||||||
|
discovery_blocked_types: [],
|
||||||
|
tracked_telemetry_repeaters: ['cc'.repeat(32)],
|
||||||
|
auto_resend_channel: false,
|
||||||
|
telemetry_interval_hours: 8,
|
||||||
|
});
|
||||||
|
|
||||||
|
renderSection();
|
||||||
|
await openCreateIntegrationDialog();
|
||||||
|
selectCreateIntegration('Home Assistant MQTT Discovery');
|
||||||
|
confirmCreateIntegration();
|
||||||
|
|
||||||
|
expect(await screen.findByText('Published Topic Summary')).toBeInTheDocument();
|
||||||
|
|
||||||
|
fireEvent.click(await screen.findByLabelText(/Alice/));
|
||||||
|
fireEvent.click(await screen.findByLabelText(/Repeater One/));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getAllByText('node id aaaaaaaaaaaa').length).toBeGreaterThanOrEqual(2);
|
||||||
|
expect(screen.getByText('node id bbbbbbbbbbbb')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('node id cccccccccccc')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(screen.getByText('meshcore/aaaaaaaaaaaa/health')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('meshcore/aaaaaaaaaaaa/events/message')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('meshcore/bbbbbbbbbbbb/gps')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('meshcore/cccccccccccc/telemetry')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('LetsMesh (US) preset pre-fills the expected broker defaults', async () => {
|
it('LetsMesh (US) preset pre-fills the expected broker defaults', async () => {
|
||||||
const createdConfig: FanoutConfig = {
|
const createdConfig: FanoutConfig = {
|
||||||
id: 'comm-letsmesh-us',
|
id: 'comm-letsmesh-us',
|
||||||
|
|||||||
@@ -1,26 +1,43 @@
|
|||||||
import { forwardRef } from 'react';
|
import { forwardRef } from 'react';
|
||||||
import { render, screen } from '@testing-library/react';
|
import { fireEvent, render, screen } from '@testing-library/react';
|
||||||
import { describe, expect, it, vi } from 'vitest';
|
import { describe, expect, it, vi } from 'vitest';
|
||||||
import { MapView } from '../components/MapView';
|
import { MapView } from '../components/MapView';
|
||||||
import type { Contact } from '../types';
|
import type { Contact } from '../types';
|
||||||
|
|
||||||
vi.mock('react-leaflet', () => ({
|
vi.mock('react-leaflet', () => {
|
||||||
MapContainer: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
const BaseLayer = ({
|
||||||
TileLayer: () => null,
|
children,
|
||||||
CircleMarker: forwardRef<
|
}: {
|
||||||
HTMLDivElement,
|
children: React.ReactNode;
|
||||||
{ children: React.ReactNode; pathOptions?: { fillColor?: string } }
|
name: string;
|
||||||
>(({ children, pathOptions }, ref) => (
|
checked?: boolean;
|
||||||
<div ref={ref} data-fill-color={pathOptions?.fillColor}>
|
}) => <div>{children}</div>;
|
||||||
{children}
|
const LayersControlMock = ({ children }: { children: React.ReactNode }) => <div>{children}</div>;
|
||||||
</div>
|
(LayersControlMock as unknown as { BaseLayer: typeof BaseLayer }).BaseLayer = BaseLayer;
|
||||||
)),
|
return {
|
||||||
Popup: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
MapContainer: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||||
useMap: () => ({
|
TileLayer: () => null,
|
||||||
setView: vi.fn(),
|
CircleMarker: forwardRef<
|
||||||
fitBounds: vi.fn(),
|
HTMLDivElement,
|
||||||
}),
|
{ children: React.ReactNode; pathOptions?: { fillColor?: string } }
|
||||||
}));
|
>(({ children, pathOptions }, ref) => (
|
||||||
|
<div ref={ref} data-fill-color={pathOptions?.fillColor}>
|
||||||
|
{children}
|
||||||
|
</div>
|
||||||
|
)),
|
||||||
|
Popup: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
|
||||||
|
Polyline: () => null,
|
||||||
|
LayersControl: LayersControlMock,
|
||||||
|
useMap: () => ({
|
||||||
|
setView: vi.fn(),
|
||||||
|
fitBounds: vi.fn(),
|
||||||
|
setMaxZoom: vi.fn(),
|
||||||
|
setZoom: vi.fn(),
|
||||||
|
getZoom: vi.fn(() => 2),
|
||||||
|
}),
|
||||||
|
useMapEvents: () => null,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
describe('MapView', () => {
|
describe('MapView', () => {
|
||||||
it('renders a never-heard fallback for a focused contact without last_seen', () => {
|
it('renders a never-heard fallback for a focused contact without last_seen', () => {
|
||||||
@@ -54,6 +71,68 @@ describe('MapView', () => {
|
|||||||
expect(screen.getByText('Last heard: Never heard by this server')).toBeInTheDocument();
|
expect(screen.getByText('Last heard: Never heard by this server')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('invokes onSelectContact when the popup name is clicked', () => {
|
||||||
|
const contact: Contact = {
|
||||||
|
public_key: 'cc'.repeat(32),
|
||||||
|
name: 'Clickable',
|
||||||
|
type: 1,
|
||||||
|
flags: 0,
|
||||||
|
direct_path: null,
|
||||||
|
direct_path_len: -1,
|
||||||
|
direct_path_hash_mode: -1,
|
||||||
|
route_override_path: null,
|
||||||
|
route_override_len: null,
|
||||||
|
route_override_hash_mode: null,
|
||||||
|
last_advert: null,
|
||||||
|
lat: 42,
|
||||||
|
lon: -72,
|
||||||
|
last_seen: Math.floor(Date.now() / 1000),
|
||||||
|
on_radio: false,
|
||||||
|
favorite: false,
|
||||||
|
last_contacted: null,
|
||||||
|
last_read_at: null,
|
||||||
|
first_seen: null,
|
||||||
|
};
|
||||||
|
const onSelectContact = vi.fn();
|
||||||
|
|
||||||
|
render(<MapView contacts={[contact]} onSelectContact={onSelectContact} />);
|
||||||
|
|
||||||
|
const link = screen.getByRole('button', { name: 'Clickable' });
|
||||||
|
expect(link).toHaveAttribute('title', 'Open conversation with Clickable');
|
||||||
|
fireEvent.click(link);
|
||||||
|
|
||||||
|
expect(onSelectContact).toHaveBeenCalledWith(contact);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('renders the popup name as plain text when no onSelectContact is provided', () => {
|
||||||
|
const contact: Contact = {
|
||||||
|
public_key: 'dd'.repeat(32),
|
||||||
|
name: 'Static',
|
||||||
|
type: 1,
|
||||||
|
flags: 0,
|
||||||
|
direct_path: null,
|
||||||
|
direct_path_len: -1,
|
||||||
|
direct_path_hash_mode: -1,
|
||||||
|
route_override_path: null,
|
||||||
|
route_override_len: null,
|
||||||
|
route_override_hash_mode: null,
|
||||||
|
last_advert: null,
|
||||||
|
lat: 42,
|
||||||
|
lon: -72,
|
||||||
|
last_seen: Math.floor(Date.now() / 1000),
|
||||||
|
on_radio: false,
|
||||||
|
favorite: false,
|
||||||
|
last_contacted: null,
|
||||||
|
last_read_at: null,
|
||||||
|
first_seen: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
render(<MapView contacts={[contact]} />);
|
||||||
|
|
||||||
|
expect(screen.queryByRole('button', { name: /open conversation with static/i })).toBeNull();
|
||||||
|
expect(screen.getByText('Static')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('keeps the 7-day cutoff stable for the lifetime of the mounted map', () => {
|
it('keeps the 7-day cutoff stable for the lifetime of the mounted map', () => {
|
||||||
vi.useFakeTimers();
|
vi.useFakeTimers();
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -220,6 +220,24 @@ describe('MessageList channel sender rendering', () => {
|
|||||||
expect(onChannelReferenceClick).toHaveBeenCalledWith('#ops-room');
|
expect(onChannelReferenceClick).toHaveBeenCalledWith('#ops-room');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('does not strip colon-prefixed text in direct messages (issue #198)', () => {
|
||||||
|
render(
|
||||||
|
<MessageList
|
||||||
|
messages={[
|
||||||
|
createMessage({
|
||||||
|
type: 'PRIV',
|
||||||
|
conversation_key: 'ab'.repeat(32),
|
||||||
|
text: 'TEST1: TEST2',
|
||||||
|
}),
|
||||||
|
]}
|
||||||
|
contacts={[]}
|
||||||
|
loading={false}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('TEST1: TEST2')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('renders and dismisses an unread marker at the first unread message boundary', async () => {
|
it('renders and dismisses an unread marker at the first unread message boundary', async () => {
|
||||||
const user = userEvent.setup();
|
const user = userEvent.setup();
|
||||||
const messages = [
|
const messages = [
|
||||||
|
|||||||
@@ -119,7 +119,7 @@ describe('NewMessageModal form reset', () => {
|
|||||||
expect(screen.queryByRole('tab', { name: 'Bulk Add Channel' })).toBeNull();
|
expect(screen.queryByRole('tab', { name: 'Bulk Add Channel' })).toBeNull();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('opens on the bulk tab when enabled and submits normalized room names', async () => {
|
it('opens on the bulk tab when enabled and submits normalized channel names', async () => {
|
||||||
const user = userEvent.setup();
|
const user = userEvent.setup();
|
||||||
renderModal(true, { showBulkAddChannelTab: true });
|
renderModal(true, { showBulkAddChannelTab: true });
|
||||||
|
|
||||||
@@ -145,7 +145,7 @@ describe('NewMessageModal form reset', () => {
|
|||||||
expect(onClose).toHaveBeenCalled();
|
expect(onClose).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows invalid bulk room names before submitting', async () => {
|
it('shows invalid bulk channel names before submitting', async () => {
|
||||||
const user = userEvent.setup();
|
const user = userEvent.setup();
|
||||||
renderModal(true, { showBulkAddChannelTab: true });
|
renderModal(true, { showBulkAddChannelTab: true });
|
||||||
|
|
||||||
@@ -156,7 +156,7 @@ describe('NewMessageModal form reset', () => {
|
|||||||
await user.click(screen.getByRole('button', { name: 'Add Channels' }));
|
await user.click(screen.getByRole('button', { name: 'Add Channels' }));
|
||||||
|
|
||||||
expect(onBulkAddHashtagChannels).not.toHaveBeenCalled();
|
expect(onBulkAddHashtagChannels).not.toHaveBeenCalled();
|
||||||
expect(screen.getByText('Invalid room names: bad_room')).toBeTruthy();
|
expect(screen.getByText('Invalid channel names: bad_room')).toBeTruthy();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react';
|
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||||
|
|
||||||
import { SettingsModal } from '../components/SettingsModal';
|
import { SettingsModal } from '../components/SettingsModal';
|
||||||
@@ -70,6 +70,7 @@ const baseSettings: AppSettings = {
|
|||||||
discovery_blocked_types: [],
|
discovery_blocked_types: [],
|
||||||
tracked_telemetry_repeaters: [],
|
tracked_telemetry_repeaters: [],
|
||||||
auto_resend_channel: false,
|
auto_resend_channel: false,
|
||||||
|
telemetry_interval_hours: 8,
|
||||||
};
|
};
|
||||||
|
|
||||||
function renderModal(overrides?: {
|
function renderModal(overrides?: {
|
||||||
@@ -333,7 +334,7 @@ describe('SettingsModal', () => {
|
|||||||
fireEvent.change(screen.getByLabelText('Advert Location Source'), {
|
fireEvent.change(screen.getByLabelText('Advert Location Source'), {
|
||||||
target: { value: 'off' },
|
target: { value: 'off' },
|
||||||
});
|
});
|
||||||
fireEvent.click(screen.getByRole('button', { name: 'Save' }));
|
fireEvent.click(screen.getByRole('button', { name: 'Save Radio Config' }));
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(onSave).toHaveBeenCalledWith(
|
expect(onSave).toHaveBeenCalledWith(
|
||||||
@@ -347,7 +348,7 @@ describe('SettingsModal', () => {
|
|||||||
openRadioSection();
|
openRadioSection();
|
||||||
|
|
||||||
fireEvent.click(screen.getByLabelText('Extra Direct ACK Transmission'));
|
fireEvent.click(screen.getByLabelText('Extra Direct ACK Transmission'));
|
||||||
fireEvent.click(screen.getByRole('button', { name: 'Save' }));
|
fireEvent.click(screen.getByRole('button', { name: 'Save Radio Config' }));
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(onSave).toHaveBeenCalledWith(expect.objectContaining({ multi_acks_enabled: true }));
|
expect(onSave).toHaveBeenCalledWith(expect.objectContaining({ multi_acks_enabled: true }));
|
||||||
@@ -361,8 +362,8 @@ describe('SettingsModal', () => {
|
|||||||
const maxContactsInput = screen.getByLabelText('Max Contacts on Radio');
|
const maxContactsInput = screen.getByLabelText('Max Contacts on Radio');
|
||||||
fireEvent.change(maxContactsInput, { target: { value: '250' } });
|
fireEvent.change(maxContactsInput, { target: { value: '250' } });
|
||||||
|
|
||||||
// Click the "Save Settings" button in the Flood & Advert Control section
|
// Click the "Save Messaging Settings" button
|
||||||
const saveButtons = screen.getAllByRole('button', { name: 'Save Settings' });
|
const saveButtons = screen.getAllByRole('button', { name: 'Save Messaging Settings' });
|
||||||
fireEvent.click(saveButtons[0]);
|
fireEvent.click(saveButtons[0]);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
@@ -376,8 +377,8 @@ describe('SettingsModal', () => {
|
|||||||
});
|
});
|
||||||
openRadioSection();
|
openRadioSection();
|
||||||
|
|
||||||
// Click the "Save Settings" button in the Flood & Advert Control section
|
// Click the "Save Messaging Settings" button
|
||||||
const saveButtons = screen.getAllByRole('button', { name: 'Save Settings' });
|
const saveButtons = screen.getAllByRole('button', { name: 'Save Messaging Settings' });
|
||||||
fireEvent.click(saveButtons[0]);
|
fireEvent.click(saveButtons[0]);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
@@ -442,52 +443,86 @@ describe('SettingsModal', () => {
|
|||||||
expect(screen.getByText('iPhone')).toBeInTheDocument();
|
expect(screen.getByText('iPhone')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('clears stale errors when switching external desktop sections', async () => {
|
it('reverts checkbox state when auto-persist fails on the database section', async () => {
|
||||||
|
// Auto-persist replaced the old "Save Settings" button on this section.
|
||||||
|
// The risk is now: a toggle gets applied optimistically, the PATCH fails,
|
||||||
|
// and we're left with the UI out of sync with saved state. Verify the
|
||||||
|
// revert-on-error path keeps the checkbox consistent with the server.
|
||||||
const onSaveAppSettings = vi.fn(async () => {
|
const onSaveAppSettings = vi.fn(async () => {
|
||||||
throw new Error('Save failed');
|
throw new Error('Save failed');
|
||||||
});
|
});
|
||||||
|
|
||||||
const { view } = renderModal({
|
renderModal({
|
||||||
externalSidebarNav: true,
|
externalSidebarNav: true,
|
||||||
desktopSection: 'database',
|
desktopSection: 'database',
|
||||||
onSaveAppSettings,
|
onSaveAppSettings,
|
||||||
});
|
});
|
||||||
|
|
||||||
fireEvent.click(screen.getByRole('button', { name: 'Save Settings' }));
|
const checkbox = screen.getByRole('checkbox', {
|
||||||
|
name: /Auto-decrypt historical DMs/i,
|
||||||
|
}) as HTMLInputElement;
|
||||||
|
const initialChecked = checkbox.checked;
|
||||||
|
|
||||||
|
fireEvent.click(checkbox);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(screen.getByText('Save failed')).toBeInTheDocument();
|
expect(onSaveAppSettings).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(checkbox.checked).toBe(initialChecked);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('serializes rapid auto-persist clicks so stale writes cannot win', async () => {
|
||||||
|
// Regression test for a race where rapid consecutive checkbox toggles
|
||||||
|
// fire overlapping PATCHes that can land out of order. The page now
|
||||||
|
// chains saves through a single promise, so the server sees them in
|
||||||
|
// the order the user clicked. This test hand-controls resolution
|
||||||
|
// order to force the "stale write" scenario if serialization were off.
|
||||||
|
|
||||||
|
const deferred: { resolve: () => void }[] = [];
|
||||||
|
const callOrder: number[] = [];
|
||||||
|
|
||||||
|
const onSaveAppSettings = vi.fn(async (_update: unknown) => {
|
||||||
|
const index = deferred.length;
|
||||||
|
callOrder.push(index);
|
||||||
|
await new Promise<void>((res) => {
|
||||||
|
deferred.push({ resolve: res });
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
await act(async () => {
|
renderModal({
|
||||||
view.rerender(
|
externalSidebarNav: true,
|
||||||
<SettingsModal
|
desktopSection: 'database',
|
||||||
open
|
onSaveAppSettings,
|
||||||
externalSidebarNav
|
|
||||||
desktopSection="fanout"
|
|
||||||
config={baseConfig}
|
|
||||||
health={baseHealth}
|
|
||||||
appSettings={baseSettings}
|
|
||||||
onClose={vi.fn()}
|
|
||||||
onSave={vi.fn(async () => {})}
|
|
||||||
onSaveAppSettings={onSaveAppSettings}
|
|
||||||
onSetPrivateKey={vi.fn(async () => {})}
|
|
||||||
onReboot={vi.fn(async () => {})}
|
|
||||||
onDisconnect={vi.fn(async () => {})}
|
|
||||||
onReconnect={vi.fn(async () => {})}
|
|
||||||
onAdvertise={vi.fn(async () => {})}
|
|
||||||
meshDiscovery={null}
|
|
||||||
meshDiscoveryLoadingTarget={null}
|
|
||||||
onDiscoverMesh={vi.fn(async () => {})}
|
|
||||||
onHealthRefresh={vi.fn(async () => {})}
|
|
||||||
onRefreshAppSettings={vi.fn(async () => {})}
|
|
||||||
/>
|
|
||||||
);
|
|
||||||
await Promise.resolve();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(api.getFanoutConfigs).toHaveBeenCalled();
|
// Two distinct checkboxes in quick succession.
|
||||||
expect(screen.getByRole('button', { name: 'Add Integration' })).toBeInTheDocument();
|
const blockClients = screen.getByRole('checkbox', { name: /Block clients/i });
|
||||||
expect(screen.queryByText('Save failed')).not.toBeInTheDocument();
|
const blockRepeaters = screen.getByRole('checkbox', { name: /Block repeaters/i });
|
||||||
|
|
||||||
|
fireEvent.click(blockClients);
|
||||||
|
fireEvent.click(blockRepeaters);
|
||||||
|
|
||||||
|
// Wait for the first PATCH to be registered. Only the first should be
|
||||||
|
// in-flight — the second must be queued behind it.
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(deferred.length).toBe(1);
|
||||||
|
});
|
||||||
|
expect(callOrder).toEqual([0]);
|
||||||
|
|
||||||
|
// Resolve the first PATCH. The chain should now dispatch the second.
|
||||||
|
deferred[0].resolve();
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(deferred.length).toBe(2);
|
||||||
|
});
|
||||||
|
expect(callOrder).toEqual([0, 1]);
|
||||||
|
|
||||||
|
// Resolve the second so the test tears down cleanly.
|
||||||
|
deferred[1].resolve();
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(onSaveAppSettings).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('does not call onClose after save/reboot flows in page mode', async () => {
|
it('does not call onClose after save/reboot flows in page mode', async () => {
|
||||||
@@ -507,7 +542,7 @@ describe('SettingsModal', () => {
|
|||||||
});
|
});
|
||||||
openRadioSection();
|
openRadioSection();
|
||||||
|
|
||||||
fireEvent.click(screen.getByRole('button', { name: 'Save & Reboot' }));
|
fireEvent.click(screen.getByRole('button', { name: 'Save Radio Config & Reboot' }));
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(onSave).toHaveBeenCalledTimes(1);
|
expect(onSave).toHaveBeenCalledTimes(1);
|
||||||
expect(onReboot).toHaveBeenCalledTimes(1);
|
expect(onReboot).toHaveBeenCalledTimes(1);
|
||||||
@@ -531,7 +566,7 @@ describe('SettingsModal', () => {
|
|||||||
renderModal();
|
renderModal();
|
||||||
openLocalSection();
|
openLocalSection();
|
||||||
|
|
||||||
const checkbox = screen.getByLabelText('Reopen to last viewed channel/conversation');
|
const checkbox = screen.getByLabelText('Reopen Last Conversation');
|
||||||
expect(checkbox).not.toBeChecked();
|
expect(checkbox).not.toBeChecked();
|
||||||
|
|
||||||
fireEvent.click(checkbox);
|
fireEvent.click(checkbox);
|
||||||
|
|||||||
@@ -8,9 +8,12 @@ class ResizeObserver {
|
|||||||
|
|
||||||
globalThis.ResizeObserver = ResizeObserver;
|
globalThis.ResizeObserver = ResizeObserver;
|
||||||
|
|
||||||
// Several components call matchMedia at import time for responsive detection
|
// Several components call matchMedia at import time for responsive detection.
|
||||||
|
// Use a configurable descriptor so individual tests can override the stub.
|
||||||
if (typeof globalThis.matchMedia === 'undefined') {
|
if (typeof globalThis.matchMedia === 'undefined') {
|
||||||
Object.defineProperty(globalThis, 'matchMedia', {
|
Object.defineProperty(globalThis, 'matchMedia', {
|
||||||
|
configurable: true,
|
||||||
|
writable: true,
|
||||||
value: (query: string) => ({
|
value: (query: string) => ({
|
||||||
matches: false,
|
matches: false,
|
||||||
media: query,
|
media: query,
|
||||||
|
|||||||
@@ -513,6 +513,42 @@ describe('Sidebar section summaries', () => {
|
|||||||
expect(contactRows).toEqual(['DM Recent', 'Advert Only', 'No Recency']);
|
expect(contactRows).toEqual(['DM Recent', 'Advert Only', 'No Recency']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('floats contacts with unread DMs above read contacts regardless of recency', () => {
|
||||||
|
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||||
|
const readRecent = makeContact('11'.repeat(32), 'Read Recent', 1, { last_advert: 500 });
|
||||||
|
const unreadOld = makeContact('22'.repeat(32), 'Unread Old', 1, { last_advert: 100 });
|
||||||
|
|
||||||
|
render(
|
||||||
|
<Sidebar
|
||||||
|
contacts={[readRecent, unreadOld]}
|
||||||
|
channels={[publicChannel]}
|
||||||
|
activeConversation={null}
|
||||||
|
onSelectConversation={vi.fn()}
|
||||||
|
onNewMessage={vi.fn()}
|
||||||
|
lastMessageTimes={{
|
||||||
|
[getStateKey('contact', readRecent.public_key)]: 500,
|
||||||
|
[getStateKey('contact', unreadOld.public_key)]: 200,
|
||||||
|
}}
|
||||||
|
unreadCounts={{
|
||||||
|
[getStateKey('contact', unreadOld.public_key)]: 3,
|
||||||
|
}}
|
||||||
|
mentions={{}}
|
||||||
|
showCracker={false}
|
||||||
|
crackerRunning={false}
|
||||||
|
onToggleCracker={vi.fn()}
|
||||||
|
onMarkAllRead={vi.fn()}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
const contactRows = screen
|
||||||
|
.getAllByText(/^(Read Recent|Unread Old)$/)
|
||||||
|
.map((node) => node.textContent)
|
||||||
|
.filter((text): text is string => Boolean(text));
|
||||||
|
|
||||||
|
// Unread Old has unread DMs so it floats above Read Recent despite older recency
|
||||||
|
expect(contactRows).toEqual(['Unread Old', 'Read Recent']);
|
||||||
|
});
|
||||||
|
|
||||||
it('sorts repeaters by heard recency even when message times disagree', () => {
|
it('sorts repeaters by heard recency even when message times disagree', () => {
|
||||||
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||||
const staleMessageRelay = makeContact(
|
const staleMessageRelay = makeContact(
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { fireEvent, render, screen } from '@testing-library/react';
|
import { fireEvent, render, screen } from '@testing-library/react';
|
||||||
import { describe, expect, it, vi } from 'vitest';
|
import { afterEach, describe, expect, it, vi } from 'vitest';
|
||||||
|
|
||||||
import { StatusBar } from '../components/StatusBar';
|
import { StatusBar } from '../components/StatusBar';
|
||||||
import type { HealthStatus } from '../types';
|
import type { HealthStatus } from '../types';
|
||||||
@@ -77,4 +77,57 @@ describe('StatusBar', () => {
|
|||||||
expect(localStorage.getItem('remoteterm-theme')).toBe('original');
|
expect(localStorage.getItem('remoteterm-theme')).toBe('original');
|
||||||
expect(document.documentElement.dataset.theme).toBeUndefined();
|
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('with Follow OS theme saved', () => {
|
||||||
|
const originalMatchMedia = globalThis.matchMedia;
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
globalThis.matchMedia = originalMatchMedia;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Stub matchMedia so prefers-color-scheme: light returns the desired value.
|
||||||
|
const setPrefersLight = (isLight: boolean) => {
|
||||||
|
Object.defineProperty(globalThis, 'matchMedia', {
|
||||||
|
configurable: true,
|
||||||
|
value: (query: string) => ({
|
||||||
|
matches: query.includes('light') ? isLight : !isLight,
|
||||||
|
media: query,
|
||||||
|
onchange: null,
|
||||||
|
addListener: () => {},
|
||||||
|
removeListener: () => {},
|
||||||
|
addEventListener: () => {},
|
||||||
|
removeEventListener: () => {},
|
||||||
|
dispatchEvent: () => false,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
it('clicking toggle while OS prefers dark overrides follow-os into explicit light', () => {
|
||||||
|
setPrefersLight(false);
|
||||||
|
localStorage.setItem('remoteterm-theme', 'follow-os');
|
||||||
|
|
||||||
|
render(<StatusBar health={baseHealth} config={null} onSettingsClick={vi.fn()} />);
|
||||||
|
|
||||||
|
// OS is dark → effective is original → toggle offers "Switch to light theme"
|
||||||
|
const toggle = screen.getByRole('button', { name: 'Switch to light theme' });
|
||||||
|
fireEvent.click(toggle);
|
||||||
|
|
||||||
|
expect(localStorage.getItem('remoteterm-theme')).toBe('light');
|
||||||
|
expect(document.documentElement.dataset.theme).toBe('light');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('clicking toggle while OS prefers light overrides follow-os into explicit dark', () => {
|
||||||
|
setPrefersLight(true);
|
||||||
|
localStorage.setItem('remoteterm-theme', 'follow-os');
|
||||||
|
|
||||||
|
render(<StatusBar health={baseHealth} config={null} onSettingsClick={vi.fn()} />);
|
||||||
|
|
||||||
|
// OS is light → effective is light → toggle offers "Switch to classic theme"
|
||||||
|
const toggle = screen.getByRole('button', { name: 'Switch to classic theme' });
|
||||||
|
fireEvent.click(toggle);
|
||||||
|
|
||||||
|
expect(localStorage.getItem('remoteterm-theme')).toBe('original');
|
||||||
|
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -0,0 +1,87 @@
|
|||||||
|
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
FOLLOW_OS_THEME_ID,
|
||||||
|
THEMES,
|
||||||
|
applyTheme,
|
||||||
|
getEffectiveTheme,
|
||||||
|
getSavedTheme,
|
||||||
|
} from '../utils/theme';
|
||||||
|
|
||||||
|
const originalMatchMedia = globalThis.matchMedia;
|
||||||
|
|
||||||
|
function stubPrefersLight(isLight: boolean) {
|
||||||
|
Object.defineProperty(globalThis, 'matchMedia', {
|
||||||
|
configurable: true,
|
||||||
|
value: (query: string) => ({
|
||||||
|
matches: query.includes('light') ? isLight : !isLight,
|
||||||
|
media: query,
|
||||||
|
onchange: null,
|
||||||
|
addListener: () => {},
|
||||||
|
removeListener: () => {},
|
||||||
|
addEventListener: () => {},
|
||||||
|
removeEventListener: () => {},
|
||||||
|
dispatchEvent: () => false,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('theme module', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
localStorage.clear();
|
||||||
|
delete document.documentElement.dataset.theme;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
globalThis.matchMedia = originalMatchMedia;
|
||||||
|
});
|
||||||
|
|
||||||
|
it('exposes an OS-following theme in the selectable list', () => {
|
||||||
|
const followOS = THEMES.find((t) => t.id === FOLLOW_OS_THEME_ID);
|
||||||
|
expect(followOS).toBeDefined();
|
||||||
|
expect(followOS?.name).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('applyTheme("follow-os") resolves to light when OS prefers light', () => {
|
||||||
|
stubPrefersLight(true);
|
||||||
|
|
||||||
|
applyTheme(FOLLOW_OS_THEME_ID);
|
||||||
|
|
||||||
|
// Saved value is the follow-os preference, but the DOM reflects the resolved theme.
|
||||||
|
expect(localStorage.getItem('remoteterm-theme')).toBe(FOLLOW_OS_THEME_ID);
|
||||||
|
expect(getSavedTheme()).toBe(FOLLOW_OS_THEME_ID);
|
||||||
|
expect(document.documentElement.dataset.theme).toBe('light');
|
||||||
|
expect(getEffectiveTheme()).toBe('light');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('applyTheme("follow-os") resolves to original (dark) when OS prefers dark', () => {
|
||||||
|
stubPrefersLight(false);
|
||||||
|
|
||||||
|
applyTheme(FOLLOW_OS_THEME_ID);
|
||||||
|
|
||||||
|
expect(localStorage.getItem('remoteterm-theme')).toBe(FOLLOW_OS_THEME_ID);
|
||||||
|
// Original has no data-theme attribute, it's the default.
|
||||||
|
expect(document.documentElement.dataset.theme).toBeUndefined();
|
||||||
|
expect(getEffectiveTheme()).toBe('original');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('applyTheme updates the PWA meta theme-color to match the effective theme', () => {
|
||||||
|
// Seed the meta tag (jsdom base template has none).
|
||||||
|
const meta = document.createElement('meta');
|
||||||
|
meta.setAttribute('name', 'theme-color');
|
||||||
|
meta.setAttribute('content', '#000000');
|
||||||
|
document.head.appendChild(meta);
|
||||||
|
|
||||||
|
stubPrefersLight(true);
|
||||||
|
applyTheme(FOLLOW_OS_THEME_ID);
|
||||||
|
// Light theme's metaThemeColor
|
||||||
|
expect(meta.getAttribute('content')).toBe('#F8F7F4');
|
||||||
|
|
||||||
|
stubPrefersLight(false);
|
||||||
|
applyTheme(FOLLOW_OS_THEME_ID);
|
||||||
|
// Original theme's metaThemeColor
|
||||||
|
expect(meta.getAttribute('content')).toBe('#111419');
|
||||||
|
|
||||||
|
meta.remove();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -0,0 +1,203 @@
|
|||||||
|
import { act, renderHook, waitFor } from '@testing-library/react';
|
||||||
|
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||||
|
|
||||||
|
import { usePushSubscription } from '../hooks/usePushSubscription';
|
||||||
|
|
||||||
|
const mocks = vi.hoisted(() => ({
|
||||||
|
api: {
|
||||||
|
getPushSubscriptions: vi.fn(),
|
||||||
|
getPushConversations: vi.fn(),
|
||||||
|
getVapidPublicKey: vi.fn(),
|
||||||
|
pushSubscribe: vi.fn(),
|
||||||
|
deletePushSubscription: vi.fn(),
|
||||||
|
togglePushConversation: vi.fn(),
|
||||||
|
testPushSubscription: vi.fn(),
|
||||||
|
},
|
||||||
|
toast: {
|
||||||
|
success: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../api', () => ({
|
||||||
|
api: mocks.api,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../components/ui/sonner', () => ({
|
||||||
|
toast: mocks.toast,
|
||||||
|
}));
|
||||||
|
|
||||||
|
function bytesToBase64Url(bytes: number[]): string {
|
||||||
|
return btoa(String.fromCharCode(...bytes))
|
||||||
|
.replace(/\+/g, '-')
|
||||||
|
.replace(/\//g, '_')
|
||||||
|
.replace(/=+$/g, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('usePushSubscription', () => {
|
||||||
|
const vapidOldBytes = [1, 2, 3, 4];
|
||||||
|
const vapidNewBytes = [5, 6, 7, 8];
|
||||||
|
const oldKey = new Uint8Array(vapidOldBytes).buffer;
|
||||||
|
const newKeyBase64 = bytesToBase64Url(vapidNewBytes);
|
||||||
|
|
||||||
|
let activeSubscription: {
|
||||||
|
endpoint: string;
|
||||||
|
options: { applicationServerKey: ArrayBuffer };
|
||||||
|
toJSON: () => { endpoint: string; keys: { p256dh: string; auth: string } };
|
||||||
|
unsubscribe: ReturnType<typeof vi.fn>;
|
||||||
|
} | null;
|
||||||
|
let replacementSubscription: {
|
||||||
|
endpoint: string;
|
||||||
|
options: { applicationServerKey: ArrayBuffer };
|
||||||
|
toJSON: () => { endpoint: string; keys: { p256dh: string; auth: string } };
|
||||||
|
unsubscribe: ReturnType<typeof vi.fn>;
|
||||||
|
};
|
||||||
|
let getSubscriptionMock: ReturnType<typeof vi.fn>;
|
||||||
|
let subscribeMock: ReturnType<typeof vi.fn>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
activeSubscription = {
|
||||||
|
endpoint: 'https://push.example.test/sub-old',
|
||||||
|
options: { applicationServerKey: oldKey },
|
||||||
|
toJSON: () => ({
|
||||||
|
endpoint: 'https://push.example.test/sub-old',
|
||||||
|
keys: { p256dh: 'p256dh-old', auth: 'auth-old' },
|
||||||
|
}),
|
||||||
|
unsubscribe: vi.fn(async () => {
|
||||||
|
activeSubscription = null;
|
||||||
|
return true;
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
replacementSubscription = {
|
||||||
|
endpoint: 'https://push.example.test/sub-new',
|
||||||
|
options: { applicationServerKey: new Uint8Array(vapidNewBytes).buffer },
|
||||||
|
toJSON: () => ({
|
||||||
|
endpoint: 'https://push.example.test/sub-new',
|
||||||
|
keys: { p256dh: 'p256dh-new', auth: 'auth-new' },
|
||||||
|
}),
|
||||||
|
unsubscribe: vi.fn(async () => true),
|
||||||
|
};
|
||||||
|
|
||||||
|
getSubscriptionMock = vi.fn(async () => activeSubscription);
|
||||||
|
subscribeMock = vi.fn(async () => {
|
||||||
|
activeSubscription = replacementSubscription;
|
||||||
|
return replacementSubscription;
|
||||||
|
});
|
||||||
|
|
||||||
|
Object.defineProperty(window, 'isSecureContext', {
|
||||||
|
configurable: true,
|
||||||
|
value: true,
|
||||||
|
});
|
||||||
|
Object.defineProperty(window, 'PushManager', {
|
||||||
|
configurable: true,
|
||||||
|
value: function PushManager() {},
|
||||||
|
});
|
||||||
|
Object.defineProperty(window, 'Notification', {
|
||||||
|
configurable: true,
|
||||||
|
value: function Notification() {},
|
||||||
|
});
|
||||||
|
Object.defineProperty(navigator, 'serviceWorker', {
|
||||||
|
configurable: true,
|
||||||
|
value: {
|
||||||
|
ready: Promise.resolve({
|
||||||
|
pushManager: {
|
||||||
|
getSubscription: getSubscriptionMock,
|
||||||
|
subscribe: subscribeMock,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
mocks.api.getPushConversations.mockResolvedValue([]);
|
||||||
|
mocks.api.getPushSubscriptions.mockResolvedValue([
|
||||||
|
{
|
||||||
|
id: 'sub-1',
|
||||||
|
endpoint: 'https://push.example.test/sub-old',
|
||||||
|
p256dh: 'p256dh-old',
|
||||||
|
auth: 'auth-old',
|
||||||
|
label: 'Chrome on macOS',
|
||||||
|
created_at: 1,
|
||||||
|
last_success_at: null,
|
||||||
|
failure_count: 0,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
mocks.api.getVapidPublicKey.mockResolvedValue({ public_key: newKeyBase64 });
|
||||||
|
mocks.api.pushSubscribe.mockResolvedValue({
|
||||||
|
id: 'sub-2',
|
||||||
|
endpoint: 'https://push.example.test/sub-new',
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('clears currentSubscriptionId when refresh no longer finds this browser on the backend', async () => {
|
||||||
|
const { result } = renderHook(() => usePushSubscription());
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.currentSubscriptionId).toBe('sub-1');
|
||||||
|
expect(result.current.isSubscribed).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
mocks.api.getPushSubscriptions.mockResolvedValueOnce([]);
|
||||||
|
|
||||||
|
await act(async () => {
|
||||||
|
await result.current.refreshSubscriptions();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.currentSubscriptionId).toBeNull();
|
||||||
|
expect(result.current.isSubscribed).toBe(false);
|
||||||
|
expect(result.current.allSubscriptions).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('recreates a stale browser subscription when the server VAPID key changed', async () => {
|
||||||
|
const oldSubscription = activeSubscription;
|
||||||
|
mocks.api.getPushSubscriptions
|
||||||
|
.mockReset()
|
||||||
|
.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'sub-1',
|
||||||
|
endpoint: 'https://push.example.test/sub-old',
|
||||||
|
p256dh: 'p256dh-old',
|
||||||
|
auth: 'auth-old',
|
||||||
|
label: 'Chrome on macOS',
|
||||||
|
created_at: 1,
|
||||||
|
last_success_at: null,
|
||||||
|
failure_count: 0,
|
||||||
|
},
|
||||||
|
])
|
||||||
|
.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'sub-2',
|
||||||
|
endpoint: 'https://push.example.test/sub-new',
|
||||||
|
p256dh: 'p256dh-new',
|
||||||
|
auth: 'auth-new',
|
||||||
|
label: 'Chrome on macOS',
|
||||||
|
created_at: 2,
|
||||||
|
last_success_at: null,
|
||||||
|
failure_count: 0,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => usePushSubscription());
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.isSupported).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
await act(async () => {
|
||||||
|
await result.current.subscribe();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(oldSubscription?.unsubscribe).toHaveBeenCalledTimes(1);
|
||||||
|
expect(activeSubscription).toBe(replacementSubscription);
|
||||||
|
expect(subscribeMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.api.pushSubscribe).toHaveBeenCalledWith({
|
||||||
|
endpoint: 'https://push.example.test/sub-new',
|
||||||
|
p256dh: 'p256dh-new',
|
||||||
|
auth: 'auth-new',
|
||||||
|
label: expect.any(String),
|
||||||
|
});
|
||||||
|
expect(result.current.currentSubscriptionId).toBe('sub-2');
|
||||||
|
});
|
||||||
|
});
|
||||||
+30
-1
@@ -355,6 +355,7 @@ export interface AppSettings {
|
|||||||
discovery_blocked_types: number[];
|
discovery_blocked_types: number[];
|
||||||
tracked_telemetry_repeaters: string[];
|
tracked_telemetry_repeaters: string[];
|
||||||
auto_resend_channel: boolean;
|
auto_resend_channel: boolean;
|
||||||
|
telemetry_interval_hours: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AppSettingsUpdate {
|
export interface AppSettingsUpdate {
|
||||||
@@ -366,11 +367,22 @@ export interface AppSettingsUpdate {
|
|||||||
blocked_keys?: string[];
|
blocked_keys?: string[];
|
||||||
blocked_names?: string[];
|
blocked_names?: string[];
|
||||||
discovery_blocked_types?: number[];
|
discovery_blocked_types?: number[];
|
||||||
|
telemetry_interval_hours?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TelemetrySchedule {
|
||||||
|
preferred_hours: number;
|
||||||
|
effective_hours: number;
|
||||||
|
options: number[];
|
||||||
|
tracked_count: number;
|
||||||
|
max_tracked: number;
|
||||||
|
next_run_at: number | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TrackedTelemetryResponse {
|
export interface TrackedTelemetryResponse {
|
||||||
tracked_telemetry_repeaters: string[];
|
tracked_telemetry_repeaters: string[];
|
||||||
names: Record<string, string>;
|
names: Record<string, string>;
|
||||||
|
schedule: TelemetrySchedule;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Contact type constants */
|
/** Contact type constants */
|
||||||
@@ -487,9 +499,26 @@ export interface PaneState {
|
|||||||
fetched_at?: number | null;
|
fetched_at?: number | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface TelemetryLppSensor {
|
||||||
|
channel: number;
|
||||||
|
type_name: string;
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
|
||||||
export interface TelemetryHistoryEntry {
|
export interface TelemetryHistoryEntry {
|
||||||
timestamp: number;
|
timestamp: number;
|
||||||
data: Record<string, number>;
|
data: Record<string, number> & { lpp_sensors?: TelemetryLppSensor[] };
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PushSubscriptionInfo {
|
||||||
|
id: string;
|
||||||
|
endpoint: string;
|
||||||
|
p256dh: string;
|
||||||
|
auth: string;
|
||||||
|
label: string;
|
||||||
|
created_at: number;
|
||||||
|
last_success_at: number | null;
|
||||||
|
failure_count: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TraceResponse {
|
export interface TraceResponse {
|
||||||
|
|||||||
@@ -209,6 +209,37 @@ export function formatRouteLabel(pathLen: number, capitalize: boolean = false):
|
|||||||
return capitalize ? label.charAt(0).toUpperCase() + label.slice(1) : label;
|
return capitalize ? label.charAt(0).toUpperCase() + label.slice(1) : label;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format the learned direct route for display in route-editing dialogs,
|
||||||
|
* e.g. "2 hops (AE -> F1)", "Direct", or "Flood".
|
||||||
|
*/
|
||||||
|
export function formatLearnedRouteSummary(contact: Contact): string {
|
||||||
|
const directRoute = getDirectContactRoute(contact);
|
||||||
|
if (!directRoute) {
|
||||||
|
return formatRouteLabel(-1, true);
|
||||||
|
}
|
||||||
|
const hops = parsePathHops(directRoute.path, directRoute.path_len);
|
||||||
|
const label = formatRouteLabel(directRoute.path_len, true);
|
||||||
|
return hops.length > 0 ? `${label} (${hops.join(' -> ')})` : label;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format the forced (override) route for display in route-editing dialogs,
|
||||||
|
* matching the learned-route format. Returns null when no override is set.
|
||||||
|
*/
|
||||||
|
export function formatForcedRouteSummary(contact: Contact): string | null {
|
||||||
|
if (!hasRoutingOverride(contact)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const effectiveRoute = getEffectiveContactRoute(contact);
|
||||||
|
if (effectiveRoute.pathLen === -1) {
|
||||||
|
return formatRouteLabel(-1, true);
|
||||||
|
}
|
||||||
|
const hops = parsePathHops(effectiveRoute.path, effectiveRoute.pathLen);
|
||||||
|
const label = formatRouteLabel(effectiveRoute.pathLen, true);
|
||||||
|
return hops.length > 0 ? `${label} (${hops.join(' -> ')})` : label;
|
||||||
|
}
|
||||||
|
|
||||||
export function formatRoutingOverrideInput(contact: Contact): string {
|
export function formatRoutingOverrideInput(contact: Contact): string {
|
||||||
const routeOverride = getRouteOverride(contact);
|
const routeOverride = getRouteOverride(contact);
|
||||||
if (!routeOverride) {
|
if (!routeOverride) {
|
||||||
|
|||||||
@@ -0,0 +1,61 @@
|
|||||||
|
export const STATUS_DOT_PULSE_CHANGE_EVENT = 'remoteterm-status-dot-pulse-change';
|
||||||
|
export const STATUS_DOT_PULSE_PACKET_EVENT = 'remoteterm-status-dot-pulse-packet';
|
||||||
|
|
||||||
|
const STORAGE_KEY = 'remoteterm-status-dot-pulse';
|
||||||
|
|
||||||
|
export type StatusDotPulseKind = 'channel' | 'dm' | 'advert' | 'other';
|
||||||
|
|
||||||
|
export function getStatusDotPulseEnabled(): boolean {
|
||||||
|
try {
|
||||||
|
return localStorage.getItem(STORAGE_KEY) === 'true';
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function setStatusDotPulseEnabled(enabled: boolean): void {
|
||||||
|
try {
|
||||||
|
if (enabled) {
|
||||||
|
localStorage.setItem(STORAGE_KEY, 'true');
|
||||||
|
} else {
|
||||||
|
localStorage.removeItem(STORAGE_KEY);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// localStorage may be unavailable
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function payloadTypeToPulseKind(payloadType: string | null | undefined): StatusDotPulseKind {
|
||||||
|
switch (payloadType) {
|
||||||
|
case 'GROUP_TEXT':
|
||||||
|
return 'channel';
|
||||||
|
case 'TEXT_MESSAGE':
|
||||||
|
return 'dm';
|
||||||
|
case 'ADVERT':
|
||||||
|
return 'advert';
|
||||||
|
default:
|
||||||
|
return 'other';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const PULSE_COLORS: Record<StatusDotPulseKind, string> = {
|
||||||
|
channel: 'hsl(210, 90%, 55%)', // blue
|
||||||
|
dm: 'hsl(270, 75%, 60%)', // purple
|
||||||
|
advert: 'hsl(185, 85%, 55%)', // cyan
|
||||||
|
other: 'hsl(140, 80%, 22%)', // dark green
|
||||||
|
};
|
||||||
|
|
||||||
|
export function pulseColorFor(kind: StatusDotPulseKind): string {
|
||||||
|
return PULSE_COLORS[kind];
|
||||||
|
}
|
||||||
|
|
||||||
|
export const STATUS_DOT_PULSE_DURATION_MS = 250;
|
||||||
|
|
||||||
|
export function emitStatusDotPulse(payloadType: string | null | undefined): void {
|
||||||
|
const kind = payloadTypeToPulseKind(payloadType);
|
||||||
|
window.dispatchEvent(
|
||||||
|
new CustomEvent<StatusDotPulseKind>(STATUS_DOT_PULSE_PACKET_EVENT, {
|
||||||
|
detail: kind,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -9,6 +9,8 @@ export interface Theme {
|
|||||||
|
|
||||||
export const THEME_CHANGE_EVENT = 'remoteterm-theme-change';
|
export const THEME_CHANGE_EVENT = 'remoteterm-theme-change';
|
||||||
|
|
||||||
|
export const FOLLOW_OS_THEME_ID = 'follow-os';
|
||||||
|
|
||||||
export const THEMES: Theme[] = [
|
export const THEMES: Theme[] = [
|
||||||
{
|
{
|
||||||
id: 'original',
|
id: 'original',
|
||||||
@@ -22,6 +24,13 @@ export const THEMES: Theme[] = [
|
|||||||
swatches: ['#F8F7F4', '#FFFFFF', '#1B7D4E', '#EDEBE7', '#D97706', '#3B82F6'],
|
swatches: ['#F8F7F4', '#FFFFFF', '#1B7D4E', '#EDEBE7', '#D97706', '#3B82F6'],
|
||||||
metaThemeColor: '#F8F7F4',
|
metaThemeColor: '#F8F7F4',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
id: FOLLOW_OS_THEME_ID,
|
||||||
|
name: 'OS Light/Dark Mode',
|
||||||
|
// Top row: light theme preview colors; bottom row: original (dark) preview colors
|
||||||
|
swatches: ['#F8F7F4', '#FFFFFF', '#1B7D4E', '#111419', '#181b21', '#27a05c'],
|
||||||
|
metaThemeColor: '#111419',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
id: 'ios',
|
id: 'ios',
|
||||||
name: 'iPhone',
|
name: 'iPhone',
|
||||||
@@ -94,6 +103,23 @@ export function getSavedTheme(): string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Resolves "Follow OS" to a concrete theme id by inspecting the OS color-scheme preference. */
|
||||||
|
function resolveFollowOS(): 'original' | 'light' {
|
||||||
|
if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') {
|
||||||
|
return 'original';
|
||||||
|
}
|
||||||
|
return window.matchMedia('(prefers-color-scheme: light)').matches ? 'light' : 'original';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the concrete theme id currently applied to the document.
|
||||||
|
* Unlike getSavedTheme, this resolves 'follow-os' to 'original' or 'light'.
|
||||||
|
*/
|
||||||
|
export function getEffectiveTheme(): string {
|
||||||
|
const saved = getSavedTheme();
|
||||||
|
return saved === FOLLOW_OS_THEME_ID ? resolveFollowOS() : saved;
|
||||||
|
}
|
||||||
|
|
||||||
export function applyTheme(themeId: string): void {
|
export function applyTheme(themeId: string): void {
|
||||||
try {
|
try {
|
||||||
localStorage.setItem(THEME_KEY, themeId);
|
localStorage.setItem(THEME_KEY, themeId);
|
||||||
@@ -101,14 +127,16 @@ export function applyTheme(themeId: string): void {
|
|||||||
// localStorage may be unavailable
|
// localStorage may be unavailable
|
||||||
}
|
}
|
||||||
|
|
||||||
if (themeId === 'original') {
|
const effective = themeId === FOLLOW_OS_THEME_ID ? resolveFollowOS() : themeId;
|
||||||
|
|
||||||
|
if (effective === 'original') {
|
||||||
delete document.documentElement.dataset.theme;
|
delete document.documentElement.dataset.theme;
|
||||||
} else {
|
} else {
|
||||||
document.documentElement.dataset.theme = themeId;
|
document.documentElement.dataset.theme = effective;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update PWA theme-color meta tag
|
// Update PWA theme-color meta tag — reflect the effective (rendered) theme.
|
||||||
const theme = THEMES.find((t) => t.id === themeId);
|
const theme = THEMES.find((t) => t.id === effective);
|
||||||
if (theme) {
|
if (theme) {
|
||||||
const meta = document.querySelector('meta[name="theme-color"]');
|
const meta = document.querySelector('meta[name="theme-color"]');
|
||||||
if (meta) {
|
if (meta) {
|
||||||
@@ -117,6 +145,33 @@ export function applyTheme(themeId: string): void {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (typeof window !== 'undefined') {
|
if (typeof window !== 'undefined') {
|
||||||
|
// Detail is the saved theme id (including 'follow-os'); listeners that need
|
||||||
|
// the rendered appearance should call getEffectiveTheme().
|
||||||
window.dispatchEvent(new CustomEvent(THEME_CHANGE_EVENT, { detail: themeId }));
|
window.dispatchEvent(new CustomEvent(THEME_CHANGE_EVENT, { detail: themeId }));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let followOSInitialized = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Installs a one-time listener on prefers-color-scheme so that when the user is
|
||||||
|
* on "Follow OS", OS appearance changes re-apply the theme. Safe to call once
|
||||||
|
* from app bootstrap.
|
||||||
|
*/
|
||||||
|
export function initFollowOSListener(): void {
|
||||||
|
if (followOSInitialized) return;
|
||||||
|
if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') return;
|
||||||
|
followOSInitialized = true;
|
||||||
|
const mql = window.matchMedia('(prefers-color-scheme: light)');
|
||||||
|
const handler = () => {
|
||||||
|
if (getSavedTheme() === FOLLOW_OS_THEME_ID) {
|
||||||
|
applyTheme(FOLLOW_OS_THEME_ID);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if (typeof mql.addEventListener === 'function') {
|
||||||
|
mql.addEventListener('change', handler);
|
||||||
|
} else if (typeof (mql as MediaQueryList).addListener === 'function') {
|
||||||
|
// Safari < 14 fallback
|
||||||
|
(mql as MediaQueryList).addListener(handler);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
+2
-9
@@ -1,6 +1,6 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "remoteterm-meshcore"
|
name = "remoteterm-meshcore"
|
||||||
version = "3.11.0"
|
version = "3.12.0"
|
||||||
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.11"
|
||||||
@@ -16,14 +16,7 @@ dependencies = [
|
|||||||
"aiomqtt>=2.0",
|
"aiomqtt>=2.0",
|
||||||
"apprise>=1.9.8",
|
"apprise>=1.9.8",
|
||||||
"boto3>=1.38.0",
|
"boto3>=1.38.0",
|
||||||
]
|
"pywebpush>=0.14.0",
|
||||||
|
|
||||||
[project.optional-dependencies]
|
|
||||||
test = [
|
|
||||||
"pytest>=8.0.0",
|
|
||||||
"pytest-asyncio>=0.24.0",
|
|
||||||
"pytest-xdist>=3.0",
|
|
||||||
"httpx>=0.27.0",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
[tool.pytest.ini_options]
|
||||||
|
|||||||
Regular → Executable
Regular → Executable
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user