mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-03-28 17:43:05 +01:00
Compare commits
29 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f8f0b3a8cf | ||
|
|
47276dcb6c | ||
|
|
9c06ed62a4 | ||
|
|
e19a8d3395 | ||
|
|
b68bfc41d6 | ||
|
|
ffb5fa51c1 | ||
|
|
0e4828bf72 | ||
|
|
a5d9632a67 | ||
|
|
24747ecd17 | ||
|
|
dbb8dd4c43 | ||
|
|
6c003069d4 | ||
|
|
ea5ba3b2a3 | ||
|
|
58b34a6a2f | ||
|
|
4277e0c924 | ||
|
|
2f562ce682 | ||
|
|
370ff115b4 | ||
|
|
04733b6a02 | ||
|
|
749fb43fd0 | ||
|
|
8d7d926762 | ||
|
|
c809dad05d | ||
|
|
c76f230c9f | ||
|
|
226dc4f59e | ||
|
|
3f50a2ef07 | ||
|
|
4a7ea9eb29 | ||
|
|
29368961fc | ||
|
|
7cb84ea6c7 | ||
|
|
0b1a19164a | ||
|
|
cf1a55e258 | ||
|
|
0881998e5b |
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
frontend/prebuilt/** -diff
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -12,8 +12,12 @@ frontend/test-results/
|
||||
|
||||
# Frontend build output (built from source by end users)
|
||||
frontend/dist/
|
||||
frontend/prebuilt/
|
||||
frontend/.eslintcache
|
||||
|
||||
# Release artifacts
|
||||
remoteterm-prebuilt-frontend-v*.zip
|
||||
|
||||
# reference libraries
|
||||
references/
|
||||
|
||||
|
||||
11
AGENTS.md
11
AGENTS.md
@@ -194,11 +194,12 @@ This message-layer echo/path handling is independent of raw-packet storage dedup
|
||||
│ │ └── ...
|
||||
│ └── vite.config.ts
|
||||
├── scripts/
|
||||
│ ├── all_quality.sh # Run all lint, format, typecheck, tests, build (sequential)
|
||||
│ ├── all_quality.sh # Run all lint, format, typecheck, tests, and the standard frontend build
|
||||
│ ├── collect_licenses.sh # Gather third-party license attributions
|
||||
│ ├── e2e.sh # End-to-end test runner
|
||||
│ └── publish.sh # Version bump, changelog, docker build & push
|
||||
├── remoteterm.service # Systemd unit file for production deployment
|
||||
├── README_ADVANCED.md # Advanced setup, troubleshooting, and service guidance
|
||||
├── CONTRIBUTING.md # Contributor workflow and testing guidance
|
||||
├── tests/ # Backend tests (pytest)
|
||||
├── data/ # SQLite database (runtime)
|
||||
└── pyproject.toml # Python dependencies
|
||||
@@ -243,7 +244,7 @@ uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
Access at `http://localhost:8000`. All API routes are prefixed with `/api`.
|
||||
|
||||
If `frontend/dist` (or `frontend/dist/index.html`) is missing, backend startup now logs an explicit error and continues serving API routes. In that case, frontend static routes are not mounted until a frontend build is present.
|
||||
If `frontend/dist` is missing, the backend falls back to `frontend/prebuilt` when present (for example from the release zip artifact). If neither build directory is available, startup logs an explicit error and continues serving API routes without frontend static routes mounted.
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -281,7 +282,7 @@ npm run test:run
|
||||
|
||||
### Before Completing Changes
|
||||
|
||||
**Always run `./scripts/all_quality.sh` before finishing any changes that have modified code or tests.** This runs all linting, formatting, type checking, tests, and builds sequentially, catching type mismatches, breaking changes, and compilation errors. This is not necessary for docs-only changes.
|
||||
**Always run `./scripts/all_quality.sh` before finishing any changes that have modified code or tests.** This runs all linting, formatting, type checking, tests, and the standard frontend build sequentially, catching type mismatches, breaking changes, and compilation errors. This is not necessary for docs-only changes.
|
||||
|
||||
## API Summary
|
||||
|
||||
@@ -444,7 +445,7 @@ mc.subscribe(EventType.ACK, handler)
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
|
||||
|
||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `sidebar_sort_order`, `advert_interval`, `last_advert_time`, `favorites`, `last_message_times`, `flood_scope`, `blocked_keys`, and `blocked_names`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `sidebar_sort_order`, `advert_interval`, `last_advert_time`, `favorites`, `last_message_times`, `flood_scope`, `blocked_keys`, and `blocked_names`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. The backend still carries `sidebar_sort_order` for compatibility and migration, but the current frontend sidebar stores sort order per section (`Channels`, `Contacts`, `Repeaters`) in localStorage rather than treating it as one shared server-backed preference. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||
|
||||
Byte-perfect channel retries are user-triggered via `POST /api/messages/channel/{message_id}/resend` and are allowed for 30 seconds after the original send.
|
||||
|
||||
|
||||
18
CHANGELOG.md
18
CHANGELOG.md
@@ -1,3 +1,21 @@
|
||||
## [3.4.1] - 2026-03-16
|
||||
|
||||
Bugfix: Improve handling of version information on prebuilt bundles
|
||||
Bugfix: Improve frontend usability on disconnected radio
|
||||
Misc: Docs and readme updates
|
||||
Misc: Overhaul DM ingest and frontend state handling
|
||||
|
||||
## [3.4.0] - 2026-03-16
|
||||
|
||||
Feature: Add radio model and stats display
|
||||
Feature: Add prebuilt frontends, then deleted that and moved to prebuilt release artifacts
|
||||
Bugfix: Misc. frontend performance and correctness fixes
|
||||
Bugfix: Fix same-second same-content DM send collision
|
||||
Bugfix: Discard clearly-wrong GPS data
|
||||
Bugfix: Prevent repeater clock skew drift on page nav
|
||||
Misc: Use repeater's advertised location if we haven't loaded one from repeater admin
|
||||
Misc: Don't permit invalid fanout configs to be saved, ever
|
||||
|
||||
## [3.3.0] - 2026-03-13
|
||||
|
||||
Feature: Use dashed lines to show collapsed ambiguous router results
|
||||
|
||||
108
CONTRIBUTING.md
Normal file
108
CONTRIBUTING.md
Normal file
@@ -0,0 +1,108 @@
|
||||
# Contributing
|
||||
|
||||
## Guiding Principles
|
||||
|
||||
- In all your interactions with developers, maintainers, and users, be kind.
|
||||
- Prefer small, comprehensible changes over large sweeping ones. Individual commits should be meaningful atomic chunks of work. Pull requests with many, many commits instead of a phased approach may be declined.
|
||||
- Pull requests must be fully understood and explicitly endorsed by a human before merge. AI assistance is great, and this repo is optimized for it, but we keep quality by keeping our agents on track to write clear code, useful (not useless) tests, good architecture, and big-picture thinking.
|
||||
- No pull request should introduce new failing lint, typecheck, test, or build results.
|
||||
- Every pull request should have an associated issue or discussion thread; a brand new feature appearing first in a PR is an antipattern.
|
||||
- No truly automated radio traffic. Bot replies are already the practical edge of what this project wants to automate; any kind of traffic that would be intervalized or automated is not what this project is about.
|
||||
- No ingestion from the internet onto the mesh. This project is a radio client, not a bridge for outside traffic to enter the network. The mesh is strong because it is a radio mesh, not the internet with some weird wireless links.
|
||||
|
||||
## Local Development
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
uv sync
|
||||
uv run uvicorn app.main:app --reload
|
||||
```
|
||||
|
||||
With an explicit serial port:
|
||||
|
||||
```bash
|
||||
MESHCORE_SERIAL_PORT=/dev/ttyUSB0 uv run uvicorn app.main:app --reload
|
||||
```
|
||||
|
||||
On Windows (PowerShell):
|
||||
|
||||
```powershell
|
||||
uv sync
|
||||
$env:MESHCORE_SERIAL_PORT="COM8"
|
||||
uv run uvicorn app.main:app --reload
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Run both the backend and `npm run dev` for hot-reloading frontend development.
|
||||
|
||||
## Quality Checks
|
||||
|
||||
Run the full quality suite before proposing or handing off code changes:
|
||||
|
||||
```bash
|
||||
./scripts/all_quality.sh
|
||||
```
|
||||
|
||||
That runs linting, formatting, type checking, tests, and builds for both backend and frontend.
|
||||
|
||||
If you need targeted commands while iterating:
|
||||
|
||||
```bash
|
||||
# backend
|
||||
uv run ruff check app/ tests/ --fix
|
||||
uv run ruff format app/ tests/
|
||||
uv run pyright app/
|
||||
PYTHONPATH=. uv run pytest tests/ -v
|
||||
|
||||
# frontend
|
||||
cd frontend
|
||||
npm run lint:fix
|
||||
npm run format
|
||||
npm run test:run
|
||||
npm run build
|
||||
```
|
||||
|
||||
## E2E Testing
|
||||
|
||||
E2E coverage exists, but it is intentionally not part of the normal development path.
|
||||
|
||||
These tests are only guaranteed to run correctly in a narrow subset of environments; they require a busy mesh with messages arriving constantly, an available autodetect-able radio, and a contact in the test database (which you can provide in `tests/e2e/.tmp/e2e-test.db` after an initial run). E2E tests are generally not necessary to run for normal development work.
|
||||
|
||||
```bash
|
||||
cd tests/e2e
|
||||
npx playwright test # headless
|
||||
npx playwright test --headed # you can probably guess
|
||||
```
|
||||
|
||||
## Pull Request Expectations
|
||||
|
||||
- Keep scope tight.
|
||||
- Explain why the change is needed.
|
||||
- Link the issue or discussion where the behavior was agreed on.
|
||||
- Call out any follow-up work left intentionally undone.
|
||||
- Do not treat code review as the place where the app's direction is first introduced or debated.
|
||||
|
||||
## Notes For Agent-Assisted Work
|
||||
|
||||
Before making non-trivial changes, read:
|
||||
|
||||
- `./AGENTS.md`
|
||||
- `./app/AGENTS.md`
|
||||
- `./frontend/AGENTS.md`
|
||||
|
||||
Read these only when working in those areas:
|
||||
|
||||
- `./app/fanout/AGENTS_fanout.md`
|
||||
- `./frontend/src/components/visualizer/AGENTS_packet_visualizer.md`
|
||||
|
||||
- Agent output is welcome, but human review is mandatory.
|
||||
- Agents should start with the AGENTS files above before making architectural changes.
|
||||
- If a change touches advanced areas like fanout or the visualizer, read the area-specific AGENTS file before editing.
|
||||
279
README.md
279
README.md
@@ -22,13 +22,27 @@ This is developed with very heavy agentic assistance -- there is no warranty of
|
||||
|
||||
If extending, have your LLM read the three `AGENTS.md` files: `./AGENTS.md`, `./frontend/AGENTS.md`, and `./app/AGENTS.md`.
|
||||
|
||||
## Start Here
|
||||
|
||||
Most users should choose one of these paths:
|
||||
|
||||
1. Clone and build from source.
|
||||
2. Download the prebuilt release zip if you are on a resource-constrained system and do not want to build the frontend locally.
|
||||
3. Use Docker if that better matches how you deploy.
|
||||
|
||||
For advanced setup, troubleshooting, HTTPS, systemd service setup, and remediation environment variables, see [README_ADVANCED.md](README_ADVANCED.md).
|
||||
|
||||
If you plan to contribute, read [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
|
||||
## Requirements
|
||||
|
||||
- Python 3.10+
|
||||
- Node.js LTS or current (20, 22, 24, 25)
|
||||
- Node.js LTS or current (20, 22, 24, 25) if you're not using a prebuilt release
|
||||
- [UV](https://astral.sh/uv) package manager: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
||||
- MeshCore radio connected via USB serial, TCP, or BLE
|
||||
|
||||
If you are on a low-resource system and do not want to build the frontend locally, download the release zip named `remoteterm-prebuilt-frontend-vX.X.X-<short hash>.zip`. That bundle includes `frontend/prebuilt`, so you can run the app without doing a frontend build from source.
|
||||
|
||||
<details>
|
||||
<summary>Finding your serial port</summary>
|
||||
|
||||
@@ -65,7 +79,7 @@ usbipd attach --wsl --busid 3-8
|
||||
```
|
||||
</details>
|
||||
|
||||
## Quick Start
|
||||
## Path 1: Clone And Build
|
||||
|
||||
**This approach is recommended over Docker due to intermittent serial communications issues I've seen on \*nix systems.**
|
||||
|
||||
@@ -73,43 +87,33 @@ usbipd attach --wsl --busid 3-8
|
||||
git clone https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
|
||||
cd Remote-Terminal-for-MeshCore
|
||||
|
||||
# Install backend dependencies
|
||||
uv sync
|
||||
|
||||
# Build frontend
|
||||
cd frontend && npm install && npm run build && cd ..
|
||||
|
||||
# Run server
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
The server auto-detects the serial port. To specify a transport manually:
|
||||
Access the app at http://localhost:8000.
|
||||
|
||||
Source checkouts expect a normal frontend build in `frontend/dist`.
|
||||
|
||||
## Path 1.5: Use The Prebuilt Release Zip
|
||||
|
||||
Release zips can be found as an asset within the [releases listed here](https://github.com/jkingsman/Remote-Terminal-for-MeshCore/releases). This can be beneficial on resource constrained systems that cannot cope with the RAM-hungry frontend build process.
|
||||
|
||||
If you downloaded the release zip instead of cloning the repo, unpack it and run:
|
||||
|
||||
```bash
|
||||
# Serial (explicit port)
|
||||
MESHCORE_SERIAL_PORT=/dev/ttyUSB0 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
# TCP (e.g. via wifi-enabled firmware)
|
||||
MESHCORE_TCP_HOST=192.168.1.100 MESHCORE_TCP_PORT=4000 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
# BLE (address and PIN both required)
|
||||
MESHCORE_BLE_ADDRESS=AA:BB:CC:DD:EE:FF MESHCORE_BLE_PIN=123456 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
On Windows (PowerShell), set environment variables as a separate statement:
|
||||
```powershell
|
||||
$env:MESHCORE_SERIAL_PORT="COM8" # or your COM port
|
||||
cd Remote-Terminal-for-MeshCore
|
||||
uv sync
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
Access at http://localhost:8000
|
||||
The release bundle includes `frontend/prebuilt`, so it does not require a local frontend build.
|
||||
|
||||
> **Note:** WebGPU cracking requires HTTPS when not on localhost. See the HTTPS section under Additional Setup.
|
||||
## Path 2: Docker
|
||||
|
||||
## Docker Compose
|
||||
|
||||
> **Warning:** Docker has intermittent issues with serial event subscriptions. The native method above is more reliable.
|
||||
|
||||
> **Note:** BLE-in-docker is outside the scope of this README, but the env vars should all still work.
|
||||
> **Warning:** Docker has had reports of intermittent issues with serial event subscriptions. The native method above is more reliable.
|
||||
|
||||
Edit `docker-compose.yaml` to set a serial device for passthrough, or uncomment your transport (serial or TCP). Then:
|
||||
|
||||
@@ -142,7 +146,7 @@ docker compose pull
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
The container runs as root by default for maximum serial passthrough compatibility across host setups. On Linux, if you switch between native and Docker runs, `./data` can end up root-owned. If you do not need that compatibility behavior, you can enable the optional `user: "${UID:-1000}:${GID:-1000}"` line in `docker-compose.yaml` to keep ownership aligned with your host user.
|
||||
The container runs as root by default for maximum serial passthrough compatibility across host setups. On Linux, if you switch between native and Docker runs, `./data` can end up root-owned. If you do not need that serial compatibility behavior, you can enable the optional `user: "${UID:-1000}:${GID:-1000}"` line in `docker-compose.yaml` to keep ownership aligned with your host user.
|
||||
|
||||
To stop:
|
||||
|
||||
@@ -150,68 +154,9 @@ To stop:
|
||||
docker compose down
|
||||
```
|
||||
|
||||
## Development
|
||||
## Standard Environment Variables
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
uv sync
|
||||
uv run uvicorn app.main:app --reload # autodetects serial port
|
||||
|
||||
# Or with explicit serial port
|
||||
MESHCORE_SERIAL_PORT=/dev/ttyUSB0 uv run uvicorn app.main:app --reload
|
||||
```
|
||||
|
||||
On Windows (PowerShell):
|
||||
```powershell
|
||||
uv sync
|
||||
$env:MESHCORE_SERIAL_PORT="COM8" # or your COM port
|
||||
uv run uvicorn app.main:app --reload
|
||||
```
|
||||
|
||||
> **Windows note:** I've seen an intermittent startup issue like `"Received empty packet: index out of range"` with failed contact sync. I can't figure out why this happens. The issue typically resolves on restart. If you can figure out why this happens, I will buy you a virtual or iRL six pack if you're in the PNW. As a former always-windows-girlie before embracing WSL2, I despise second-classing M$FT users, but I'm just stuck with this one.
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev # Dev server at http://localhost:5173 (proxies API to :8000)
|
||||
npm run build # Production build to dist/
|
||||
```
|
||||
|
||||
Run both the backend and `npm run dev` for hot-reloading frontend development.
|
||||
|
||||
### Code Quality & Tests
|
||||
|
||||
Please test, lint, format, and quality check your code before PRing or committing. At the least, run a lint + autoformat + pyright check on the backend, and a lint + autoformat on the frontend.
|
||||
|
||||
Run everything at once:
|
||||
|
||||
```bash
|
||||
./scripts/all_quality.sh
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Or run individual checks</summary>
|
||||
|
||||
```bash
|
||||
# python
|
||||
uv run ruff check app/ tests/ --fix # lint + auto-fix
|
||||
uv run ruff format app/ tests/ # format (always writes)
|
||||
uv run pyright app/ # type checking
|
||||
PYTHONPATH=. uv run pytest tests/ -v # backend tests
|
||||
|
||||
# frontend
|
||||
cd frontend
|
||||
npm run lint:fix # esLint + auto-fix
|
||||
npm run test:run # run tests
|
||||
npm run format # prettier (always writes)
|
||||
npm run build # build the frontend
|
||||
```
|
||||
</details>
|
||||
|
||||
## Configuration
|
||||
Only one transport may be active at a time. If multiple are set, the server will refuse to start.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
@@ -221,140 +166,36 @@ npm run build # build the frontend
|
||||
| `MESHCORE_TCP_PORT` | 4000 | TCP port |
|
||||
| `MESHCORE_BLE_ADDRESS` | | BLE device address (mutually exclusive with serial/TCP) |
|
||||
| `MESHCORE_BLE_PIN` | | BLE PIN (required when BLE address is set) |
|
||||
| `MESHCORE_LOG_LEVEL` | INFO | DEBUG, INFO, WARNING, ERROR |
|
||||
| `MESHCORE_DATABASE_PATH` | data/meshcore.db | SQLite database path |
|
||||
| `MESHCORE_DISABLE_BOTS` | false | Disable bot system entirely (blocks execution and config) |
|
||||
| `MESHCORE_LOG_LEVEL` | INFO | `DEBUG`, `INFO`, `WARNING`, `ERROR` |
|
||||
| `MESHCORE_DATABASE_PATH` | `data/meshcore.db` | SQLite database path |
|
||||
| `MESHCORE_DISABLE_BOTS` | false | Disable bot system entirely (blocks execution and config; an intermediate security precaution, but not as good as basic auth) |
|
||||
| `MESHCORE_BASIC_AUTH_USERNAME` | | Optional app-wide HTTP Basic auth username; must be set together with `MESHCORE_BASIC_AUTH_PASSWORD` |
|
||||
| `MESHCORE_BASIC_AUTH_PASSWORD` | | Optional app-wide HTTP Basic auth password; must be set together with `MESHCORE_BASIC_AUTH_USERNAME` |
|
||||
|
||||
Only one transport may be active at a time. If multiple are set, the server will refuse to start.
|
||||
Common launch patterns:
|
||||
|
||||
```bash
|
||||
# Serial (explicit port)
|
||||
MESHCORE_SERIAL_PORT=/dev/ttyUSB0 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
# TCP
|
||||
MESHCORE_TCP_HOST=192.168.1.100 MESHCORE_TCP_PORT=4000 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
# BLE
|
||||
MESHCORE_BLE_ADDRESS=AA:BB:CC:DD:EE:FF MESHCORE_BLE_PIN=123456 uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
On Windows (PowerShell), set environment variables as a separate statement:
|
||||
|
||||
```powershell
|
||||
$env:MESHCORE_SERIAL_PORT="COM8" # or your COM port
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
If you enable Basic Auth, protect the app with HTTPS. HTTP Basic credentials are not safe on plain HTTP.
|
||||
|
||||
### Remediation Environment Variables
|
||||
## Where To Go Next
|
||||
|
||||
These are intended for diagnosing or working around radios that behave oddly.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | false | Run aggressive 10-second `get_msg()` fallback polling instead of the default hourly sanity check |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | false | Disable channel-slot reuse and force `set_channel(...)` before every channel send |
|
||||
|
||||
By default the app relies on radio events plus MeshCore auto-fetch for incoming messages, and also runs a low-frequency hourly audit poll. That audit checks both:
|
||||
|
||||
- whether messages were left on the radio without reaching the app through event subscription
|
||||
- whether the app's channel-slot expectations still match the radio's actual channel listing
|
||||
|
||||
If the audit finds a mismatch, you'll see an error in the application UI and your logs. If you see that warning, or if messages on the radio never show up in the app, try `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK=true` to switch that task into a more aggressive 10-second safety net. If room sends appear to be using the wrong channel slot or another client is changing slots underneath this app, try `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true` to force the radio to validate the channel slot before sending (this adds roughly 500ms of send latency).
|
||||
|
||||
## Additional Setup
|
||||
|
||||
<details>
|
||||
<summary>HTTPS (Required for WebGPU room-finding outside localhost)</summary>
|
||||
|
||||
WebGPU requires a secure context. When not on `localhost`, serve over HTTPS:
|
||||
|
||||
```bash
|
||||
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes -subj '/CN=localhost'
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --ssl-keyfile=key.pem --ssl-certfile=cert.pem
|
||||
```
|
||||
|
||||
For Docker Compose, generate the cert and add the volume mounts and command override to `docker-compose.yaml`:
|
||||
|
||||
```bash
|
||||
# generate snakeoil TLS cert
|
||||
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes -subj '/CN=localhost'
|
||||
```
|
||||
|
||||
Then add the key and cert to the `remoteterm` service in `docker-compose.yaml`, and add an explicit launch command that uses them:
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./cert.pem:/app/cert.pem:ro
|
||||
- ./key.pem:/app/key.pem:ro
|
||||
command: uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --ssl-keyfile=/app/key.pem --ssl-certfile=/app/cert.pem
|
||||
```
|
||||
|
||||
Accept the browser warning, or use [mkcert](https://github.com/FiloSottile/mkcert) for locally-trusted certs.
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Systemd Service (Linux)</summary>
|
||||
|
||||
Assumes you're running from `/opt/remoteterm`; update commands and `remoteterm.service` if you're running elsewhere.
|
||||
|
||||
```bash
|
||||
# Create service user
|
||||
sudo useradd -r -m -s /bin/false remoteterm
|
||||
|
||||
# Install to /opt/remoteterm
|
||||
sudo mkdir -p /opt/remoteterm
|
||||
sudo cp -r . /opt/remoteterm/
|
||||
sudo chown -R remoteterm:remoteterm /opt/remoteterm
|
||||
|
||||
# Install dependencies
|
||||
cd /opt/remoteterm
|
||||
sudo -u remoteterm uv venv
|
||||
sudo -u remoteterm uv sync
|
||||
|
||||
# Build frontend (required for the backend to serve the web UI)
|
||||
cd /opt/remoteterm/frontend
|
||||
sudo -u remoteterm npm install
|
||||
sudo -u remoteterm npm run build
|
||||
|
||||
# Install and start service
|
||||
sudo cp /opt/remoteterm/remoteterm.service /etc/systemd/system/
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now remoteterm
|
||||
|
||||
# Check status
|
||||
sudo systemctl status remoteterm
|
||||
sudo journalctl -u remoteterm -f
|
||||
```
|
||||
|
||||
Edit `/etc/systemd/system/remoteterm.service` to set `MESHCORE_SERIAL_PORT` if needed.
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Testing</summary>
|
||||
|
||||
**Backend:**
|
||||
|
||||
```bash
|
||||
PYTHONPATH=. uv run pytest tests/ -v
|
||||
```
|
||||
|
||||
**Frontend:**
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm run test:run
|
||||
```
|
||||
|
||||
**E2E:**
|
||||
|
||||
Warning: these tests are only guaranteed to run correctly in a narrow subset of environments; they require a busy mesh with messages arriving constantly and an available autodetect-able radio, as well as a contact in the test database (which you can provide in `tests/e2e/.tmp/e2e-test.db` after an initial run). E2E tests are generally not necessary to run for normal development work.
|
||||
|
||||
```bash
|
||||
cd tests/e2e
|
||||
npx playwright test # headless
|
||||
npx playwright test --headed # show the browser window
|
||||
```
|
||||
</details>
|
||||
|
||||
## API Documentation
|
||||
|
||||
With the backend running: http://localhost:8000/docs
|
||||
|
||||
## Debugging & Bug Reports
|
||||
|
||||
If you're experiencing issues or opening a bug report, please start the backend with debug logging enabled. Debug mode provides a much more detailed breakdown of radio communication, packet processing, and other internal operations, which makes it significantly easier to diagnose problems.
|
||||
|
||||
To start the server with debug logging:
|
||||
|
||||
```bash
|
||||
MESHCORE_LOG_LEVEL=DEBUG uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
Please include the relevant debug log output when filing an issue on GitHub.
|
||||
- Advanced setup, troubleshooting, HTTPS, systemd, remediation variables, and debug logging: [README_ADVANCED.md](README_ADVANCED.md)
|
||||
- Contributing, tests, linting, E2E notes, and important AGENTS files: [CONTRIBUTING.md](CONTRIBUTING.md)
|
||||
- Live API docs after the backend is running: http://localhost:8000/docs
|
||||
|
||||
115
README_ADVANCED.md
Normal file
115
README_ADVANCED.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# Advanced Setup And Troubleshooting
|
||||
|
||||
## Remediation Environment Variables
|
||||
|
||||
These are intended for diagnosing or working around radios that behave oddly.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | false | Run aggressive 10-second `get_msg()` fallback polling to check for messages |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | false | Disable channel-slot reuse and force `set_channel(...)` before every channel send |
|
||||
|
||||
By default the app relies on radio events plus MeshCore auto-fetch for incoming messages, and also runs a low-frequency hourly audit poll. That audit checks both:
|
||||
|
||||
- whether messages were left on the radio without reaching the app through event subscription
|
||||
- whether the app's channel-slot expectations still match the radio's actual channel listing
|
||||
|
||||
If the audit finds a mismatch, you'll see an error in the application UI and your logs. If you see that warning, or if messages on the radio never show up in the app, try `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK=true` to switch that task into a more aggressive 10-second safety net. If room sends appear to be using the wrong channel slot or another client is changing slots underneath this app, try `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true` to force the radio to validate the channel slot before sending (this adds roughly 500ms of send latency).
|
||||
|
||||
## HTTPS
|
||||
|
||||
WebGPU room-finding requires a secure context when you are not on `localhost`.
|
||||
|
||||
Generate a local cert and start the backend with TLS:
|
||||
|
||||
```bash
|
||||
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes -subj '/CN=localhost'
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --ssl-keyfile=key.pem --ssl-certfile=cert.pem
|
||||
```
|
||||
|
||||
For Docker Compose, generate the cert, mount it into the container, and override the launch command:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
remoteterm:
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./cert.pem:/app/cert.pem:ro
|
||||
- ./key.pem:/app/key.pem:ro
|
||||
command: uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --ssl-keyfile=/app/key.pem --ssl-certfile=/app/cert.pem
|
||||
```
|
||||
|
||||
Accept the browser warning, or use [mkcert](https://github.com/FiloSottile/mkcert) for locally-trusted certs.
|
||||
|
||||
## Systemd Service
|
||||
|
||||
Assumes you are running from `/opt/remoteterm`; adjust paths if you deploy elsewhere.
|
||||
|
||||
```bash
|
||||
# Create service user
|
||||
sudo useradd -r -m -s /bin/false remoteterm
|
||||
|
||||
# Install to /opt/remoteterm
|
||||
sudo mkdir -p /opt/remoteterm
|
||||
sudo cp -r . /opt/remoteterm/
|
||||
sudo chown -R remoteterm:remoteterm /opt/remoteterm
|
||||
|
||||
# Install dependencies
|
||||
cd /opt/remoteterm
|
||||
sudo -u remoteterm uv venv
|
||||
sudo -u remoteterm uv sync
|
||||
|
||||
# If deploying from a source checkout, build the frontend first
|
||||
sudo -u remoteterm bash -lc 'cd /opt/remoteterm/frontend && npm install && npm run build'
|
||||
|
||||
# If deploying from the release zip artifact, frontend/prebuilt is already present
|
||||
```
|
||||
|
||||
Create `/etc/systemd/system/remoteterm.service` with:
|
||||
|
||||
```ini
|
||||
[Unit]
|
||||
Description=RemoteTerm for MeshCore
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=remoteterm
|
||||
Group=remoteterm
|
||||
WorkingDirectory=/opt/remoteterm
|
||||
ExecStart=/opt/remoteterm/.venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
Environment=MESHCORE_DATABASE_PATH=/opt/remoteterm/data/meshcore.db
|
||||
# Uncomment and set if auto-detection doesn't work:
|
||||
# Environment=MESHCORE_SERIAL_PORT=/dev/ttyUSB0
|
||||
SupplementaryGroups=dialout
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
Then install and start it:
|
||||
|
||||
```bash
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now remoteterm
|
||||
sudo systemctl status remoteterm
|
||||
sudo journalctl -u remoteterm -f
|
||||
```
|
||||
|
||||
## Debug Logging And Bug Reports
|
||||
|
||||
If you're experiencing issues or opening a bug report, please start the backend with debug logging enabled. Debug mode provides a much more detailed breakdown of radio communication, packet processing, and other internal operations, which makes it significantly easier to diagnose problems.
|
||||
|
||||
```bash
|
||||
MESHCORE_LOG_LEVEL=DEBUG uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
You can also navigate to `/api/debug` (or go to Settings -> About -> "Open debug support snapshot" at the bottom). This debug block contains information about the operating environment, expectations around keys and channels, and radio status. It also includes the most recent logs. **Non-log information reveals no keys, channel names, or other privilege information beyond the names of your bots. The logs, however, may contain channel names or keys (but never your private key).** If you do not wish to include this information, copy up to the `STOP COPYING HERE` marker in the debug body.
|
||||
|
||||
## Development Notes
|
||||
|
||||
For day-to-day development, see [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
|
||||
Windows note: I've seen an intermittent startup issue like `"Received empty packet: index out of range"` with failed contact sync. I can't figure out why this happens. The issue typically resolves on restart. If you can figure out why this happens, I will buy you a virtual or IRL six pack if you're in the PNW. As a former always-windows-girlie before embracing WSL2, I despise second-classing M$FT users, but I'm just stuck with this one.
|
||||
@@ -272,6 +272,8 @@ Repository writes should prefer typed models such as `ContactUpsert` over ad hoc
|
||||
- `flood_scope`
|
||||
- `blocked_keys`, `blocked_names`
|
||||
|
||||
Note: `sidebar_sort_order` remains in the backend model for compatibility and migration, but the current frontend sidebar uses per-section localStorage sort preferences instead of a single shared server-backed sort mode.
|
||||
|
||||
Note: MQTT, community MQTT, and bot configs were migrated to the `fanout_configs` table (migrations 36-38).
|
||||
|
||||
## Security Posture (intentional)
|
||||
@@ -351,7 +353,7 @@ tests/
|
||||
|
||||
The MeshCore radio protocol encodes `sender_timestamp` as a 4-byte little-endian integer (Unix seconds). This is a firmware-level wire format — the radio, the Python library (`commands/messaging.py`), and the decoder (`decoder.py`) all read/write exactly 4 bytes. Millisecond Unix timestamps would overflow 4 bytes, so higher resolution is not possible without a firmware change.
|
||||
|
||||
**Consequence:** The dedup index `(type, conversation_key, text, COALESCE(sender_timestamp, 0))` operates at 1-second granularity. Sending identical text to the same conversation twice within one second will hit the UNIQUE constraint on the second insert, returning HTTP 500 *after* the radio has already transmitted. The message is sent over the air but not stored in the database. Do not attempt to fix this by switching to millisecond timestamps — it will break echo dedup (the echo's 4-byte timestamp won't match the stored value) and overflow `to_bytes(4, "little")`.
|
||||
**Consequence:** Channel-message dedup still operates at 1-second granularity because the radio protocol only provides second-resolution `sender_timestamp`. Do not attempt to fix this by switching to millisecond timestamps — it will break echo dedup (the echo's 4-byte timestamp won't match the stored value) and overflow `to_bytes(4, "little")`. Direct messages no longer share that channel dedup index; they are deduplicated by raw-packet identity instead so legitimate same-text same-second DMs can coexist.
|
||||
|
||||
### Outgoing DM echoes remain undecrypted
|
||||
|
||||
|
||||
10
app/channel_constants.py
Normal file
10
app/channel_constants.py
Normal file
@@ -0,0 +1,10 @@
|
||||
PUBLIC_CHANNEL_KEY = "8B3387E9C5CDEA6AC9E5EDBAA115CD72"
|
||||
PUBLIC_CHANNEL_NAME = "Public"
|
||||
|
||||
|
||||
def is_public_channel_key(key: str) -> bool:
|
||||
return key.upper() == PUBLIC_CHANNEL_KEY
|
||||
|
||||
|
||||
def is_public_channel_name(name: str) -> bool:
|
||||
return name.casefold() == PUBLIC_CHANNEL_NAME.casefold()
|
||||
@@ -50,9 +50,10 @@ CREATE TABLE IF NOT EXISTS messages (
|
||||
acked INTEGER DEFAULT 0,
|
||||
sender_name TEXT,
|
||||
sender_key TEXT
|
||||
-- Deduplication: identical text + timestamp in the same conversation is treated as a
|
||||
-- mesh echo/repeat. Outgoing sends allocate a collision-free sender_timestamp before
|
||||
-- transmit so legitimate repeat sends do not collide with this index.
|
||||
-- Deduplication: channel echoes/repeats use a channel-only unique index on
|
||||
-- identical conversation/text/timestamp. Direct messages are deduplicated
|
||||
-- separately via raw-packet linkage so legitimate same-text same-second DMs
|
||||
-- can coexist.
|
||||
-- Enforced via idx_messages_dedup_null_safe (unique index) rather than a table constraint
|
||||
-- to avoid the storage overhead of SQLite's autoindex duplicating every message text.
|
||||
);
|
||||
@@ -90,7 +91,8 @@ CREATE TABLE IF NOT EXISTS contact_name_history (
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_received ON messages(received_at);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe
|
||||
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0));
|
||||
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
|
||||
WHERE type = 'CHAN';
|
||||
CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_raw_packets_payload_hash ON raw_packets(payload_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_contacts_on_radio ON contacts(on_radio);
|
||||
|
||||
@@ -85,6 +85,10 @@ class PacketInfo:
|
||||
path_hash_size: int = 1 # Bytes per hop: 1, 2, or 3
|
||||
|
||||
|
||||
def _is_valid_advert_location(lat: float, lon: float) -> bool:
|
||||
return -90 <= lat <= 90 and -180 <= lon <= 180
|
||||
|
||||
|
||||
def extract_payload(raw_packet: bytes) -> bytes | None:
|
||||
"""
|
||||
Extract just the payload from a raw packet, skipping header and path.
|
||||
@@ -243,7 +247,9 @@ def get_packet_payload_type(raw_packet: bytes) -> PayloadType | None:
|
||||
return None
|
||||
|
||||
|
||||
def parse_advertisement(payload: bytes) -> ParsedAdvertisement | None:
|
||||
def parse_advertisement(
|
||||
payload: bytes, raw_packet: bytes | None = None
|
||||
) -> ParsedAdvertisement | None:
|
||||
"""
|
||||
Parse an advertisement payload.
|
||||
|
||||
@@ -299,6 +305,16 @@ def parse_advertisement(payload: bytes) -> ParsedAdvertisement | None:
|
||||
lon_raw = int.from_bytes(payload[offset + 4 : offset + 8], byteorder="little", signed=True)
|
||||
lat = lat_raw / 1_000_000
|
||||
lon = lon_raw / 1_000_000
|
||||
if not _is_valid_advert_location(lat, lon):
|
||||
packet_hex = (raw_packet if raw_packet is not None else payload).hex().upper()
|
||||
logger.warning(
|
||||
"Dropping location data for nonsensical packet -- packet %s implies lat/lon %s/%s. Outta this world!",
|
||||
packet_hex,
|
||||
lat,
|
||||
lon,
|
||||
)
|
||||
lat = None
|
||||
lon = None
|
||||
offset += 8
|
||||
|
||||
# Skip feature fields if present
|
||||
|
||||
@@ -4,19 +4,21 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from meshcore import EventType
|
||||
|
||||
from app.models import CONTACT_TYPE_REPEATER, Contact, ContactUpsert
|
||||
from app.models import Contact, ContactUpsert
|
||||
from app.packet_processor import process_raw_packet
|
||||
from app.repository import (
|
||||
AmbiguousPublicKeyPrefixError,
|
||||
ContactRepository,
|
||||
)
|
||||
from app.services import dm_ack_tracker
|
||||
from app.services.contact_reconciliation import (
|
||||
claim_prefix_messages_for_contact,
|
||||
promote_prefix_contacts_for_contact,
|
||||
record_contact_name_and_reconcile,
|
||||
)
|
||||
from app.services.messages import create_fallback_direct_message, increment_ack_and_broadcast
|
||||
from app.services.dm_ingest import (
|
||||
ingest_fallback_direct_message,
|
||||
resolve_fallback_direct_message_context,
|
||||
)
|
||||
from app.services.messages import increment_ack_and_broadcast
|
||||
from app.websocket import broadcast_event
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -51,8 +53,8 @@ async def on_contact_message(event: "Event") -> None:
|
||||
2. The packet processor couldn't match the sender to a known contact
|
||||
|
||||
The packet processor handles: decryption, storage, broadcast, bot trigger.
|
||||
This handler only stores if the packet processor didn't already handle it
|
||||
(detected via INSERT OR IGNORE returning None for duplicates).
|
||||
This handler adapts CONTACT_MSG_RECV payloads into the shared DM ingest
|
||||
workflow, which reconciles duplicates against the packet pipeline when possible.
|
||||
"""
|
||||
payload = event.payload
|
||||
|
||||
@@ -66,54 +68,27 @@ async def on_contact_message(event: "Event") -> None:
|
||||
sender_pubkey = payload.get("public_key") or payload.get("pubkey_prefix", "")
|
||||
received_at = int(time.time())
|
||||
|
||||
# Look up contact from database - use prefix lookup only if needed
|
||||
# (get_by_key_or_prefix does exact match first, then prefix fallback)
|
||||
try:
|
||||
contact = await ContactRepository.get_by_key_or_prefix(sender_pubkey)
|
||||
except AmbiguousPublicKeyPrefixError:
|
||||
logger.warning(
|
||||
"DM sender prefix '%s' is ambiguous; storing under prefix until full key is known",
|
||||
sender_pubkey,
|
||||
context = await resolve_fallback_direct_message_context(
|
||||
sender_public_key=sender_pubkey,
|
||||
received_at=received_at,
|
||||
broadcast_fn=broadcast_event,
|
||||
contact_repository=ContactRepository,
|
||||
log=logger,
|
||||
)
|
||||
if context.skip_storage:
|
||||
logger.debug(
|
||||
"Skipping message from repeater %s (not stored in chat history)",
|
||||
context.conversation_key[:12],
|
||||
)
|
||||
contact = None
|
||||
if contact:
|
||||
sender_pubkey = contact.public_key.lower()
|
||||
return
|
||||
|
||||
# Promote any prefix-stored messages to this full key
|
||||
await claim_prefix_messages_for_contact(public_key=sender_pubkey, log=logger)
|
||||
|
||||
# Skip messages from repeaters - they only send CLI responses, not chat messages.
|
||||
# CLI responses are handled by the command endpoint and txt_type filter above.
|
||||
if contact.type == CONTACT_TYPE_REPEATER:
|
||||
logger.debug(
|
||||
"Skipping message from repeater %s (not stored in chat history)",
|
||||
sender_pubkey[:12],
|
||||
)
|
||||
return
|
||||
elif sender_pubkey:
|
||||
placeholder_upsert = ContactUpsert(
|
||||
public_key=sender_pubkey.lower(),
|
||||
type=0,
|
||||
last_seen=received_at,
|
||||
last_contacted=received_at,
|
||||
first_seen=received_at,
|
||||
on_radio=False,
|
||||
out_path_hash_mode=-1,
|
||||
)
|
||||
await ContactRepository.upsert(placeholder_upsert)
|
||||
contact = await ContactRepository.get_by_key(sender_pubkey.lower())
|
||||
if contact:
|
||||
broadcast_event("contact", contact.model_dump())
|
||||
|
||||
# Try to create message - INSERT OR IGNORE handles duplicates atomically
|
||||
# If the packet processor already stored this message, this returns None
|
||||
# Try to create or reconcile the message via the shared DM ingest service.
|
||||
ts = payload.get("sender_timestamp")
|
||||
sender_timestamp = ts if ts is not None else received_at
|
||||
sender_name = contact.name if contact else None
|
||||
path = payload.get("path")
|
||||
path_len = payload.get("path_len")
|
||||
message = await create_fallback_direct_message(
|
||||
conversation_key=sender_pubkey,
|
||||
message = await ingest_fallback_direct_message(
|
||||
conversation_key=context.conversation_key,
|
||||
text=payload.get("text", ""),
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
@@ -121,23 +96,24 @@ async def on_contact_message(event: "Event") -> None:
|
||||
path_len=path_len,
|
||||
txt_type=txt_type,
|
||||
signature=payload.get("signature"),
|
||||
sender_name=sender_name,
|
||||
sender_key=sender_pubkey,
|
||||
sender_name=context.sender_name,
|
||||
sender_key=context.sender_key,
|
||||
broadcast_fn=broadcast_event,
|
||||
update_last_contacted_key=context.contact.public_key.lower() if context.contact else None,
|
||||
)
|
||||
|
||||
if message is None:
|
||||
# Already handled by packet processor (or exact duplicate) - nothing more to do
|
||||
logger.debug("DM from %s already processed by packet processor", sender_pubkey[:12])
|
||||
logger.debug(
|
||||
"DM from %s already processed by packet processor", context.conversation_key[:12]
|
||||
)
|
||||
return
|
||||
|
||||
# If we get here, the packet processor didn't handle this message
|
||||
# (likely because private key export is not available)
|
||||
logger.debug("DM from %s handled by event handler (fallback path)", sender_pubkey[:12])
|
||||
|
||||
# Update contact last_contacted (contact was already fetched above)
|
||||
if contact:
|
||||
await ContactRepository.update_last_contacted(sender_pubkey, received_at)
|
||||
logger.debug(
|
||||
"DM from %s handled by event handler (fallback path)", context.conversation_key[:12]
|
||||
)
|
||||
|
||||
|
||||
async def on_rx_log_data(event: "Event") -> None:
|
||||
|
||||
@@ -10,6 +10,10 @@ logger = logging.getLogger(__name__)
|
||||
INDEX_CACHE_CONTROL = "no-store"
|
||||
ASSET_CACHE_CONTROL = "public, max-age=31536000, immutable"
|
||||
STATIC_FILE_CACHE_CONTROL = "public, max-age=3600"
|
||||
FRONTEND_BUILD_INSTRUCTIONS = (
|
||||
"Run 'cd frontend && npm install && npm run build', "
|
||||
"or use a release zip that includes frontend/prebuilt."
|
||||
)
|
||||
|
||||
|
||||
class CacheControlStaticFiles(StaticFiles):
|
||||
@@ -48,40 +52,38 @@ def _resolve_request_origin(request: Request) -> str:
|
||||
return str(request.base_url).rstrip("/")
|
||||
|
||||
|
||||
def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
||||
"""Register frontend static file routes if a built frontend is available.
|
||||
|
||||
Returns True when routes are registered, False when frontend files are
|
||||
missing/incomplete. Missing frontend files are logged but are not fatal.
|
||||
"""
|
||||
def _validate_frontend_dir(frontend_dir: Path, *, log_failures: bool = True) -> tuple[bool, Path]:
|
||||
"""Resolve and validate a built frontend directory."""
|
||||
frontend_dir = frontend_dir.resolve()
|
||||
index_file = frontend_dir / "index.html"
|
||||
assets_dir = frontend_dir / "assets"
|
||||
|
||||
if not frontend_dir.exists():
|
||||
logger.error(
|
||||
"Frontend build directory not found at %s. "
|
||||
"Run 'cd frontend && npm run build'. API will continue without frontend routes.",
|
||||
frontend_dir,
|
||||
)
|
||||
return False
|
||||
if log_failures:
|
||||
logger.error("Frontend build directory not found at %s.", frontend_dir)
|
||||
return False, frontend_dir
|
||||
|
||||
if not frontend_dir.is_dir():
|
||||
logger.error(
|
||||
"Frontend build path is not a directory: %s. "
|
||||
"API will continue without frontend routes.",
|
||||
frontend_dir,
|
||||
)
|
||||
return False
|
||||
if log_failures:
|
||||
logger.error("Frontend build path is not a directory: %s.", frontend_dir)
|
||||
return False, frontend_dir
|
||||
|
||||
if not index_file.exists():
|
||||
logger.error(
|
||||
"Frontend index file not found at %s. "
|
||||
"Run 'cd frontend && npm run build'. API will continue without frontend routes.",
|
||||
index_file,
|
||||
)
|
||||
if log_failures:
|
||||
logger.error("Frontend index file not found at %s.", index_file)
|
||||
return False, frontend_dir
|
||||
|
||||
return True, frontend_dir
|
||||
|
||||
|
||||
def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
||||
"""Register frontend static file routes if a built frontend is available."""
|
||||
valid, frontend_dir = _validate_frontend_dir(frontend_dir)
|
||||
if not valid:
|
||||
return False
|
||||
|
||||
index_file = frontend_dir / "index.html"
|
||||
assets_dir = frontend_dir / "assets"
|
||||
|
||||
if assets_dir.exists() and assets_dir.is_dir():
|
||||
app.mount(
|
||||
"/assets",
|
||||
@@ -157,6 +159,30 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def register_first_available_frontend_static_routes(
|
||||
app: FastAPI, frontend_dirs: list[Path]
|
||||
) -> Path | None:
|
||||
"""Register frontend routes from the first valid build directory."""
|
||||
for i, candidate in enumerate(frontend_dirs):
|
||||
valid, resolved_candidate = _validate_frontend_dir(candidate, log_failures=False)
|
||||
if not valid:
|
||||
continue
|
||||
|
||||
if register_frontend_static_routes(app, resolved_candidate):
|
||||
logger.info("Selected frontend build directory %s", resolved_candidate)
|
||||
return resolved_candidate
|
||||
|
||||
if i < len(frontend_dirs) - 1:
|
||||
logger.warning("Frontend build at %s was unusable; trying fallback", resolved_candidate)
|
||||
|
||||
logger.error(
|
||||
"No usable frontend build found. Searched: %s. %s API will continue without frontend routes.",
|
||||
", ".join(str(path.resolve()) for path in frontend_dirs),
|
||||
FRONTEND_BUILD_INSTRUCTIONS,
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def register_frontend_missing_fallback(app: FastAPI) -> None:
|
||||
"""Register a fallback route that tells the user to build the frontend."""
|
||||
|
||||
@@ -164,7 +190,5 @@ def register_frontend_missing_fallback(app: FastAPI) -> None:
|
||||
async def frontend_not_built():
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content={
|
||||
"detail": "Frontend not built. Run: cd frontend && npm install && npm run build"
|
||||
},
|
||||
content={"detail": f"Frontend not built. {FRONTEND_BUILD_INSTRUCTIONS}"},
|
||||
)
|
||||
|
||||
@@ -30,6 +30,16 @@ _private_key: bytes | None = None
|
||||
_public_key: bytes | None = None
|
||||
|
||||
|
||||
def clear_keys() -> None:
|
||||
"""Clear any stored private/public key material from memory."""
|
||||
global _private_key, _public_key
|
||||
had_key = _private_key is not None or _public_key is not None
|
||||
_private_key = None
|
||||
_public_key = None
|
||||
if had_key:
|
||||
logger.info("Cleared in-memory keystore")
|
||||
|
||||
|
||||
def set_private_key(key: bytes) -> None:
|
||||
"""Store the private key in memory and derive the public key.
|
||||
|
||||
|
||||
12
app/main.py
12
app/main.py
@@ -11,7 +11,10 @@ from fastapi.responses import JSONResponse
|
||||
from app.config import settings as server_settings
|
||||
from app.config import setup_logging
|
||||
from app.database import db
|
||||
from app.frontend_static import register_frontend_missing_fallback, register_frontend_static_routes
|
||||
from app.frontend_static import (
|
||||
register_first_available_frontend_static_routes,
|
||||
register_frontend_missing_fallback,
|
||||
)
|
||||
from app.radio import RadioDisconnectedError
|
||||
from app.radio_sync import (
|
||||
stop_message_polling,
|
||||
@@ -151,6 +154,9 @@ app.include_router(statistics.router, prefix="/api")
|
||||
app.include_router(ws.router, prefix="/api")
|
||||
|
||||
# Serve frontend static files in production
|
||||
FRONTEND_DIR = Path(__file__).parent.parent / "frontend" / "dist"
|
||||
if not register_frontend_static_routes(app, FRONTEND_DIR):
|
||||
FRONTEND_DIST_DIR = Path(__file__).parent.parent / "frontend" / "dist"
|
||||
FRONTEND_PREBUILT_DIR = Path(__file__).parent.parent / "frontend" / "prebuilt"
|
||||
if not register_first_available_frontend_static_routes(
|
||||
app, [FRONTEND_DIST_DIR, FRONTEND_PREBUILT_DIR]
|
||||
):
|
||||
register_frontend_missing_fallback(app)
|
||||
|
||||
@@ -331,6 +331,13 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
|
||||
await set_version(conn, 42)
|
||||
applied += 1
|
||||
|
||||
# Migration 43: Limit message dedup index to channel messages only
|
||||
if version < 43:
|
||||
logger.info("Applying migration 43: narrow message dedup index to channels")
|
||||
await _migrate_043_split_message_dedup_by_type(conn)
|
||||
await set_version(conn, 43)
|
||||
applied += 1
|
||||
|
||||
if applied > 0:
|
||||
logger.info(
|
||||
"Applied %d migration(s), schema now at version %d", applied, await get_version(conn)
|
||||
@@ -2443,3 +2450,29 @@ async def _migrate_042_add_channel_flood_scope_override(conn: aiosqlite.Connecti
|
||||
raise
|
||||
|
||||
await conn.commit()
|
||||
|
||||
|
||||
async def _migrate_043_split_message_dedup_by_type(conn: aiosqlite.Connection) -> None:
|
||||
"""Restrict the message dedup index to channel messages."""
|
||||
cursor = await conn.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='messages'"
|
||||
)
|
||||
if await cursor.fetchone() is None:
|
||||
await conn.commit()
|
||||
return
|
||||
|
||||
cursor = await conn.execute("PRAGMA table_info(messages)")
|
||||
columns = {row[1] for row in await cursor.fetchall()}
|
||||
required_columns = {"type", "conversation_key", "text", "sender_timestamp"}
|
||||
if not required_columns.issubset(columns):
|
||||
logger.debug("messages table missing dedup-index columns, skipping migration 43")
|
||||
await conn.commit()
|
||||
return
|
||||
|
||||
await conn.execute("DROP INDEX IF EXISTS idx_messages_dedup_null_safe")
|
||||
await conn.execute(
|
||||
"""CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe
|
||||
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
|
||||
WHERE type = 'CHAN'"""
|
||||
)
|
||||
await conn.commit()
|
||||
|
||||
@@ -410,6 +410,11 @@ class RepeaterLoginResponse(BaseModel):
|
||||
"""Response from repeater login."""
|
||||
|
||||
status: str = Field(description="Login result status")
|
||||
authenticated: bool = Field(description="Whether repeater authentication was confirmed")
|
||||
message: str | None = Field(
|
||||
default=None,
|
||||
description="Optional warning or error message when authentication was not confirmed",
|
||||
)
|
||||
|
||||
|
||||
class RepeaterStatusResponse(BaseModel):
|
||||
|
||||
@@ -425,7 +425,7 @@ async def _process_advertisement(
|
||||
logger.debug("Failed to parse advertisement packet")
|
||||
return
|
||||
|
||||
advert = parse_advertisement(packet_info.payload)
|
||||
advert = parse_advertisement(packet_info.payload, raw_packet=raw_bytes)
|
||||
if not advert:
|
||||
logger.debug("Failed to parse advertisement payload")
|
||||
return
|
||||
@@ -477,8 +477,9 @@ async def _process_advertisement(
|
||||
path_len,
|
||||
)
|
||||
|
||||
# Use device_role from advertisement for contact type (1=Chat, 2=Repeater, 3=Room, 4=Sensor)
|
||||
# Use advert.timestamp for last_advert (sender's timestamp), receive timestamp for last_seen
|
||||
# Use device_role from advertisement for contact type (1=Chat, 2=Repeater, 3=Room, 4=Sensor).
|
||||
# Persist advert freshness fields using the server receive wall clock so
|
||||
# route selection is not affected by sender clock skew.
|
||||
contact_type = (
|
||||
advert.device_role if advert.device_role > 0 else (existing.type if existing else 0)
|
||||
)
|
||||
@@ -498,7 +499,7 @@ async def _process_advertisement(
|
||||
type=contact_type,
|
||||
lat=advert.lat,
|
||||
lon=advert.lon,
|
||||
last_advert=advert.timestamp if advert.timestamp > 0 else timestamp,
|
||||
last_advert=timestamp,
|
||||
last_seen=timestamp,
|
||||
last_path=path_hex,
|
||||
last_path_len=path_len,
|
||||
|
||||
35
app/radio.py
35
app/radio.py
@@ -9,8 +9,10 @@ from pathlib import Path
|
||||
from meshcore import MeshCore
|
||||
|
||||
from app.config import settings
|
||||
from app.keystore import clear_keys
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
MAX_FRONTEND_RECONNECT_ERROR_BROADCASTS = 3
|
||||
|
||||
|
||||
class RadioOperationError(RuntimeError):
|
||||
@@ -130,6 +132,12 @@ class RadioManager:
|
||||
self._setup_lock: asyncio.Lock | None = None
|
||||
self._setup_in_progress: bool = False
|
||||
self._setup_complete: bool = False
|
||||
self._frontend_reconnect_error_broadcasts: int = 0
|
||||
self.device_info_loaded: bool = False
|
||||
self.max_contacts: int | None = None
|
||||
self.device_model: str | None = None
|
||||
self.firmware_build: str | None = None
|
||||
self.firmware_version: str | None = None
|
||||
self.max_channels: int = 40
|
||||
self.path_hash_mode: int = 0
|
||||
self.path_hash_mode_supported: bool = False
|
||||
@@ -381,6 +389,21 @@ class RadioManager:
|
||||
self._last_connected = False
|
||||
await self.disconnect()
|
||||
|
||||
def _reset_reconnect_error_broadcasts(self) -> None:
|
||||
self._frontend_reconnect_error_broadcasts = 0
|
||||
|
||||
def _broadcast_reconnect_error_if_needed(self, details: str) -> None:
|
||||
from app.websocket import broadcast_error
|
||||
|
||||
self._frontend_reconnect_error_broadcasts += 1
|
||||
if self._frontend_reconnect_error_broadcasts > MAX_FRONTEND_RECONNECT_ERROR_BROADCASTS:
|
||||
return
|
||||
|
||||
if self._frontend_reconnect_error_broadcasts == MAX_FRONTEND_RECONNECT_ERROR_BROADCASTS:
|
||||
details = f"{details} Further reconnect failures will be logged only until a connection succeeds."
|
||||
|
||||
broadcast_error("Reconnection failed", details)
|
||||
|
||||
async def _disable_meshcore_auto_reconnect(self, mc: MeshCore) -> None:
|
||||
"""Disable library-managed reconnects so manual teardown fully releases transport."""
|
||||
connection_manager = getattr(mc, "connection_manager", None)
|
||||
@@ -478,6 +501,8 @@ class RadioManager:
|
||||
|
||||
async def disconnect(self) -> None:
|
||||
"""Disconnect from the radio."""
|
||||
clear_keys()
|
||||
self._reset_reconnect_error_broadcasts()
|
||||
if self._meshcore is not None:
|
||||
logger.debug("Disconnecting from radio")
|
||||
mc = self._meshcore
|
||||
@@ -486,6 +511,11 @@ class RadioManager:
|
||||
await self._disable_meshcore_auto_reconnect(mc)
|
||||
self._meshcore = None
|
||||
self._setup_complete = False
|
||||
self.device_info_loaded = False
|
||||
self.max_contacts = None
|
||||
self.device_model = None
|
||||
self.firmware_build = None
|
||||
self.firmware_version = None
|
||||
self.max_channels = 40
|
||||
self.path_hash_mode = 0
|
||||
self.path_hash_mode_supported = False
|
||||
@@ -499,7 +529,7 @@ class RadioManager:
|
||||
Returns True if reconnection was successful, False otherwise.
|
||||
Uses a lock to prevent concurrent reconnection attempts.
|
||||
"""
|
||||
from app.websocket import broadcast_error, broadcast_health
|
||||
from app.websocket import broadcast_health
|
||||
|
||||
# Lazily initialize lock (can't create in __init__ before event loop exists)
|
||||
if self._reconnect_lock is None:
|
||||
@@ -537,6 +567,7 @@ class RadioManager:
|
||||
|
||||
if self.is_connected:
|
||||
logger.info("Radio reconnected successfully at %s", self._connection_info)
|
||||
self._reset_reconnect_error_broadcasts()
|
||||
if broadcast_on_success:
|
||||
broadcast_health(True, self._connection_info)
|
||||
return True
|
||||
@@ -546,7 +577,7 @@ class RadioManager:
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Reconnection failed: %s", e, exc_info=True)
|
||||
broadcast_error("Reconnection failed", str(e))
|
||||
self._broadcast_reconnect_error_if_needed(str(e))
|
||||
return False
|
||||
|
||||
async def start_connection_monitor(self) -> None:
|
||||
|
||||
@@ -17,6 +17,7 @@ from contextlib import asynccontextmanager
|
||||
|
||||
from meshcore import EventType, MeshCore
|
||||
|
||||
from app.channel_constants import PUBLIC_CHANNEL_KEY, PUBLIC_CHANNEL_NAME
|
||||
from app.config import settings
|
||||
from app.event_handlers import cleanup_expired_acks
|
||||
from app.models import Contact, ContactUpsert
|
||||
@@ -443,16 +444,13 @@ async def ensure_default_channels() -> None:
|
||||
This seeds the canonical Public channel row in the database if it is missing
|
||||
or misnamed. It does not make the channel undeletable through the router.
|
||||
"""
|
||||
# Public channel - no hashtag, specific well-known key
|
||||
PUBLIC_CHANNEL_KEY_HEX = "8B3387E9C5CDEA6AC9E5EDBAA115CD72"
|
||||
|
||||
# Check by KEY (not name) since that's what's fixed
|
||||
existing = await ChannelRepository.get_by_key(PUBLIC_CHANNEL_KEY_HEX)
|
||||
if not existing or existing.name != "Public":
|
||||
existing = await ChannelRepository.get_by_key(PUBLIC_CHANNEL_KEY)
|
||||
if not existing or existing.name != PUBLIC_CHANNEL_NAME:
|
||||
logger.info("Ensuring default Public channel exists with correct name")
|
||||
await ChannelRepository.upsert(
|
||||
key=PUBLIC_CHANNEL_KEY_HEX,
|
||||
name="Public",
|
||||
key=PUBLIC_CHANNEL_KEY,
|
||||
name=PUBLIC_CHANNEL_NAME,
|
||||
is_hashtag=False,
|
||||
on_radio=existing.on_radio if existing else False,
|
||||
)
|
||||
|
||||
@@ -109,6 +109,18 @@ class RawPacketRepository:
|
||||
)
|
||||
await db.conn.commit()
|
||||
|
||||
@staticmethod
|
||||
async def get_linked_message_id(packet_id: int) -> int | None:
|
||||
"""Return the linked message ID for a raw packet, if any."""
|
||||
cursor = await db.conn.execute(
|
||||
"SELECT message_id FROM raw_packets WHERE id = ?",
|
||||
(packet_id,),
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
return row["message_id"]
|
||||
|
||||
@staticmethod
|
||||
async def prune_old_undecrypted(max_age_days: int) -> int:
|
||||
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
|
||||
|
||||
@@ -4,6 +4,12 @@ from hashlib import sha256
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from app.channel_constants import (
|
||||
PUBLIC_CHANNEL_KEY,
|
||||
PUBLIC_CHANNEL_NAME,
|
||||
is_public_channel_key,
|
||||
is_public_channel_name,
|
||||
)
|
||||
from app.models import Channel, ChannelDetail, ChannelMessageCounts, ChannelTopSender
|
||||
from app.region_scope import normalize_region_scope
|
||||
from app.repository import ChannelRepository, MessageRepository
|
||||
@@ -62,10 +68,31 @@ async def create_channel(request: CreateChannelRequest) -> Channel:
|
||||
Channels are NOT pushed to radio on creation. They are loaded to the radio
|
||||
automatically when sending a message (see messages.py send_channel_message).
|
||||
"""
|
||||
is_hashtag = request.name.startswith("#")
|
||||
requested_name = request.name
|
||||
is_hashtag = requested_name.startswith("#")
|
||||
|
||||
# Determine the channel secret
|
||||
if request.key and not is_hashtag:
|
||||
# Reserve the canonical Public room so it cannot drift to another key,
|
||||
# and the well-known Public key cannot be renamed to something else.
|
||||
if is_public_channel_name(requested_name):
|
||||
if request.key:
|
||||
try:
|
||||
key_bytes = bytes.fromhex(request.key)
|
||||
if len(key_bytes) != 16:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Channel key must be exactly 16 bytes (32 hex chars)",
|
||||
)
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=400, detail="Invalid hex string for key") from None
|
||||
if key_bytes.hex().upper() != PUBLIC_CHANNEL_KEY:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f'"{PUBLIC_CHANNEL_NAME}" must use the canonical Public key',
|
||||
)
|
||||
key_hex = PUBLIC_CHANNEL_KEY
|
||||
channel_name = PUBLIC_CHANNEL_NAME
|
||||
is_hashtag = False
|
||||
elif request.key and not is_hashtag:
|
||||
try:
|
||||
key_bytes = bytes.fromhex(request.key)
|
||||
if len(key_bytes) != 16:
|
||||
@@ -74,17 +101,25 @@ async def create_channel(request: CreateChannelRequest) -> Channel:
|
||||
)
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=400, detail="Invalid hex string for key") from None
|
||||
key_hex = key_bytes.hex().upper()
|
||||
if is_public_channel_key(key_hex):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f'The canonical Public key may only be used for "{PUBLIC_CHANNEL_NAME}"',
|
||||
)
|
||||
channel_name = requested_name
|
||||
else:
|
||||
# Derive key from name hash (same as meshcore library does)
|
||||
key_bytes = sha256(request.name.encode("utf-8")).digest()[:16]
|
||||
key_bytes = sha256(requested_name.encode("utf-8")).digest()[:16]
|
||||
key_hex = key_bytes.hex().upper()
|
||||
channel_name = requested_name
|
||||
|
||||
key_hex = key_bytes.hex().upper()
|
||||
logger.info("Creating channel %s: %s (hashtag=%s)", key_hex, request.name, is_hashtag)
|
||||
logger.info("Creating channel %s: %s (hashtag=%s)", key_hex, channel_name, is_hashtag)
|
||||
|
||||
# Store in database only - radio sync happens at send time
|
||||
await ChannelRepository.upsert(
|
||||
key=key_hex,
|
||||
name=request.name,
|
||||
name=channel_name,
|
||||
is_hashtag=is_hashtag,
|
||||
on_radio=False,
|
||||
)
|
||||
@@ -140,6 +175,11 @@ async def delete_channel(key: str) -> dict:
|
||||
Note: This does not clear the channel from the radio. The radio's channel
|
||||
slots are managed separately (channels are loaded temporarily when sending).
|
||||
"""
|
||||
if is_public_channel_key(key):
|
||||
raise HTTPException(
|
||||
status_code=400, detail="The canonical Public channel cannot be deleted"
|
||||
)
|
||||
|
||||
logger.info("Deleting channel %s from database", key)
|
||||
await ChannelRepository.delete(key)
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import hashlib
|
||||
import importlib.metadata
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -21,6 +22,21 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["debug"])
|
||||
|
||||
LOG_COPY_BOUNDARY_MESSAGE = "STOP COPYING HERE IF YOU DO NOT WANT TO INCLUDE LOGS BELOW"
|
||||
LOG_COPY_BOUNDARY_LINE = "-" * 64
|
||||
RELEASE_BUILD_INFO_FILENAME = "build_info.json"
|
||||
LOG_COPY_BOUNDARY_PREFIX = [
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_MESSAGE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
]
|
||||
|
||||
|
||||
class DebugApplicationInfo(BaseModel):
|
||||
version: str
|
||||
@@ -114,11 +130,30 @@ def _git_output(*args: str) -> str | None:
|
||||
return output or None
|
||||
|
||||
|
||||
def _release_build_info() -> dict[str, Any] | None:
|
||||
build_info_path = _repo_root() / RELEASE_BUILD_INFO_FILENAME
|
||||
try:
|
||||
data = json.loads(build_info_path.read_text())
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if isinstance(data, dict):
|
||||
return data
|
||||
return None
|
||||
|
||||
|
||||
def _build_application_info() -> DebugApplicationInfo:
|
||||
release_build_info = _release_build_info()
|
||||
dirty_output = _git_output("status", "--porcelain")
|
||||
commit_hash = _git_output("rev-parse", "HEAD")
|
||||
if commit_hash is None and release_build_info is not None:
|
||||
commit_hash_value = release_build_info.get("commit_hash")
|
||||
if isinstance(commit_hash_value, str) and commit_hash_value.strip():
|
||||
commit_hash = commit_hash_value.strip()
|
||||
|
||||
return DebugApplicationInfo(
|
||||
version=_get_app_version(),
|
||||
commit_hash=_git_output("rev-parse", "HEAD"),
|
||||
commit_hash=commit_hash,
|
||||
git_branch=_git_output("rev-parse", "--abbrev-ref", "HEAD"),
|
||||
git_dirty=(dirty_output is not None and dirty_output != ""),
|
||||
python_version=sys.version.split()[0],
|
||||
@@ -295,5 +330,5 @@ async def debug_support_snapshot() -> DebugSnapshotResponse:
|
||||
},
|
||||
),
|
||||
radio_probe=radio_probe,
|
||||
logs=get_recent_log_lines(limit=1000),
|
||||
logs=[*LOG_COPY_BOUNDARY_PREFIX, *get_recent_log_lines(limit=1000)],
|
||||
)
|
||||
|
||||
@@ -78,6 +78,26 @@ class FanoutConfigUpdate(BaseModel):
|
||||
enabled: bool | None = Field(default=None, description="Enable/disable toggle")
|
||||
|
||||
|
||||
def _validate_and_normalize_config(config_type: str, config: dict) -> dict:
|
||||
"""Validate a config blob and return the canonical persisted form."""
|
||||
normalized = dict(config)
|
||||
|
||||
if config_type == "mqtt_private":
|
||||
_validate_mqtt_private_config(normalized)
|
||||
elif config_type == "mqtt_community":
|
||||
_validate_mqtt_community_config(normalized)
|
||||
elif config_type == "bot":
|
||||
_validate_bot_config(normalized)
|
||||
elif config_type == "webhook":
|
||||
_validate_webhook_config(normalized)
|
||||
elif config_type == "apprise":
|
||||
_validate_apprise_config(normalized)
|
||||
elif config_type == "sqs":
|
||||
_validate_sqs_config(normalized)
|
||||
|
||||
return normalized
|
||||
|
||||
|
||||
def _validate_mqtt_private_config(config: dict) -> None:
|
||||
"""Validate mqtt_private config blob."""
|
||||
if not config.get("broker_host"):
|
||||
@@ -323,28 +343,13 @@ async def create_fanout_config(body: FanoutConfigCreate) -> dict:
|
||||
if body.type == "bot" and server_settings.disable_bots:
|
||||
raise HTTPException(status_code=403, detail="Bot system disabled by server configuration")
|
||||
|
||||
# Only validate config when creating as enabled — disabled configs
|
||||
# are drafts the user hasn't finished configuring yet.
|
||||
if body.enabled:
|
||||
if body.type == "mqtt_private":
|
||||
_validate_mqtt_private_config(body.config)
|
||||
elif body.type == "mqtt_community":
|
||||
_validate_mqtt_community_config(body.config)
|
||||
elif body.type == "bot":
|
||||
_validate_bot_config(body.config)
|
||||
elif body.type == "webhook":
|
||||
_validate_webhook_config(body.config)
|
||||
elif body.type == "apprise":
|
||||
_validate_apprise_config(body.config)
|
||||
elif body.type == "sqs":
|
||||
_validate_sqs_config(body.config)
|
||||
|
||||
normalized_config = _validate_and_normalize_config(body.type, body.config)
|
||||
scope = _enforce_scope(body.type, body.scope)
|
||||
|
||||
cfg = await FanoutConfigRepository.create(
|
||||
config_type=body.type,
|
||||
name=body.name,
|
||||
config=body.config,
|
||||
config=normalized_config,
|
||||
scope=scope,
|
||||
enabled=body.enabled,
|
||||
)
|
||||
@@ -374,27 +379,11 @@ async def update_fanout_config(config_id: str, body: FanoutConfigUpdate) -> dict
|
||||
kwargs["name"] = body.name
|
||||
if body.enabled is not None:
|
||||
kwargs["enabled"] = body.enabled
|
||||
if body.config is not None:
|
||||
kwargs["config"] = body.config
|
||||
if body.scope is not None:
|
||||
kwargs["scope"] = _enforce_scope(existing["type"], body.scope)
|
||||
|
||||
# Validate config when the result will be enabled
|
||||
will_be_enabled = body.enabled if body.enabled is not None else existing["enabled"]
|
||||
if will_be_enabled:
|
||||
config_to_validate = body.config if body.config is not None else existing["config"]
|
||||
if existing["type"] == "mqtt_private":
|
||||
_validate_mqtt_private_config(config_to_validate)
|
||||
elif existing["type"] == "mqtt_community":
|
||||
_validate_mqtt_community_config(config_to_validate)
|
||||
elif existing["type"] == "bot":
|
||||
_validate_bot_config(config_to_validate)
|
||||
elif existing["type"] == "webhook":
|
||||
_validate_webhook_config(config_to_validate)
|
||||
elif existing["type"] == "apprise":
|
||||
_validate_apprise_config(config_to_validate)
|
||||
elif existing["type"] == "sqs":
|
||||
_validate_sqs_config(config_to_validate)
|
||||
config_to_validate = body.config if body.config is not None else existing["config"]
|
||||
kwargs["config"] = _validate_and_normalize_config(existing["type"], config_to_validate)
|
||||
|
||||
updated = await FanoutConfigRepository.update(config_id, **kwargs)
|
||||
if updated is None:
|
||||
|
||||
@@ -11,18 +11,34 @@ from app.services.radio_runtime import radio_runtime as radio_manager
|
||||
router = APIRouter(tags=["health"])
|
||||
|
||||
|
||||
class RadioDeviceInfoResponse(BaseModel):
|
||||
model: str | None = None
|
||||
firmware_build: str | None = None
|
||||
firmware_version: str | None = None
|
||||
max_contacts: int | None = None
|
||||
max_channels: int | None = None
|
||||
|
||||
|
||||
class HealthResponse(BaseModel):
|
||||
status: str
|
||||
radio_connected: bool
|
||||
radio_initializing: bool = False
|
||||
radio_state: str = "disconnected"
|
||||
connection_info: str | None
|
||||
radio_device_info: RadioDeviceInfoResponse | None = None
|
||||
database_size_mb: float
|
||||
oldest_undecrypted_timestamp: int | None
|
||||
fanout_statuses: dict[str, dict[str, str]] = {}
|
||||
bots_disabled: bool = False
|
||||
|
||||
|
||||
def _clean_optional_str(value: object) -> str | None:
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
cleaned = value.strip()
|
||||
return cleaned or None
|
||||
|
||||
|
||||
async def build_health_data(radio_connected: bool, connection_info: str | None) -> dict:
|
||||
"""Build the health status payload used by REST endpoint and WebSocket broadcasts."""
|
||||
db_size_mb = 0.0
|
||||
@@ -48,22 +64,12 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
|
||||
pass
|
||||
|
||||
setup_in_progress = getattr(radio_manager, "is_setup_in_progress", False)
|
||||
if not isinstance(setup_in_progress, bool):
|
||||
setup_in_progress = False
|
||||
|
||||
setup_complete = getattr(radio_manager, "is_setup_complete", radio_connected)
|
||||
if not isinstance(setup_complete, bool):
|
||||
setup_complete = radio_connected
|
||||
if not radio_connected:
|
||||
setup_complete = False
|
||||
|
||||
connection_desired = getattr(radio_manager, "connection_desired", True)
|
||||
if not isinstance(connection_desired, bool):
|
||||
connection_desired = True
|
||||
|
||||
is_reconnecting = getattr(radio_manager, "is_reconnecting", False)
|
||||
if not isinstance(is_reconnecting, bool):
|
||||
is_reconnecting = False
|
||||
|
||||
radio_initializing = bool(radio_connected and (setup_in_progress or not setup_complete))
|
||||
if not connection_desired:
|
||||
@@ -77,12 +83,26 @@ async def build_health_data(radio_connected: bool, connection_info: str | None)
|
||||
else:
|
||||
radio_state = "disconnected"
|
||||
|
||||
radio_device_info = None
|
||||
device_info_loaded = getattr(radio_manager, "device_info_loaded", False)
|
||||
if radio_connected and device_info_loaded:
|
||||
radio_device_info = {
|
||||
"model": _clean_optional_str(getattr(radio_manager, "device_model", None)),
|
||||
"firmware_build": _clean_optional_str(getattr(radio_manager, "firmware_build", None)),
|
||||
"firmware_version": _clean_optional_str(
|
||||
getattr(radio_manager, "firmware_version", None)
|
||||
),
|
||||
"max_contacts": getattr(radio_manager, "max_contacts", None),
|
||||
"max_channels": getattr(radio_manager, "max_channels", None),
|
||||
}
|
||||
|
||||
return {
|
||||
"status": "ok" if radio_connected and not radio_initializing else "degraded",
|
||||
"radio_connected": radio_connected,
|
||||
"radio_initializing": radio_initializing,
|
||||
"radio_state": radio_state,
|
||||
"connection_info": connection_info,
|
||||
"radio_device_info": radio_device_info,
|
||||
"database_size_mb": db_size_mb,
|
||||
"oldest_undecrypted_timestamp": oldest_ts,
|
||||
"fanout_statuses": fanout_statuses,
|
||||
|
||||
@@ -3,7 +3,7 @@ from hashlib import sha256
|
||||
from sqlite3 import OperationalError
|
||||
|
||||
import aiosqlite
|
||||
from fastapi import APIRouter, BackgroundTasks
|
||||
from fastapi import APIRouter, BackgroundTasks, HTTPException, Response, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from app.database import db
|
||||
@@ -40,6 +40,10 @@ class DecryptResult(BaseModel):
|
||||
message: str
|
||||
|
||||
|
||||
def _bad_request(detail: str) -> HTTPException:
|
||||
return HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=detail)
|
||||
|
||||
|
||||
async def _run_historical_channel_decryption(
|
||||
channel_key_bytes: bytes, channel_key_hex: str, display_name: str | None = None
|
||||
) -> None:
|
||||
@@ -100,7 +104,7 @@ async def get_undecrypted_count() -> dict:
|
||||
|
||||
@router.post("/decrypt/historical", response_model=DecryptResult)
|
||||
async def decrypt_historical_packets(
|
||||
request: DecryptRequest, background_tasks: BackgroundTasks
|
||||
request: DecryptRequest, background_tasks: BackgroundTasks, response: Response
|
||||
) -> DecryptResult:
|
||||
"""
|
||||
Attempt to decrypt historical packets with the provided key.
|
||||
@@ -112,27 +116,15 @@ async def decrypt_historical_packets(
|
||||
try:
|
||||
channel_key_bytes = bytes.fromhex(request.channel_key)
|
||||
if len(channel_key_bytes) != 16:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Channel key must be 16 bytes (32 hex chars)",
|
||||
)
|
||||
raise _bad_request("Channel key must be 16 bytes (32 hex chars)")
|
||||
channel_key_hex = request.channel_key.upper()
|
||||
except ValueError:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Invalid hex string for channel key",
|
||||
)
|
||||
raise _bad_request("Invalid hex string for channel key") from None
|
||||
elif request.channel_name:
|
||||
channel_key_bytes = sha256(request.channel_name.encode("utf-8")).digest()[:16]
|
||||
channel_key_hex = channel_key_bytes.hex().upper()
|
||||
else:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Must provide channel_key or channel_name",
|
||||
)
|
||||
raise _bad_request("Must provide channel_key or channel_name")
|
||||
|
||||
# Get count and lookup channel name for display
|
||||
count = await RawPacketRepository.get_undecrypted_count()
|
||||
@@ -148,6 +140,7 @@ async def decrypt_historical_packets(
|
||||
background_tasks.add_task(
|
||||
_run_historical_channel_decryption, channel_key_bytes, channel_key_hex, display_name
|
||||
)
|
||||
response.status_code = status.HTTP_202_ACCEPTED
|
||||
|
||||
return DecryptResult(
|
||||
started=True,
|
||||
@@ -158,48 +151,24 @@ async def decrypt_historical_packets(
|
||||
elif request.key_type == "contact":
|
||||
# DM decryption
|
||||
if not request.private_key:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Must provide private_key for contact decryption",
|
||||
)
|
||||
raise _bad_request("Must provide private_key for contact decryption")
|
||||
if not request.contact_public_key:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Must provide contact_public_key for contact decryption",
|
||||
)
|
||||
raise _bad_request("Must provide contact_public_key for contact decryption")
|
||||
|
||||
try:
|
||||
private_key_bytes = bytes.fromhex(request.private_key)
|
||||
if len(private_key_bytes) != 64:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Private key must be 64 bytes (128 hex chars)",
|
||||
)
|
||||
raise _bad_request("Private key must be 64 bytes (128 hex chars)")
|
||||
except ValueError:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Invalid hex string for private key",
|
||||
)
|
||||
raise _bad_request("Invalid hex string for private key") from None
|
||||
|
||||
try:
|
||||
contact_public_key_bytes = bytes.fromhex(request.contact_public_key)
|
||||
if len(contact_public_key_bytes) != 32:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Contact public key must be 32 bytes (64 hex chars)",
|
||||
)
|
||||
raise _bad_request("Contact public key must be 32 bytes (64 hex chars)")
|
||||
contact_public_key_hex = request.contact_public_key.lower()
|
||||
except ValueError:
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="Invalid hex string for contact public key",
|
||||
)
|
||||
raise _bad_request("Invalid hex string for contact public key") from None
|
||||
|
||||
packets = await RawPacketRepository.get_undecrypted_text_messages()
|
||||
count = len(packets)
|
||||
@@ -223,6 +192,7 @@ async def decrypt_historical_packets(
|
||||
contact_public_key_hex,
|
||||
display_name,
|
||||
)
|
||||
response.status_code = status.HTTP_202_ACCEPTED
|
||||
|
||||
return DecryptResult(
|
||||
started=True,
|
||||
@@ -230,11 +200,7 @@ async def decrypt_historical_packets(
|
||||
message=f"Started DM decryption of {count} TEXT_MESSAGE packets in background",
|
||||
)
|
||||
|
||||
return DecryptResult(
|
||||
started=False,
|
||||
total_packets=0,
|
||||
message="key_type must be 'channel' or 'contact'",
|
||||
)
|
||||
raise _bad_request("key_type must be 'channel' or 'contact'")
|
||||
|
||||
|
||||
class MaintenanceRequest(BaseModel):
|
||||
|
||||
@@ -44,8 +44,21 @@ ACL_PERMISSION_NAMES = {
|
||||
}
|
||||
router = APIRouter(prefix="/contacts", tags=["repeaters"])
|
||||
|
||||
# Delay between repeater radio operations to allow key exchange and path establishment
|
||||
REPEATER_OP_DELAY_SECONDS = 2.0
|
||||
REPEATER_LOGIN_RESPONSE_TIMEOUT_SECONDS = 5.0
|
||||
REPEATER_LOGIN_REJECTED_MESSAGE = (
|
||||
"The repeater replied but did not confirm this login. "
|
||||
"Existing access may still allow some repeater operations, but admin actions may fail."
|
||||
)
|
||||
REPEATER_LOGIN_SEND_FAILED_MESSAGE = (
|
||||
"The login request could not be sent to the repeater. "
|
||||
"The dashboard is still available, but repeater operations may fail until a login succeeds."
|
||||
)
|
||||
REPEATER_LOGIN_TIMEOUT_MESSAGE = (
|
||||
"No login confirmation was heard from the repeater. "
|
||||
"On current repeater firmware, that can mean the password was wrong, "
|
||||
"blank-password login was not allowed by the ACL, or the reply was missed in transit. "
|
||||
"The dashboard is still available; try logging in again if admin actions fail."
|
||||
)
|
||||
|
||||
|
||||
def _monotonic() -> float:
|
||||
@@ -136,31 +149,88 @@ async def _fetch_repeater_response(
|
||||
return None
|
||||
|
||||
|
||||
async def prepare_repeater_connection(mc, contact: Contact, password: str) -> None:
|
||||
"""Prepare connection to a repeater by adding to radio and logging in.
|
||||
async def prepare_repeater_connection(mc, contact: Contact, password: str) -> RepeaterLoginResponse:
|
||||
"""Prepare connection to a repeater by adding to radio and attempting login.
|
||||
|
||||
Args:
|
||||
mc: MeshCore instance
|
||||
contact: The repeater contact
|
||||
password: Password for login (empty string for no password)
|
||||
|
||||
Raises:
|
||||
HTTPException: If login fails
|
||||
"""
|
||||
pubkey_prefix = contact.public_key[:12].lower()
|
||||
loop = asyncio.get_running_loop()
|
||||
login_future = loop.create_future()
|
||||
|
||||
def _resolve_login(event_type: EventType, message: str | None = None) -> None:
|
||||
if login_future.done():
|
||||
return
|
||||
login_future.set_result(
|
||||
RepeaterLoginResponse(
|
||||
status="ok" if event_type == EventType.LOGIN_SUCCESS else "error",
|
||||
authenticated=event_type == EventType.LOGIN_SUCCESS,
|
||||
message=message,
|
||||
)
|
||||
)
|
||||
|
||||
success_subscription = mc.subscribe(
|
||||
EventType.LOGIN_SUCCESS,
|
||||
lambda _event: _resolve_login(EventType.LOGIN_SUCCESS),
|
||||
attribute_filters={"pubkey_prefix": pubkey_prefix},
|
||||
)
|
||||
failed_subscription = mc.subscribe(
|
||||
EventType.LOGIN_FAILED,
|
||||
lambda _event: _resolve_login(
|
||||
EventType.LOGIN_FAILED,
|
||||
REPEATER_LOGIN_REJECTED_MESSAGE,
|
||||
),
|
||||
attribute_filters={"pubkey_prefix": pubkey_prefix},
|
||||
)
|
||||
|
||||
# Add contact to radio with path from DB (non-fatal — contact may already be loaded)
|
||||
logger.info("Adding repeater %s to radio", contact.public_key[:12])
|
||||
await _ensure_on_radio(mc, contact)
|
||||
try:
|
||||
logger.info("Adding repeater %s to radio", contact.public_key[:12])
|
||||
await _ensure_on_radio(mc, contact)
|
||||
|
||||
# Send login with password
|
||||
logger.info("Sending login to repeater %s", contact.public_key[:12])
|
||||
login_result = await mc.commands.send_login(contact.public_key, password)
|
||||
logger.info("Sending login to repeater %s", contact.public_key[:12])
|
||||
login_result = await mc.commands.send_login(contact.public_key, password)
|
||||
|
||||
if login_result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=401, detail=f"Login failed: {login_result.payload}")
|
||||
if login_result.type == EventType.ERROR:
|
||||
return RepeaterLoginResponse(
|
||||
status="error",
|
||||
authenticated=False,
|
||||
message=f"{REPEATER_LOGIN_SEND_FAILED_MESSAGE} ({login_result.payload})",
|
||||
)
|
||||
|
||||
# Wait for key exchange to complete before sending requests
|
||||
logger.debug("Waiting %.1fs for key exchange to complete", REPEATER_OP_DELAY_SECONDS)
|
||||
await asyncio.sleep(REPEATER_OP_DELAY_SECONDS)
|
||||
try:
|
||||
return await asyncio.wait_for(
|
||||
login_future,
|
||||
timeout=REPEATER_LOGIN_RESPONSE_TIMEOUT_SECONDS,
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning(
|
||||
"No login response from repeater %s within %.1fs",
|
||||
contact.public_key[:12],
|
||||
REPEATER_LOGIN_RESPONSE_TIMEOUT_SECONDS,
|
||||
)
|
||||
return RepeaterLoginResponse(
|
||||
status="timeout",
|
||||
authenticated=False,
|
||||
message=REPEATER_LOGIN_TIMEOUT_MESSAGE,
|
||||
)
|
||||
except HTTPException as exc:
|
||||
logger.warning(
|
||||
"Repeater login setup failed for %s: %s",
|
||||
contact.public_key[:12],
|
||||
exc.detail,
|
||||
)
|
||||
return RepeaterLoginResponse(
|
||||
status="error",
|
||||
authenticated=False,
|
||||
message=f"{REPEATER_LOGIN_SEND_FAILED_MESSAGE} ({exc.detail})",
|
||||
)
|
||||
finally:
|
||||
success_subscription.unsubscribe()
|
||||
failed_subscription.unsubscribe()
|
||||
|
||||
|
||||
def _require_repeater(contact: Contact) -> None:
|
||||
@@ -180,7 +250,7 @@ def _require_repeater(contact: Contact) -> None:
|
||||
|
||||
@router.post("/{public_key}/repeater/login", response_model=RepeaterLoginResponse)
|
||||
async def repeater_login(public_key: str, request: RepeaterLoginRequest) -> RepeaterLoginResponse:
|
||||
"""Log in to a repeater. Adds contact to radio, sends login, waits for key exchange."""
|
||||
"""Attempt repeater login and report whether auth was confirmed."""
|
||||
require_connected()
|
||||
contact = await _resolve_contact_or_404(public_key)
|
||||
_require_repeater(contact)
|
||||
@@ -190,9 +260,7 @@ async def repeater_login(public_key: str, request: RepeaterLoginRequest) -> Repe
|
||||
pause_polling=True,
|
||||
suspend_auto_fetch=True,
|
||||
) as mc:
|
||||
await prepare_repeater_connection(mc, contact, request.password)
|
||||
|
||||
return RepeaterLoginResponse(status="ok")
|
||||
return await prepare_repeater_connection(mc, contact, request.password)
|
||||
|
||||
|
||||
@router.post("/{public_key}/repeater/status", response_model=RepeaterStatusResponse)
|
||||
|
||||
320
app/services/dm_ingest.py
Normal file
320
app/services/dm_ingest.py
Normal file
@@ -0,0 +1,320 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from app.models import CONTACT_TYPE_REPEATER, Contact, ContactUpsert, Message
|
||||
from app.repository import (
|
||||
AmbiguousPublicKeyPrefixError,
|
||||
ContactRepository,
|
||||
MessageRepository,
|
||||
RawPacketRepository,
|
||||
)
|
||||
from app.services.contact_reconciliation import claim_prefix_messages_for_contact
|
||||
from app.services.messages import (
|
||||
broadcast_message,
|
||||
build_message_model,
|
||||
build_message_paths,
|
||||
format_contact_log_target,
|
||||
handle_duplicate_message,
|
||||
reconcile_duplicate_message,
|
||||
truncate_for_log,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.decoder import DecryptedDirectMessage
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BroadcastFn = Callable[..., Any]
|
||||
_decrypted_dm_store_lock = asyncio.Lock()
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class FallbackDirectMessageContext:
|
||||
conversation_key: str
|
||||
contact: Contact | None
|
||||
sender_name: str | None
|
||||
sender_key: str | None
|
||||
skip_storage: bool = False
|
||||
|
||||
|
||||
async def _prepare_resolved_contact(
|
||||
contact: Contact,
|
||||
*,
|
||||
log: logging.Logger | None = None,
|
||||
) -> tuple[str, bool]:
|
||||
conversation_key = contact.public_key.lower()
|
||||
await claim_prefix_messages_for_contact(public_key=conversation_key, log=log or logger)
|
||||
|
||||
if contact.type == CONTACT_TYPE_REPEATER:
|
||||
return conversation_key, True
|
||||
|
||||
return conversation_key, False
|
||||
|
||||
|
||||
async def resolve_fallback_direct_message_context(
|
||||
*,
|
||||
sender_public_key: str,
|
||||
received_at: int,
|
||||
broadcast_fn: BroadcastFn,
|
||||
contact_repository=ContactRepository,
|
||||
log: logging.Logger | None = None,
|
||||
) -> FallbackDirectMessageContext:
|
||||
normalized_sender = sender_public_key.lower()
|
||||
|
||||
try:
|
||||
contact = await contact_repository.get_by_key_or_prefix(normalized_sender)
|
||||
except AmbiguousPublicKeyPrefixError:
|
||||
(log or logger).warning(
|
||||
"DM sender prefix '%s' is ambiguous; storing under prefix until full key is known",
|
||||
sender_public_key,
|
||||
)
|
||||
contact = None
|
||||
|
||||
if contact is not None:
|
||||
conversation_key, skip_storage = await _prepare_resolved_contact(contact, log=log)
|
||||
return FallbackDirectMessageContext(
|
||||
conversation_key=conversation_key,
|
||||
contact=contact,
|
||||
sender_name=contact.name,
|
||||
sender_key=conversation_key,
|
||||
skip_storage=skip_storage,
|
||||
)
|
||||
|
||||
if normalized_sender:
|
||||
placeholder_upsert = ContactUpsert(
|
||||
public_key=normalized_sender,
|
||||
type=0,
|
||||
last_seen=received_at,
|
||||
last_contacted=received_at,
|
||||
first_seen=received_at,
|
||||
on_radio=False,
|
||||
out_path_hash_mode=-1,
|
||||
)
|
||||
await contact_repository.upsert(placeholder_upsert)
|
||||
contact = await contact_repository.get_by_key(normalized_sender)
|
||||
if contact is not None:
|
||||
broadcast_fn("contact", contact.model_dump())
|
||||
|
||||
return FallbackDirectMessageContext(
|
||||
conversation_key=normalized_sender,
|
||||
contact=contact,
|
||||
sender_name=contact.name if contact else None,
|
||||
sender_key=normalized_sender or None,
|
||||
)
|
||||
|
||||
|
||||
async def _store_direct_message(
|
||||
*,
|
||||
packet_id: int | None,
|
||||
conversation_key: str,
|
||||
text: str,
|
||||
sender_timestamp: int,
|
||||
received_at: int,
|
||||
path: str | None,
|
||||
path_len: int | None,
|
||||
outgoing: bool,
|
||||
txt_type: int,
|
||||
signature: str | None,
|
||||
sender_name: str | None,
|
||||
sender_key: str | None,
|
||||
realtime: bool,
|
||||
broadcast_fn: BroadcastFn,
|
||||
update_last_contacted_key: str | None,
|
||||
best_effort_content_dedup: bool,
|
||||
linked_packet_dedup: bool,
|
||||
message_repository=MessageRepository,
|
||||
contact_repository=ContactRepository,
|
||||
raw_packet_repository=RawPacketRepository,
|
||||
) -> Message | None:
|
||||
async def store() -> Message | None:
|
||||
if linked_packet_dedup and packet_id is not None:
|
||||
linked_message_id = await raw_packet_repository.get_linked_message_id(packet_id)
|
||||
if linked_message_id is not None:
|
||||
existing_msg = await message_repository.get_by_id(linked_message_id)
|
||||
if existing_msg is not None:
|
||||
await reconcile_duplicate_message(
|
||||
existing_msg=existing_msg,
|
||||
packet_id=packet_id,
|
||||
path=path,
|
||||
received_at=received_at,
|
||||
path_len=path_len,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
return None
|
||||
|
||||
if best_effort_content_dedup:
|
||||
existing_msg = await message_repository.get_by_content(
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=text,
|
||||
sender_timestamp=sender_timestamp,
|
||||
)
|
||||
if existing_msg is not None:
|
||||
await reconcile_duplicate_message(
|
||||
existing_msg=existing_msg,
|
||||
packet_id=packet_id,
|
||||
path=path,
|
||||
received_at=received_at,
|
||||
path_len=path_len,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
return None
|
||||
|
||||
msg_id = await message_repository.create(
|
||||
msg_type="PRIV",
|
||||
text=text,
|
||||
conversation_key=conversation_key,
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
path=path,
|
||||
path_len=path_len,
|
||||
txt_type=txt_type,
|
||||
signature=signature,
|
||||
outgoing=outgoing,
|
||||
sender_key=sender_key,
|
||||
sender_name=sender_name,
|
||||
)
|
||||
if msg_id is None:
|
||||
await handle_duplicate_message(
|
||||
packet_id=packet_id,
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=text,
|
||||
sender_timestamp=sender_timestamp,
|
||||
path=path,
|
||||
received_at=received_at,
|
||||
path_len=path_len,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
return None
|
||||
|
||||
if packet_id is not None:
|
||||
await raw_packet_repository.mark_decrypted(packet_id, msg_id)
|
||||
|
||||
message = build_message_model(
|
||||
message_id=msg_id,
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=text,
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
paths=build_message_paths(path, received_at, path_len),
|
||||
txt_type=txt_type,
|
||||
signature=signature,
|
||||
sender_key=sender_key,
|
||||
outgoing=outgoing,
|
||||
sender_name=sender_name,
|
||||
)
|
||||
broadcast_message(message=message, broadcast_fn=broadcast_fn, realtime=realtime)
|
||||
|
||||
if update_last_contacted_key:
|
||||
await contact_repository.update_last_contacted(update_last_contacted_key, received_at)
|
||||
|
||||
return message
|
||||
|
||||
if linked_packet_dedup:
|
||||
async with _decrypted_dm_store_lock:
|
||||
return await store()
|
||||
return await store()
|
||||
|
||||
|
||||
async def ingest_decrypted_direct_message(
|
||||
*,
|
||||
packet_id: int,
|
||||
decrypted: "DecryptedDirectMessage",
|
||||
their_public_key: str,
|
||||
received_at: int | None = None,
|
||||
path: str | None = None,
|
||||
path_len: int | None = None,
|
||||
outgoing: bool = False,
|
||||
realtime: bool = True,
|
||||
broadcast_fn: BroadcastFn,
|
||||
contact_repository=ContactRepository,
|
||||
) -> Message | None:
|
||||
conversation_key = their_public_key.lower()
|
||||
contact = await contact_repository.get_by_key(conversation_key)
|
||||
sender_name: str | None = None
|
||||
if contact is not None:
|
||||
conversation_key, skip_storage = await _prepare_resolved_contact(contact, log=logger)
|
||||
if skip_storage:
|
||||
logger.debug(
|
||||
"Skipping message from repeater %s (CLI responses not stored): %s",
|
||||
conversation_key[:12],
|
||||
(decrypted.message or "")[:50],
|
||||
)
|
||||
return None
|
||||
if not outgoing:
|
||||
sender_name = contact.name
|
||||
|
||||
received = received_at or int(time.time())
|
||||
message = await _store_direct_message(
|
||||
packet_id=packet_id,
|
||||
conversation_key=conversation_key,
|
||||
text=decrypted.message,
|
||||
sender_timestamp=decrypted.timestamp,
|
||||
received_at=received,
|
||||
path=path,
|
||||
path_len=path_len,
|
||||
outgoing=outgoing,
|
||||
txt_type=0,
|
||||
signature=None,
|
||||
sender_name=sender_name,
|
||||
sender_key=conversation_key if not outgoing else None,
|
||||
realtime=realtime,
|
||||
broadcast_fn=broadcast_fn,
|
||||
update_last_contacted_key=conversation_key,
|
||||
best_effort_content_dedup=outgoing,
|
||||
linked_packet_dedup=True,
|
||||
)
|
||||
if message is None:
|
||||
return None
|
||||
|
||||
logger.info(
|
||||
'Stored direct message "%s" for %r (msg ID %d in contact ID %s, outgoing=%s)',
|
||||
truncate_for_log(decrypted.message),
|
||||
format_contact_log_target(contact.name if contact else None, conversation_key),
|
||||
message.id,
|
||||
conversation_key,
|
||||
outgoing,
|
||||
)
|
||||
return message
|
||||
|
||||
|
||||
async def ingest_fallback_direct_message(
|
||||
*,
|
||||
conversation_key: str,
|
||||
text: str,
|
||||
sender_timestamp: int,
|
||||
received_at: int,
|
||||
path: str | None,
|
||||
path_len: int | None,
|
||||
txt_type: int,
|
||||
signature: str | None,
|
||||
sender_name: str | None,
|
||||
sender_key: str | None,
|
||||
broadcast_fn: BroadcastFn,
|
||||
update_last_contacted_key: str | None = None,
|
||||
) -> Message | None:
|
||||
return await _store_direct_message(
|
||||
packet_id=None,
|
||||
conversation_key=conversation_key,
|
||||
text=text,
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
path=path,
|
||||
path_len=path_len,
|
||||
outgoing=False,
|
||||
txt_type=txt_type,
|
||||
signature=signature,
|
||||
sender_name=sender_name,
|
||||
sender_key=sender_key,
|
||||
realtime=True,
|
||||
broadcast_fn=broadcast_fn,
|
||||
update_last_contacted_key=update_last_contacted_key,
|
||||
best_effort_content_dedup=True,
|
||||
linked_packet_dedup=False,
|
||||
)
|
||||
@@ -20,6 +20,11 @@ from app.services.messages import (
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
NO_RADIO_RESPONSE_AFTER_SEND_DETAIL = (
|
||||
"Send command was issued to the radio, but no response was heard back. "
|
||||
"The message may or may not have sent successfully."
|
||||
)
|
||||
|
||||
BroadcastFn = Callable[..., Any]
|
||||
TrackAckFn = Callable[[str, int, int], bool]
|
||||
NowFn = Callable[[], float]
|
||||
@@ -193,6 +198,13 @@ async def send_channel_message_with_effective_scope(
|
||||
msg=text,
|
||||
timestamp=timestamp_bytes,
|
||||
)
|
||||
if send_result is None:
|
||||
logger.warning(
|
||||
"No response from radio after %s for channel %s; send outcome is unknown",
|
||||
action_label,
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
if send_result.type == EventType.ERROR:
|
||||
radio_manager.invalidate_cached_channel_slot(channel_key)
|
||||
else:
|
||||
@@ -279,7 +291,14 @@ async def send_direct_message_to_contact(
|
||||
timestamp=sender_timestamp,
|
||||
)
|
||||
|
||||
if result is None or result.type == EventType.ERROR:
|
||||
if result is None:
|
||||
logger.warning(
|
||||
"No response from radio after direct send to %s; send outcome is unknown",
|
||||
contact.public_key[:12],
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to send message: {result.payload}")
|
||||
|
||||
message = await create_outgoing_direct_message(
|
||||
@@ -376,6 +395,13 @@ async def send_channel_message_to_channel(
|
||||
error_broadcast_fn=error_broadcast_fn,
|
||||
)
|
||||
|
||||
if result is None:
|
||||
logger.warning(
|
||||
"No response from radio after channel send to %s; send outcome is unknown",
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to send message: {result.payload}"
|
||||
@@ -487,6 +513,12 @@ async def resend_channel_message_record(
|
||||
temp_radio_slot=temp_radio_slot,
|
||||
error_broadcast_fn=error_broadcast_fn,
|
||||
)
|
||||
if result is None:
|
||||
logger.warning(
|
||||
"No response from radio after channel resend to %s; send outcome is unknown",
|
||||
channel.name,
|
||||
)
|
||||
raise HTTPException(status_code=504, detail=NO_RADIO_RESPONSE_AFTER_SEND_DETAIL)
|
||||
if result.type == EventType.ERROR:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
|
||||
@@ -3,7 +3,7 @@ import time
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from app.models import CONTACT_TYPE_REPEATER, Message, MessagePath
|
||||
from app.models import Message, MessagePath
|
||||
from app.repository import ContactRepository, MessageRepository, RawPacketRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -15,7 +15,7 @@ BroadcastFn = Callable[..., Any]
|
||||
LOG_MESSAGE_PREVIEW_LEN = 32
|
||||
|
||||
|
||||
def _truncate_for_log(text: str, max_chars: int = LOG_MESSAGE_PREVIEW_LEN) -> str:
|
||||
def truncate_for_log(text: str, max_chars: int = LOG_MESSAGE_PREVIEW_LEN) -> str:
|
||||
"""Return a compact single-line message preview for log output."""
|
||||
normalized = " ".join(text.split())
|
||||
if len(normalized) <= max_chars:
|
||||
@@ -28,7 +28,7 @@ def _format_channel_log_target(channel_name: str | None, channel_key: str) -> st
|
||||
return channel_name or channel_key
|
||||
|
||||
|
||||
def _format_contact_log_target(contact_name: str | None, public_key: str) -> str:
|
||||
def format_contact_log_target(contact_name: str | None, public_key: str) -> str:
|
||||
"""Return a human-friendly DM target label for logs."""
|
||||
return contact_name or public_key[:12]
|
||||
|
||||
@@ -125,6 +125,45 @@ async def increment_ack_and_broadcast(
|
||||
return ack_count
|
||||
|
||||
|
||||
async def reconcile_duplicate_message(
|
||||
*,
|
||||
existing_msg: Message,
|
||||
packet_id: int | None,
|
||||
path: str | None,
|
||||
received_at: int,
|
||||
path_len: int | None,
|
||||
broadcast_fn: BroadcastFn,
|
||||
) -> None:
|
||||
logger.debug(
|
||||
"Duplicate %s for %s (msg_id=%d, outgoing=%s) - adding path",
|
||||
existing_msg.type,
|
||||
existing_msg.conversation_key[:12],
|
||||
existing_msg.id,
|
||||
existing_msg.outgoing,
|
||||
)
|
||||
|
||||
if path is not None:
|
||||
paths = await MessageRepository.add_path(existing_msg.id, path, received_at, path_len)
|
||||
else:
|
||||
paths = existing_msg.paths or []
|
||||
|
||||
if existing_msg.outgoing and existing_msg.type == "CHAN":
|
||||
ack_count = await MessageRepository.increment_ack_count(existing_msg.id)
|
||||
else:
|
||||
ack_count = existing_msg.acked
|
||||
|
||||
if existing_msg.outgoing or path is not None:
|
||||
broadcast_message_acked(
|
||||
message_id=existing_msg.id,
|
||||
ack_count=ack_count,
|
||||
paths=paths,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
|
||||
if packet_id is not None:
|
||||
await RawPacketRepository.mark_decrypted(packet_id, existing_msg.id)
|
||||
|
||||
|
||||
async def handle_duplicate_message(
|
||||
*,
|
||||
packet_id: int | None,
|
||||
@@ -153,35 +192,15 @@ async def handle_duplicate_message(
|
||||
)
|
||||
return
|
||||
|
||||
logger.debug(
|
||||
"Duplicate %s for %s (msg_id=%d, outgoing=%s) - adding path",
|
||||
msg_type,
|
||||
conversation_key[:12],
|
||||
existing_msg.id,
|
||||
existing_msg.outgoing,
|
||||
await reconcile_duplicate_message(
|
||||
existing_msg=existing_msg,
|
||||
packet_id=packet_id,
|
||||
path=path,
|
||||
received_at=received_at,
|
||||
path_len=path_len,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
|
||||
if path is not None:
|
||||
paths = await MessageRepository.add_path(existing_msg.id, path, received_at, path_len)
|
||||
else:
|
||||
paths = existing_msg.paths or []
|
||||
|
||||
if existing_msg.outgoing and existing_msg.type == "CHAN":
|
||||
ack_count = await MessageRepository.increment_ack_count(existing_msg.id)
|
||||
else:
|
||||
ack_count = existing_msg.acked
|
||||
|
||||
if existing_msg.outgoing or path is not None:
|
||||
broadcast_message_acked(
|
||||
message_id=existing_msg.id,
|
||||
ack_count=ack_count,
|
||||
paths=paths,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
|
||||
if packet_id is not None:
|
||||
await RawPacketRepository.mark_decrypted(packet_id, existing_msg.id)
|
||||
|
||||
|
||||
async def create_message_from_decrypted(
|
||||
*,
|
||||
@@ -236,7 +255,7 @@ async def create_message_from_decrypted(
|
||||
|
||||
logger.info(
|
||||
'Stored channel message "%s" for %r (msg ID %d in chan ID %s)',
|
||||
_truncate_for_log(text),
|
||||
truncate_for_log(text),
|
||||
_format_channel_log_target(channel_name, channel_key_normalized),
|
||||
msg_id,
|
||||
channel_key_normalized,
|
||||
@@ -277,124 +296,20 @@ async def create_dm_message_from_decrypted(
|
||||
broadcast_fn: BroadcastFn,
|
||||
) -> int | None:
|
||||
"""Store and broadcast a decrypted direct message."""
|
||||
contact = await ContactRepository.get_by_key(their_public_key)
|
||||
if contact and contact.type == CONTACT_TYPE_REPEATER:
|
||||
logger.debug(
|
||||
"Skipping message from repeater %s (CLI responses not stored): %s",
|
||||
their_public_key[:12],
|
||||
(decrypted.message or "")[:50],
|
||||
)
|
||||
return None
|
||||
from app.services.dm_ingest import ingest_decrypted_direct_message
|
||||
|
||||
received = received_at or int(time.time())
|
||||
conversation_key = their_public_key.lower()
|
||||
sender_name = contact.name if contact and not outgoing else None
|
||||
|
||||
msg_id = await MessageRepository.create(
|
||||
msg_type="PRIV",
|
||||
text=decrypted.message,
|
||||
conversation_key=conversation_key,
|
||||
sender_timestamp=decrypted.timestamp,
|
||||
received_at=received,
|
||||
message = await ingest_decrypted_direct_message(
|
||||
packet_id=packet_id,
|
||||
decrypted=decrypted,
|
||||
their_public_key=their_public_key,
|
||||
received_at=received_at,
|
||||
path=path,
|
||||
path_len=path_len,
|
||||
outgoing=outgoing,
|
||||
sender_key=conversation_key if not outgoing else None,
|
||||
sender_name=sender_name,
|
||||
)
|
||||
|
||||
if msg_id is None:
|
||||
await handle_duplicate_message(
|
||||
packet_id=packet_id,
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=decrypted.message,
|
||||
sender_timestamp=decrypted.timestamp,
|
||||
path=path,
|
||||
received_at=received,
|
||||
path_len=path_len,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
return None
|
||||
|
||||
logger.info(
|
||||
'Stored direct message "%s" for %r (msg ID %d in contact ID %s, outgoing=%s)',
|
||||
_truncate_for_log(decrypted.message),
|
||||
_format_contact_log_target(contact.name if contact else None, conversation_key),
|
||||
msg_id,
|
||||
conversation_key,
|
||||
outgoing,
|
||||
)
|
||||
await RawPacketRepository.mark_decrypted(packet_id, msg_id)
|
||||
|
||||
broadcast_message(
|
||||
message=build_message_model(
|
||||
message_id=msg_id,
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=decrypted.message,
|
||||
sender_timestamp=decrypted.timestamp,
|
||||
received_at=received,
|
||||
paths=build_message_paths(path, received, path_len),
|
||||
outgoing=outgoing,
|
||||
sender_name=sender_name,
|
||||
sender_key=conversation_key if not outgoing else None,
|
||||
),
|
||||
broadcast_fn=broadcast_fn,
|
||||
realtime=realtime,
|
||||
broadcast_fn=broadcast_fn,
|
||||
)
|
||||
|
||||
await ContactRepository.update_last_contacted(conversation_key, received)
|
||||
return msg_id
|
||||
|
||||
|
||||
async def create_fallback_direct_message(
|
||||
*,
|
||||
conversation_key: str,
|
||||
text: str,
|
||||
sender_timestamp: int,
|
||||
received_at: int,
|
||||
path: str | None,
|
||||
path_len: int | None,
|
||||
txt_type: int,
|
||||
signature: str | None,
|
||||
sender_name: str | None,
|
||||
sender_key: str | None,
|
||||
broadcast_fn: BroadcastFn,
|
||||
message_repository=MessageRepository,
|
||||
) -> Message | None:
|
||||
"""Store and broadcast a CONTACT_MSG_RECV fallback direct message."""
|
||||
msg_id = await message_repository.create(
|
||||
msg_type="PRIV",
|
||||
text=text,
|
||||
conversation_key=conversation_key,
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
path=path,
|
||||
path_len=path_len,
|
||||
txt_type=txt_type,
|
||||
signature=signature,
|
||||
sender_key=sender_key,
|
||||
sender_name=sender_name,
|
||||
)
|
||||
if msg_id is None:
|
||||
return None
|
||||
|
||||
message = build_message_model(
|
||||
message_id=msg_id,
|
||||
msg_type="PRIV",
|
||||
conversation_key=conversation_key,
|
||||
text=text,
|
||||
sender_timestamp=sender_timestamp,
|
||||
received_at=received_at,
|
||||
paths=build_message_paths(path, received_at, path_len),
|
||||
txt_type=txt_type,
|
||||
signature=signature,
|
||||
sender_key=sender_key,
|
||||
sender_name=sender_name,
|
||||
)
|
||||
broadcast_message(message=message, broadcast_fn=broadcast_fn)
|
||||
return message
|
||||
return message.id if message is not None else None
|
||||
|
||||
|
||||
async def create_fallback_channel_message(
|
||||
|
||||
@@ -7,6 +7,21 @@ POST_CONNECT_SETUP_TIMEOUT_SECONDS = 300
|
||||
POST_CONNECT_SETUP_MAX_ATTEMPTS = 2
|
||||
|
||||
|
||||
def _clean_device_string(value: object) -> str | None:
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
cleaned = value.strip()
|
||||
return cleaned or None
|
||||
|
||||
|
||||
def _decode_fixed_string(raw: bytes, start: int, length: int) -> str | None:
|
||||
if len(raw) < start:
|
||||
return None
|
||||
return _clean_device_string(
|
||||
raw[start : start + length].decode("utf-8", "ignore").replace("\0", "")
|
||||
)
|
||||
|
||||
|
||||
async def run_post_connect_setup(radio_manager) -> None:
|
||||
"""Run shared radio initialization after a transport connection succeeds."""
|
||||
from app.event_handlers import register_event_handlers
|
||||
@@ -78,26 +93,66 @@ async def run_post_connect_setup(radio_manager) -> None:
|
||||
return await _original_handle_rx(data)
|
||||
|
||||
reader.handle_rx = _capture_handle_rx
|
||||
radio_manager.device_info_loaded = False
|
||||
radio_manager.max_contacts = None
|
||||
radio_manager.device_model = None
|
||||
radio_manager.firmware_build = None
|
||||
radio_manager.firmware_version = None
|
||||
radio_manager.max_channels = 40
|
||||
radio_manager.path_hash_mode = 0
|
||||
radio_manager.path_hash_mode_supported = False
|
||||
try:
|
||||
device_query = await mc.commands.send_device_query()
|
||||
if device_query and "max_channels" in device_query.payload:
|
||||
radio_manager.max_channels = max(
|
||||
1, int(device_query.payload["max_channels"])
|
||||
)
|
||||
if device_query and "path_hash_mode" in device_query.payload:
|
||||
radio_manager.path_hash_mode = device_query.payload["path_hash_mode"]
|
||||
payload = (
|
||||
device_query.payload
|
||||
if device_query is not None and isinstance(device_query.payload, dict)
|
||||
else {}
|
||||
)
|
||||
|
||||
payload_max_contacts = payload.get("max_contacts")
|
||||
if isinstance(payload_max_contacts, int):
|
||||
radio_manager.max_contacts = max(1, payload_max_contacts)
|
||||
|
||||
payload_max_channels = payload.get("max_channels")
|
||||
if isinstance(payload_max_channels, int):
|
||||
radio_manager.max_channels = max(1, payload_max_channels)
|
||||
|
||||
radio_manager.device_model = _clean_device_string(payload.get("model"))
|
||||
radio_manager.firmware_build = _clean_device_string(payload.get("fw_build"))
|
||||
radio_manager.firmware_version = _clean_device_string(payload.get("ver"))
|
||||
|
||||
fw_ver = payload.get("fw ver")
|
||||
payload_reports_device_info = isinstance(fw_ver, int) and fw_ver >= 3
|
||||
if payload_reports_device_info:
|
||||
radio_manager.device_info_loaded = True
|
||||
|
||||
if "path_hash_mode" in payload and isinstance(payload["path_hash_mode"], int):
|
||||
radio_manager.path_hash_mode = payload["path_hash_mode"]
|
||||
radio_manager.path_hash_mode_supported = True
|
||||
elif _captured_frame:
|
||||
# Raw-frame fallback:
|
||||
# byte 1 = fw_ver, byte 3 = max_channels, byte 81 = path_hash_mode
|
||||
|
||||
if _captured_frame:
|
||||
# Raw-frame fallback / completion:
|
||||
# byte 1 = fw_ver, byte 2 = max_contacts/2, byte 3 = max_channels,
|
||||
# bytes 8:20 = fw_build, 20:60 = model, 60:80 = ver, byte 81 = path_hash_mode
|
||||
raw = _captured_frame[-1]
|
||||
fw_ver = raw[1] if len(raw) > 1 else 0
|
||||
if fw_ver >= 3 and len(raw) >= 4:
|
||||
radio_manager.max_channels = max(1, raw[3])
|
||||
if fw_ver >= 10 and len(raw) >= 82:
|
||||
if fw_ver >= 3:
|
||||
radio_manager.device_info_loaded = True
|
||||
if radio_manager.max_contacts is None and len(raw) >= 3:
|
||||
radio_manager.max_contacts = max(1, raw[2] * 2)
|
||||
if len(raw) >= 4 and not isinstance(payload_max_channels, int):
|
||||
radio_manager.max_channels = max(1, raw[3])
|
||||
if radio_manager.firmware_build is None:
|
||||
radio_manager.firmware_build = _decode_fixed_string(raw, 8, 12)
|
||||
if radio_manager.device_model is None:
|
||||
radio_manager.device_model = _decode_fixed_string(raw, 20, 40)
|
||||
if radio_manager.firmware_version is None:
|
||||
radio_manager.firmware_version = _decode_fixed_string(raw, 60, 20)
|
||||
if (
|
||||
not radio_manager.path_hash_mode_supported
|
||||
and fw_ver >= 10
|
||||
and len(raw) >= 82
|
||||
):
|
||||
radio_manager.path_hash_mode = raw[81]
|
||||
radio_manager.path_hash_mode_supported = True
|
||||
logger.warning(
|
||||
@@ -114,6 +169,17 @@ async def run_post_connect_setup(radio_manager) -> None:
|
||||
logger.info("Path hash mode: %d (supported)", radio_manager.path_hash_mode)
|
||||
else:
|
||||
logger.debug("Firmware does not report path_hash_mode")
|
||||
if radio_manager.device_info_loaded:
|
||||
logger.info(
|
||||
"Radio device info: model=%s build=%s version=%s max_contacts=%s max_channels=%d",
|
||||
radio_manager.device_model or "unknown",
|
||||
radio_manager.firmware_build or "unknown",
|
||||
radio_manager.firmware_version or "unknown",
|
||||
radio_manager.max_contacts
|
||||
if radio_manager.max_contacts is not None
|
||||
else "unknown",
|
||||
radio_manager.max_channels,
|
||||
)
|
||||
logger.info("Max channel slots: %d", radio_manager.max_channels)
|
||||
except Exception as exc:
|
||||
logger.debug("Failed to query device info capabilities: %s", exc)
|
||||
|
||||
@@ -318,6 +318,8 @@ LocalStorage migration helpers for favorites; canonical favorites are server-sid
|
||||
- `flood_scope`
|
||||
- `blocked_keys`, `blocked_names`
|
||||
|
||||
The backend still carries `sidebar_sort_order` for compatibility and old preference migration, but the current sidebar UI stores sort order per section (`Channels`, `Contacts`, `Repeaters`) in frontend localStorage rather than treating it as one global server-backed setting.
|
||||
|
||||
Note: MQTT, bot, and community MQTT settings were migrated to the `fanout_configs` table (managed via `/api/fanout`). They are no longer part of `AppSettings`.
|
||||
|
||||
`HealthStatus` includes `fanout_statuses: Record<string, FanoutStatusEntry>` mapping config IDs to `{name, type, status}`. Also includes `bots_disabled: boolean`.
|
||||
@@ -407,6 +409,10 @@ npm run test:run
|
||||
npm run build
|
||||
```
|
||||
|
||||
`npm run packaged-build` is release-only. It writes the fallback `frontend/prebuilt`
|
||||
directory used by the downloadable prebuilt release zip; normal development and
|
||||
validation should stick to `npm run build`.
|
||||
|
||||
When touching cross-layer contracts, also run backend tests from repo root:
|
||||
|
||||
```bash
|
||||
@@ -415,6 +421,10 @@ PYTHONPATH=. uv run pytest tests/ -v
|
||||
|
||||
## Errata & Known Non-Issues
|
||||
|
||||
### Contacts rollup uses mention styling for unread DMs
|
||||
|
||||
This is intentional. In the sidebar section headers, unread direct messages are treated as mention-equivalent, so the Contacts rollup uses the highlighted mention-style badge for any unread DM. Row-level mention detection remains separate; this note is only about the section summary styling.
|
||||
|
||||
### RawPacketList always scrolls to bottom
|
||||
|
||||
`RawPacketList` unconditionally scrolls to the latest packet on every update. This is intentional — the packet feed is a live status display, not an interactive log meant for lingering or long-term analysis. Users watching it want to see the newest packet, not hold a scroll position.
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
{
|
||||
"name": "remoteterm-meshcore-frontend",
|
||||
"private": true,
|
||||
"version": "3.3.0",
|
||||
"version": "3.4.1",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"packaged-build": "vite build --outDir prebuilt",
|
||||
"preview": "vite preview",
|
||||
"test": "vitest",
|
||||
"test:run": "vitest run",
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { useEffect, useCallback, useRef, useState } from 'react';
|
||||
import { api } from './api';
|
||||
import * as messageCache from './messageCache';
|
||||
import { takePrefetchOrFetch } from './prefetch';
|
||||
import { useWebSocket } from './useWebSocket';
|
||||
import {
|
||||
@@ -109,6 +108,7 @@ export function App() {
|
||||
// useConversationRouter, but useConversationRouter needs channels/contacts from
|
||||
// useContactsAndChannels. We break the cycle with a ref-based indirection.
|
||||
const setActiveConversationRef = useRef<(conv: Conversation | null) => void>(() => {});
|
||||
const removeConversationMessagesRef = useRef<(conversationId: string) => void>(() => {});
|
||||
|
||||
// --- Extracted hooks ---
|
||||
|
||||
@@ -135,7 +135,6 @@ export function App() {
|
||||
favorites,
|
||||
fetchAppSettings,
|
||||
handleSaveAppSettings,
|
||||
handleSortOrderChange,
|
||||
handleToggleFavorite,
|
||||
handleToggleBlockedKey,
|
||||
handleToggleBlockedName,
|
||||
@@ -181,6 +180,8 @@ export function App() {
|
||||
setActiveConversation: (conv) => setActiveConversationRef.current(conv),
|
||||
pendingDeleteFallbackRef,
|
||||
hasSetDefaultConversation,
|
||||
removeConversationMessages: (conversationId) =>
|
||||
removeConversationMessagesRef.current(conversationId),
|
||||
});
|
||||
|
||||
// useConversationRouter is called second — it receives channels/contacts as inputs
|
||||
@@ -193,6 +194,7 @@ export function App() {
|
||||
channels,
|
||||
contacts,
|
||||
contactsLoaded,
|
||||
suspendHashSync: showSettings,
|
||||
setSidebarOpen,
|
||||
pendingDeleteFallbackRef,
|
||||
hasSetDefaultConversation,
|
||||
@@ -229,28 +231,36 @@ export function App() {
|
||||
hasOlderMessages,
|
||||
hasNewerMessages,
|
||||
loadingNewer,
|
||||
hasNewerMessagesRef,
|
||||
fetchOlderMessages,
|
||||
fetchNewerMessages,
|
||||
jumpToBottom,
|
||||
reloadCurrentConversation,
|
||||
addMessageIfNew,
|
||||
updateMessageAck,
|
||||
triggerReconcile,
|
||||
observeMessage,
|
||||
receiveMessageAck,
|
||||
reconcileOnReconnect,
|
||||
renameConversationMessages,
|
||||
removeConversationMessages,
|
||||
clearConversationMessages,
|
||||
} = useConversationMessages(activeConversation, targetMessageId);
|
||||
removeConversationMessagesRef.current = removeConversationMessages;
|
||||
|
||||
const {
|
||||
unreadCounts,
|
||||
mentions,
|
||||
lastMessageTimes,
|
||||
unreadLastReadAts,
|
||||
incrementUnread,
|
||||
recordMessageEvent,
|
||||
renameConversationState,
|
||||
markAllRead,
|
||||
trackNewMessage,
|
||||
refreshUnreads,
|
||||
} = useUnreadCounts(channels, contacts, activeConversation);
|
||||
|
||||
useEffect(() => {
|
||||
if (showSettings && !config && settingsSection === 'radio') {
|
||||
setSettingsSection('local');
|
||||
}
|
||||
}, [config, settingsSection, setSettingsSection, showSettings]);
|
||||
|
||||
useEffect(() => {
|
||||
if (activeConversation?.type !== 'channel') {
|
||||
setChannelUnreadMarker(null);
|
||||
@@ -309,7 +319,7 @@ export function App() {
|
||||
setHealth,
|
||||
fetchConfig,
|
||||
setRawPackets,
|
||||
triggerReconcile,
|
||||
reconcileOnReconnect,
|
||||
refreshUnreads,
|
||||
setChannels,
|
||||
fetchAllContacts,
|
||||
@@ -317,23 +327,23 @@ export function App() {
|
||||
blockedKeysRef,
|
||||
blockedNamesRef,
|
||||
activeConversationRef,
|
||||
hasNewerMessagesRef,
|
||||
addMessageIfNew,
|
||||
trackNewMessage,
|
||||
incrementUnread,
|
||||
observeMessage,
|
||||
recordMessageEvent,
|
||||
renameConversationState,
|
||||
checkMention,
|
||||
pendingDeleteFallbackRef,
|
||||
setActiveConversation,
|
||||
updateMessageAck,
|
||||
renameConversationMessages,
|
||||
removeConversationMessages,
|
||||
receiveMessageAck,
|
||||
notifyIncomingMessage,
|
||||
});
|
||||
const handleVisibilityPolicyChanged = useCallback(() => {
|
||||
messageCache.clear();
|
||||
clearConversationMessages();
|
||||
reloadCurrentConversation();
|
||||
void refreshUnreads();
|
||||
setVisibilityVersion((current) => current + 1);
|
||||
}, [refreshUnreads, reloadCurrentConversation]);
|
||||
}, [clearConversationMessages, refreshUnreads, reloadCurrentConversation]);
|
||||
|
||||
const handleBlockKey = useCallback(
|
||||
async (key: string) => {
|
||||
@@ -362,7 +372,7 @@ export function App() {
|
||||
activeConversationRef,
|
||||
setContacts,
|
||||
setChannels,
|
||||
addMessageIfNew,
|
||||
observeMessage,
|
||||
messageInputRef,
|
||||
});
|
||||
const handleCreateCrackedChannel = useCallback(
|
||||
@@ -401,10 +411,7 @@ export function App() {
|
||||
void markAllRead();
|
||||
},
|
||||
favorites,
|
||||
sortOrder: appSettings?.sidebar_sort_order ?? 'recent',
|
||||
onSortOrderChange: (sortOrder: 'recent' | 'alpha') => {
|
||||
void handleSortOrderChange(sortOrder);
|
||||
},
|
||||
legacySortOrder: appSettings?.sidebar_sort_order,
|
||||
isConversationNotificationsEnabled,
|
||||
};
|
||||
const conversationPaneProps = {
|
||||
@@ -566,6 +573,7 @@ export function App() {
|
||||
settingsSection={settingsSection}
|
||||
sidebarOpen={sidebarOpen}
|
||||
showCracker={showCracker}
|
||||
disabledSettingsSections={config ? [] : ['radio']}
|
||||
onSettingsSectionChange={setSettingsSection}
|
||||
onSidebarOpenChange={setSidebarOpen}
|
||||
onCrackerRunningChange={setCrackerRunning}
|
||||
|
||||
@@ -41,6 +41,7 @@ interface AppShellProps {
|
||||
settingsSection: SettingsSection;
|
||||
sidebarOpen: boolean;
|
||||
showCracker: boolean;
|
||||
disabledSettingsSections?: SettingsSection[];
|
||||
onSettingsSectionChange: (section: SettingsSection) => void;
|
||||
onSidebarOpenChange: (open: boolean) => void;
|
||||
onCrackerRunningChange: (running: boolean) => void;
|
||||
@@ -69,6 +70,7 @@ export function AppShell({
|
||||
settingsSection,
|
||||
sidebarOpen,
|
||||
showCracker,
|
||||
disabledSettingsSections = [],
|
||||
onSettingsSectionChange,
|
||||
onSidebarOpenChange,
|
||||
onCrackerRunningChange,
|
||||
@@ -118,13 +120,16 @@ export function AppShell({
|
||||
<div className="flex-1 min-h-0 overflow-y-auto py-1 [contain:layout_paint]">
|
||||
{SETTINGS_SECTION_ORDER.map((section) => {
|
||||
const Icon = SETTINGS_SECTION_ICONS[section];
|
||||
const disabled = disabledSettingsSections.includes(section);
|
||||
return (
|
||||
<button
|
||||
key={section}
|
||||
type="button"
|
||||
disabled={disabled}
|
||||
className={cn(
|
||||
'w-full px-3 py-2 text-left text-[13px] border-l-2 border-transparent hover:bg-accent transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-inset',
|
||||
settingsSection === section && 'bg-accent border-l-primary'
|
||||
'w-full px-3 py-2 text-left text-[13px] border-l-2 border-transparent transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-inset disabled:cursor-not-allowed disabled:opacity-50',
|
||||
!disabled && 'hover:bg-accent',
|
||||
settingsSection === section && !disabled && 'bg-accent border-l-primary'
|
||||
)}
|
||||
aria-current={settingsSection === section ? 'true' : undefined}
|
||||
onClick={() => onSettingsSectionChange(section)}
|
||||
|
||||
@@ -6,6 +6,7 @@ import { ContactPathDiscoveryModal } from './ContactPathDiscoveryModal';
|
||||
import { ChannelFloodScopeOverrideModal } from './ChannelFloodScopeOverrideModal';
|
||||
import { isFavorite } from '../utils/favorites';
|
||||
import { handleKeyboardActivate } from '../utils/a11y';
|
||||
import { isPublicChannelKey } from '../utils/publicChannel';
|
||||
import { stripRegionScopePrefix } from '../utils/regionScope';
|
||||
import { isPrefixOnlyContact } from '../utils/pubkey';
|
||||
import { ContactAvatar } from './ContactAvatar';
|
||||
@@ -304,7 +305,7 @@ export function ChatHeader({
|
||||
title={
|
||||
activeContactIsPrefixOnly
|
||||
? 'Direct Trace unavailable until the full contact key is known'
|
||||
: 'Direct Trace. Send a zero-hop packet to thie contact and display out and back SNR'
|
||||
: 'Direct Trace. Send a zero-hop packet to this contact and display out and back SNR'
|
||||
}
|
||||
aria-label="Direct Trace"
|
||||
disabled={activeContactIsPrefixOnly}
|
||||
@@ -379,7 +380,7 @@ export function ChatHeader({
|
||||
)}
|
||||
</button>
|
||||
)}
|
||||
{!(conversation.type === 'channel' && conversation.name === 'Public') && (
|
||||
{!(conversation.type === 'channel' && isPublicChannelKey(conversation.id)) && (
|
||||
<button
|
||||
className="p-1 rounded hover:bg-destructive/10 text-muted-foreground hover:text-destructive text-lg leading-none transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||
onClick={() => {
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
hasRoutingOverride,
|
||||
parsePathHops,
|
||||
} from '../utils/pathUtils';
|
||||
import { isPublicChannelKey } from '../utils/publicChannel';
|
||||
import { getMapFocusHash } from '../utils/urlHash';
|
||||
import { isFavorite } from '../utils/favorites';
|
||||
import { handleKeyboardActivate } from '../utils/a11y';
|
||||
@@ -611,7 +612,7 @@ function MostActiveRoomsSection({
|
||||
onKeyDown={onNavigateToChannel ? handleKeyboardActivate : undefined}
|
||||
onClick={() => onNavigateToChannel?.(room.channel_key)}
|
||||
>
|
||||
{room.channel_name.startsWith('#') || room.channel_name === 'Public'
|
||||
{room.channel_name.startsWith('#') || isPublicChannelKey(room.channel_key)
|
||||
? room.channel_name
|
||||
: `#${room.channel_name}`}
|
||||
</span>
|
||||
|
||||
@@ -104,7 +104,7 @@ function MapBoundsHandler({
|
||||
}
|
||||
|
||||
export function MapView({ contacts, focusedKey }: MapViewProps) {
|
||||
const sevenDaysAgo = Date.now() / 1000 - 7 * 24 * 60 * 60;
|
||||
const [sevenDaysAgo] = useState(() => Date.now() / 1000 - 7 * 24 * 60 * 60);
|
||||
|
||||
// Filter to contacts with GPS coordinates, heard within the last 7 days.
|
||||
// Always include the focused contact so "view on map" links work for older nodes.
|
||||
|
||||
@@ -24,6 +24,7 @@ const CHANNEL_WARNING_THRESHOLD = 120; // Conservative for multi-hop
|
||||
const CHANNEL_DANGER_BUFFER = 8; // Red zone starts this many bytes before hard limit
|
||||
|
||||
const textEncoder = new TextEncoder();
|
||||
const RADIO_NO_RESPONSE_SNIPPET = 'no response was heard back';
|
||||
/** Get UTF-8 byte length of a string (LoRa packets are byte-constrained, not character-constrained). */
|
||||
function byteLen(s: string): number {
|
||||
return textEncoder.encode(s).length;
|
||||
@@ -118,8 +119,11 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
|
||||
setText('');
|
||||
} catch (err) {
|
||||
console.error('Failed to send message:', err);
|
||||
toast.error('Failed to send message', {
|
||||
description: err instanceof Error ? err.message : 'Check radio connection',
|
||||
const description = err instanceof Error ? err.message : 'Check radio connection';
|
||||
const isRadioNoResponse =
|
||||
err instanceof Error && err.message.toLowerCase().includes(RADIO_NO_RESPONSE_SNIPPET);
|
||||
toast.error(isRadioNoResponse ? 'Radio did not confirm send' : 'Failed to send message', {
|
||||
description,
|
||||
});
|
||||
return;
|
||||
} finally {
|
||||
|
||||
@@ -329,7 +329,7 @@ export function MessageList({
|
||||
}, [messages, onResendChannelMessage]);
|
||||
|
||||
// Sort messages by received_at ascending (oldest first)
|
||||
// Note: Deduplication is handled by useConversationMessages.addMessageIfNew()
|
||||
// Note: Deduplication is handled by useConversationMessages.observeMessage()
|
||||
// and the database UNIQUE constraint on (type, conversation_key, text, sender_timestamp)
|
||||
const sortedMessages = useMemo(
|
||||
() => [...messages].sort((a, b) => a.received_at - b.received_at || a.id - b.id),
|
||||
|
||||
@@ -15,6 +15,7 @@ import { Input } from './ui/input';
|
||||
import { Label } from './ui/label';
|
||||
import { Checkbox } from './ui/checkbox';
|
||||
import { Button } from './ui/button';
|
||||
import { toast } from './ui/sonner';
|
||||
|
||||
type Tab = 'existing' | 'new-contact' | 'new-room' | 'hashtag';
|
||||
|
||||
@@ -90,6 +91,9 @@ export function NewMessageModal({
|
||||
resetForm();
|
||||
onClose();
|
||||
} catch (err) {
|
||||
toast.error('Failed to create conversation', {
|
||||
description: err instanceof Error ? err.message : undefined,
|
||||
});
|
||||
setError(err instanceof Error ? err.message : 'Failed to create');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
@@ -123,6 +127,9 @@ export function NewMessageModal({
|
||||
setName('');
|
||||
hashtagInputRef.current?.focus();
|
||||
} catch (err) {
|
||||
toast.error('Failed to create conversation', {
|
||||
description: err instanceof Error ? err.message : undefined,
|
||||
});
|
||||
setError(err instanceof Error ? err.message : 'Failed to create');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
|
||||
@@ -8,6 +8,7 @@ import { RepeaterLogin } from './RepeaterLogin';
|
||||
import { useRepeaterDashboard } from '../hooks/useRepeaterDashboard';
|
||||
import { isFavorite } from '../utils/favorites';
|
||||
import { handleKeyboardActivate } from '../utils/a11y';
|
||||
import { isValidLocation } from '../utils/pathUtils';
|
||||
import { ContactStatusInfo } from './ContactStatusInfo';
|
||||
import type { Contact, Conversation, Favorite, PathDiscoveryResponse } from '../types';
|
||||
import { TelemetryPane } from './repeater/RepeaterTelemetryPane';
|
||||
@@ -60,6 +61,8 @@ export function RepeaterDashboard({
|
||||
onDeleteContact,
|
||||
}: RepeaterDashboardProps) {
|
||||
const [pathDiscoveryOpen, setPathDiscoveryOpen] = useState(false);
|
||||
const contact = contacts.find((c) => c.public_key === conversation.id) ?? null;
|
||||
const hasAdvertLocation = isValidLocation(contact?.lat ?? null, contact?.lon ?? null);
|
||||
const {
|
||||
loggedIn,
|
||||
loginLoading,
|
||||
@@ -77,9 +80,8 @@ export function RepeaterDashboard({
|
||||
sendFloodAdvert,
|
||||
rebootRepeater,
|
||||
syncClock,
|
||||
} = useRepeaterDashboard(conversation);
|
||||
} = useRepeaterDashboard(conversation, { hasAdvertLocation });
|
||||
|
||||
const contact = contacts.find((c) => c.public_key === conversation.id);
|
||||
const isFav = isFavorite(favorites, 'contact', conversation.id);
|
||||
|
||||
// Loading all panes indicator
|
||||
@@ -225,7 +227,7 @@ export function RepeaterDashboard({
|
||||
) : (
|
||||
<div className="space-y-4">
|
||||
{/* Top row: Telemetry + Radio Settings | Node Info + Neighbors */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
<div className="grid grid-cols-1 gap-4 md:grid-cols-2 md:items-stretch">
|
||||
<div className="flex flex-col gap-4">
|
||||
<NodeInfoPane
|
||||
data={paneData.nodeInfo}
|
||||
@@ -255,12 +257,13 @@ export function RepeaterDashboard({
|
||||
disabled={anyLoading}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex min-h-0 flex-col gap-4">
|
||||
<NeighborsPane
|
||||
data={paneData.neighbors}
|
||||
state={paneStates.neighbors}
|
||||
onRefresh={() => refreshPane('neighbors')}
|
||||
disabled={anyLoading}
|
||||
repeaterContact={contact}
|
||||
contacts={contacts}
|
||||
nodeInfo={paneData.nodeInfo}
|
||||
nodeInfoState={paneStates.nodeInfo}
|
||||
|
||||
@@ -155,11 +155,13 @@ export function SettingsModal(props: SettingsModalProps) {
|
||||
const renderSectionHeader = (section: SettingsSection): ReactNode => {
|
||||
if (!showSectionButton) return null;
|
||||
const Icon = SETTINGS_SECTION_ICONS[section];
|
||||
const disabled = section === 'radio' && !config;
|
||||
return (
|
||||
<button
|
||||
type="button"
|
||||
className={sectionButtonClasses}
|
||||
className={`${sectionButtonClasses} disabled:cursor-not-allowed disabled:opacity-50`}
|
||||
aria-expanded={expandedSections[section]}
|
||||
disabled={disabled}
|
||||
onClick={() => toggleSection(section)}
|
||||
>
|
||||
<span className="inline-flex items-center gap-2 font-medium" role="heading" aria-level={3}>
|
||||
@@ -177,33 +179,38 @@ export function SettingsModal(props: SettingsModalProps) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return !config ? (
|
||||
<div className="py-8 text-center text-muted-foreground">Loading configuration...</div>
|
||||
) : (
|
||||
return (
|
||||
<div className={settingsContainerClass}>
|
||||
{shouldRenderSection('radio') && (
|
||||
<section className={sectionWrapperClass}>
|
||||
{renderSectionHeader('radio')}
|
||||
{isSectionVisible('radio') && appSettings && (
|
||||
<SettingsRadioSection
|
||||
config={config}
|
||||
health={health}
|
||||
appSettings={appSettings}
|
||||
pageMode={pageMode}
|
||||
onSave={onSave}
|
||||
onSaveAppSettings={onSaveAppSettings}
|
||||
onSetPrivateKey={onSetPrivateKey}
|
||||
onReboot={onReboot}
|
||||
onDisconnect={onDisconnect}
|
||||
onReconnect={onReconnect}
|
||||
onAdvertise={onAdvertise}
|
||||
meshDiscovery={meshDiscovery}
|
||||
meshDiscoveryLoadingTarget={meshDiscoveryLoadingTarget}
|
||||
onDiscoverMesh={onDiscoverMesh}
|
||||
onClose={onClose}
|
||||
className={sectionContentClass}
|
||||
/>
|
||||
)}
|
||||
{isSectionVisible('radio') &&
|
||||
(config && appSettings ? (
|
||||
<SettingsRadioSection
|
||||
config={config}
|
||||
health={health}
|
||||
appSettings={appSettings}
|
||||
pageMode={pageMode}
|
||||
onSave={onSave}
|
||||
onSaveAppSettings={onSaveAppSettings}
|
||||
onSetPrivateKey={onSetPrivateKey}
|
||||
onReboot={onReboot}
|
||||
onDisconnect={onDisconnect}
|
||||
onReconnect={onReconnect}
|
||||
onAdvertise={onAdvertise}
|
||||
meshDiscovery={meshDiscovery}
|
||||
meshDiscoveryLoadingTarget={meshDiscoveryLoadingTarget}
|
||||
onDiscoverMesh={onDiscoverMesh}
|
||||
onClose={onClose}
|
||||
className={sectionContentClass}
|
||||
/>
|
||||
) : (
|
||||
<div className={sectionContentClass}>
|
||||
<div className="rounded-md border border-input bg-muted/20 px-4 py-3 text-sm text-muted-foreground">
|
||||
Radio settings are unavailable until a radio connects.
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</section>
|
||||
)}
|
||||
|
||||
@@ -222,19 +229,26 @@ export function SettingsModal(props: SettingsModalProps) {
|
||||
{shouldRenderSection('database') && (
|
||||
<section className={sectionWrapperClass}>
|
||||
{renderSectionHeader('database')}
|
||||
{isSectionVisible('database') && appSettings && (
|
||||
<SettingsDatabaseSection
|
||||
appSettings={appSettings}
|
||||
health={health}
|
||||
onSaveAppSettings={onSaveAppSettings}
|
||||
onHealthRefresh={onHealthRefresh}
|
||||
blockedKeys={blockedKeys}
|
||||
blockedNames={blockedNames}
|
||||
onToggleBlockedKey={onToggleBlockedKey}
|
||||
onToggleBlockedName={onToggleBlockedName}
|
||||
className={sectionContentClass}
|
||||
/>
|
||||
)}
|
||||
{isSectionVisible('database') &&
|
||||
(appSettings ? (
|
||||
<SettingsDatabaseSection
|
||||
appSettings={appSettings}
|
||||
health={health}
|
||||
onSaveAppSettings={onSaveAppSettings}
|
||||
onHealthRefresh={onHealthRefresh}
|
||||
blockedKeys={blockedKeys}
|
||||
blockedNames={blockedNames}
|
||||
onToggleBlockedKey={onToggleBlockedKey}
|
||||
onToggleBlockedName={onToggleBlockedName}
|
||||
className={sectionContentClass}
|
||||
/>
|
||||
) : (
|
||||
<div className={sectionContentClass}>
|
||||
<div className="rounded-md border border-input bg-muted/20 px-4 py-3 text-sm text-muted-foreground">
|
||||
Loading app settings...
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</section>
|
||||
)}
|
||||
|
||||
|
||||
@@ -19,7 +19,18 @@ import {
|
||||
type Conversation,
|
||||
type Favorite,
|
||||
} from '../types';
|
||||
import { getStateKey, type ConversationTimes, type SortOrder } from '../utils/conversationState';
|
||||
import {
|
||||
buildSidebarSectionSortOrders,
|
||||
getStateKey,
|
||||
loadLegacyLocalStorageSortOrder,
|
||||
loadLocalStorageSidebarSectionSortOrders,
|
||||
saveLocalStorageSidebarSectionSortOrders,
|
||||
type ConversationTimes,
|
||||
type SidebarSectionSortOrders,
|
||||
type SidebarSortableSection,
|
||||
type SortOrder,
|
||||
} from '../utils/conversationState';
|
||||
import { isPublicChannelKey } from '../utils/publicChannel';
|
||||
import { getContactDisplayName } from '../utils/pubkey';
|
||||
import { handleKeyboardActivate } from '../utils/a11y';
|
||||
import { ContactAvatar } from './ContactAvatar';
|
||||
@@ -91,13 +102,36 @@ interface SidebarProps {
|
||||
onToggleCracker: () => void;
|
||||
onMarkAllRead: () => void;
|
||||
favorites: Favorite[];
|
||||
/** Sort order from server settings */
|
||||
sortOrder?: SortOrder;
|
||||
/** Callback when sort order changes */
|
||||
onSortOrderChange?: (order: SortOrder) => void;
|
||||
/** Legacy global sort order, used only to seed per-section local preferences. */
|
||||
legacySortOrder?: SortOrder;
|
||||
isConversationNotificationsEnabled?: (type: 'channel' | 'contact', id: string) => boolean;
|
||||
}
|
||||
|
||||
type InitialSectionSortState = {
|
||||
orders: SidebarSectionSortOrders;
|
||||
source: 'section' | 'legacy' | 'none';
|
||||
};
|
||||
|
||||
function loadInitialSectionSortOrders(): InitialSectionSortState {
|
||||
const storedOrders = loadLocalStorageSidebarSectionSortOrders();
|
||||
if (storedOrders) {
|
||||
return { orders: storedOrders, source: 'section' };
|
||||
}
|
||||
|
||||
const legacyOrder = loadLegacyLocalStorageSortOrder();
|
||||
if (legacyOrder) {
|
||||
return {
|
||||
orders: buildSidebarSectionSortOrders(legacyOrder),
|
||||
source: 'legacy',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
orders: buildSidebarSectionSortOrders(),
|
||||
source: 'none',
|
||||
};
|
||||
}
|
||||
|
||||
export function Sidebar({
|
||||
contacts,
|
||||
channels,
|
||||
@@ -112,12 +146,12 @@ export function Sidebar({
|
||||
onToggleCracker,
|
||||
onMarkAllRead,
|
||||
favorites,
|
||||
sortOrder: sortOrderProp = 'recent',
|
||||
onSortOrderChange,
|
||||
legacySortOrder,
|
||||
isConversationNotificationsEnabled,
|
||||
}: SidebarProps) {
|
||||
const sortOrder = sortOrderProp;
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const initialSectionSortState = useMemo(loadInitialSectionSortOrders, []);
|
||||
const [sectionSortOrders, setSectionSortOrders] = useState(initialSectionSortState.orders);
|
||||
const initialCollapsedState = useMemo(loadCollapsedState, []);
|
||||
const [toolsCollapsed, setToolsCollapsed] = useState(initialCollapsedState.tools);
|
||||
const [favoritesCollapsed, setFavoritesCollapsed] = useState(initialCollapsedState.favorites);
|
||||
@@ -125,10 +159,31 @@ export function Sidebar({
|
||||
const [contactsCollapsed, setContactsCollapsed] = useState(initialCollapsedState.contacts);
|
||||
const [repeatersCollapsed, setRepeatersCollapsed] = useState(initialCollapsedState.repeaters);
|
||||
const collapseSnapshotRef = useRef<CollapseState | null>(null);
|
||||
const sectionSortSourceRef = useRef(initialSectionSortState.source);
|
||||
|
||||
const handleSortToggle = () => {
|
||||
const newOrder = sortOrder === 'alpha' ? 'recent' : 'alpha';
|
||||
onSortOrderChange?.(newOrder);
|
||||
useEffect(() => {
|
||||
if (sectionSortSourceRef.current === 'legacy') {
|
||||
saveLocalStorageSidebarSectionSortOrders(sectionSortOrders);
|
||||
sectionSortSourceRef.current = 'section';
|
||||
return;
|
||||
}
|
||||
|
||||
if (sectionSortSourceRef.current !== 'none' || legacySortOrder === undefined) return;
|
||||
|
||||
const seededOrders = buildSidebarSectionSortOrders(legacySortOrder);
|
||||
setSectionSortOrders(seededOrders);
|
||||
saveLocalStorageSidebarSectionSortOrders(seededOrders);
|
||||
sectionSortSourceRef.current = 'section';
|
||||
}, [legacySortOrder, sectionSortOrders]);
|
||||
|
||||
const handleSortToggle = (section: SidebarSortableSection) => {
|
||||
setSectionSortOrders((prev) => {
|
||||
const nextOrder = prev[section] === 'alpha' ? 'recent' : 'alpha';
|
||||
const updated = { ...prev, [section]: nextOrder };
|
||||
saveLocalStorageSidebarSectionSortOrders(updated);
|
||||
sectionSortSourceRef.current = 'section';
|
||||
return updated;
|
||||
});
|
||||
};
|
||||
|
||||
const handleSelectConversation = (conversation: Conversation) => {
|
||||
@@ -200,10 +255,10 @@ export function Sidebar({
|
||||
() =>
|
||||
[...uniqueChannels].sort((a, b) => {
|
||||
// Public channel always sorts to the top
|
||||
if (a.name === 'Public') return -1;
|
||||
if (b.name === 'Public') return 1;
|
||||
if (isPublicChannelKey(a.key)) return -1;
|
||||
if (isPublicChannelKey(b.key)) return 1;
|
||||
|
||||
if (sortOrder === 'recent') {
|
||||
if (sectionSortOrders.channels === 'recent') {
|
||||
const timeA = getLastMessageTime('channel', a.key);
|
||||
const timeB = getLastMessageTime('channel', b.key);
|
||||
if (timeA && timeB) return timeB - timeA;
|
||||
@@ -212,13 +267,13 @@ export function Sidebar({
|
||||
}
|
||||
return a.name.localeCompare(b.name);
|
||||
}),
|
||||
[uniqueChannels, sortOrder, getLastMessageTime]
|
||||
[uniqueChannels, sectionSortOrders.channels, getLastMessageTime]
|
||||
);
|
||||
|
||||
const sortContactsByOrder = useCallback(
|
||||
(items: Contact[]) =>
|
||||
(items: Contact[], order: SortOrder) =>
|
||||
[...items].sort((a, b) => {
|
||||
if (sortOrder === 'recent') {
|
||||
if (order === 'recent') {
|
||||
const timeA = getLastMessageTime('contact', a.public_key);
|
||||
const timeB = getLastMessageTime('contact', b.public_key);
|
||||
if (timeA && timeB) return timeB - timeA;
|
||||
@@ -227,18 +282,26 @@ export function Sidebar({
|
||||
}
|
||||
return (a.name || a.public_key).localeCompare(b.name || b.public_key);
|
||||
}),
|
||||
[sortOrder, getLastMessageTime]
|
||||
[getLastMessageTime]
|
||||
);
|
||||
|
||||
// Split non-repeater contacts and repeater contacts into separate sorted lists
|
||||
const sortedNonRepeaterContacts = useMemo(
|
||||
() => sortContactsByOrder(uniqueContacts.filter((c) => c.type !== CONTACT_TYPE_REPEATER)),
|
||||
[uniqueContacts, sortContactsByOrder]
|
||||
() =>
|
||||
sortContactsByOrder(
|
||||
uniqueContacts.filter((c) => c.type !== CONTACT_TYPE_REPEATER),
|
||||
sectionSortOrders.contacts
|
||||
),
|
||||
[uniqueContacts, sectionSortOrders.contacts, sortContactsByOrder]
|
||||
);
|
||||
|
||||
const sortedRepeaters = useMemo(
|
||||
() => sortContactsByOrder(uniqueContacts.filter((c) => c.type === CONTACT_TYPE_REPEATER)),
|
||||
[uniqueContacts, sortContactsByOrder]
|
||||
() =>
|
||||
sortContactsByOrder(
|
||||
uniqueContacts.filter((c) => c.type === CONTACT_TYPE_REPEATER),
|
||||
sectionSortOrders.repeaters
|
||||
),
|
||||
[uniqueContacts, sectionSortOrders.repeaters, sortContactsByOrder]
|
||||
);
|
||||
|
||||
// Filter by search query
|
||||
@@ -604,11 +667,12 @@ export function Sidebar({
|
||||
title: string,
|
||||
collapsed: boolean,
|
||||
onToggle: () => void,
|
||||
showSortToggle = false,
|
||||
sortSection: SidebarSortableSection | null = null,
|
||||
unreadCount = 0,
|
||||
highlightUnread = false
|
||||
) => {
|
||||
const effectiveCollapsed = isSearching ? false : collapsed;
|
||||
const sectionSortOrder = sortSection ? sectionSortOrders[sortSection] : null;
|
||||
|
||||
return (
|
||||
<div className="flex justify-between items-center px-3 py-2 pt-3.5">
|
||||
@@ -630,16 +694,24 @@ export function Sidebar({
|
||||
)}
|
||||
<span>{title}</span>
|
||||
</button>
|
||||
{(showSortToggle || unreadCount > 0) && (
|
||||
{(sortSection || unreadCount > 0) && (
|
||||
<div className="ml-auto flex items-center gap-1.5">
|
||||
{showSortToggle && (
|
||||
{sortSection && sectionSortOrder && (
|
||||
<button
|
||||
className="bg-transparent text-muted-foreground/60 px-1 py-0.5 text-[10px] rounded hover:text-foreground transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
||||
onClick={handleSortToggle}
|
||||
aria-label={sortOrder === 'alpha' ? 'Sort by recent' : 'Sort alphabetically'}
|
||||
title={sortOrder === 'alpha' ? 'Sort by recent' : 'Sort alphabetically'}
|
||||
onClick={() => handleSortToggle(sortSection)}
|
||||
aria-label={
|
||||
sectionSortOrder === 'alpha'
|
||||
? `Sort ${title} by recent`
|
||||
: `Sort ${title} alphabetically`
|
||||
}
|
||||
title={
|
||||
sectionSortOrder === 'alpha'
|
||||
? `Sort ${title} by recent`
|
||||
: `Sort ${title} alphabetically`
|
||||
}
|
||||
>
|
||||
{sortOrder === 'alpha' ? 'A-Z' : '⏱'}
|
||||
{sectionSortOrder === 'alpha' ? 'A-Z' : '⏱'}
|
||||
</button>
|
||||
)}
|
||||
{unreadCount > 0 && (
|
||||
@@ -731,7 +803,7 @@ export function Sidebar({
|
||||
'Favorites',
|
||||
favoritesCollapsed,
|
||||
() => setFavoritesCollapsed((prev) => !prev),
|
||||
false,
|
||||
null,
|
||||
favoritesUnreadCount,
|
||||
favoritesHasMention
|
||||
)}
|
||||
@@ -747,7 +819,7 @@ export function Sidebar({
|
||||
'Channels',
|
||||
channelsCollapsed,
|
||||
() => setChannelsCollapsed((prev) => !prev),
|
||||
true,
|
||||
'channels',
|
||||
channelsUnreadCount,
|
||||
channelsHasMention
|
||||
)}
|
||||
@@ -763,7 +835,7 @@ export function Sidebar({
|
||||
'Contacts',
|
||||
contactsCollapsed,
|
||||
() => setContactsCollapsed((prev) => !prev),
|
||||
true,
|
||||
'contacts',
|
||||
contactsUnreadCount,
|
||||
contactsUnreadCount > 0
|
||||
)}
|
||||
@@ -779,7 +851,7 @@ export function Sidebar({
|
||||
'Repeaters',
|
||||
repeatersCollapsed,
|
||||
() => setRepeatersCollapsed((prev) => !prev),
|
||||
true,
|
||||
'repeaters',
|
||||
repeatersUnreadCount
|
||||
)}
|
||||
{(isSearching || !repeatersCollapsed) &&
|
||||
|
||||
@@ -19,6 +19,7 @@ export function NeighborsPane({
|
||||
state,
|
||||
onRefresh,
|
||||
disabled,
|
||||
repeaterContact,
|
||||
contacts,
|
||||
nodeInfo,
|
||||
nodeInfoState,
|
||||
@@ -28,11 +29,15 @@ export function NeighborsPane({
|
||||
state: PaneState;
|
||||
onRefresh: () => void;
|
||||
disabled?: boolean;
|
||||
repeaterContact: Contact | null;
|
||||
contacts: Contact[];
|
||||
nodeInfo: RepeaterNodeInfoResponse | null;
|
||||
nodeInfoState: PaneState;
|
||||
repeaterName: string | null;
|
||||
}) {
|
||||
const advertLat = repeaterContact?.lat ?? null;
|
||||
const advertLon = repeaterContact?.lon ?? null;
|
||||
|
||||
const radioLat = useMemo(() => {
|
||||
const parsed = nodeInfo?.lat != null ? parseFloat(nodeInfo.lat) : null;
|
||||
return Number.isFinite(parsed) ? parsed : null;
|
||||
@@ -43,11 +48,26 @@ export function NeighborsPane({
|
||||
return Number.isFinite(parsed) ? parsed : null;
|
||||
}, [nodeInfo?.lon]);
|
||||
|
||||
const radioName = nodeInfo?.name || repeaterName;
|
||||
const hasValidRepeaterGps = isValidLocation(radioLat, radioLon);
|
||||
const showGpsUnavailableMessage =
|
||||
!hasValidRepeaterGps &&
|
||||
(nodeInfoState.error !== null || nodeInfoState.fetched_at != null || nodeInfo !== null);
|
||||
const positionSource = useMemo(() => {
|
||||
if (isValidLocation(radioLat, radioLon)) {
|
||||
return { lat: radioLat, lon: radioLon, source: 'reported' as const };
|
||||
}
|
||||
if (isValidLocation(advertLat, advertLon)) {
|
||||
return { lat: advertLat, lon: advertLon, source: 'advert' as const };
|
||||
}
|
||||
return { lat: null, lon: null, source: null };
|
||||
}, [advertLat, advertLon, radioLat, radioLon]);
|
||||
|
||||
const radioName = nodeInfo?.name || repeaterContact?.name || repeaterName;
|
||||
const hasValidRepeaterGps = positionSource.source !== null;
|
||||
const headerNote =
|
||||
positionSource.source === 'reported'
|
||||
? 'Using repeater-reported position'
|
||||
: positionSource.source === 'advert'
|
||||
? 'Using advert position'
|
||||
: nodeInfoState.loading
|
||||
? 'Waiting for repeater position'
|
||||
: 'No repeater position available';
|
||||
|
||||
// Resolve contact data for each neighbor in a single pass — used for
|
||||
// coords (mini-map), distances (table column), and sorted display order.
|
||||
@@ -71,7 +91,7 @@ export function NeighborsPane({
|
||||
|
||||
let dist: string | null = null;
|
||||
if (hasValidRepeaterGps && isValidLocation(nLat, nLon)) {
|
||||
const distKm = calculateDistance(radioLat, radioLon, nLat, nLon);
|
||||
const distKm = calculateDistance(positionSource.lat, positionSource.lon, nLat, nLon);
|
||||
if (distKm != null) {
|
||||
dist = formatDistance(distKm);
|
||||
anyDist = true;
|
||||
@@ -91,24 +111,25 @@ export function NeighborsPane({
|
||||
sorted: enriched,
|
||||
hasDistances: anyDist,
|
||||
};
|
||||
}, [contacts, data, hasValidRepeaterGps, radioLat, radioLon]);
|
||||
}, [contacts, data, hasValidRepeaterGps, positionSource.lat, positionSource.lon]);
|
||||
|
||||
return (
|
||||
<RepeaterPane
|
||||
title="Neighbors"
|
||||
headerNote={headerNote}
|
||||
state={state}
|
||||
onRefresh={onRefresh}
|
||||
disabled={disabled}
|
||||
className="flex flex-col"
|
||||
contentClassName="flex-1 flex flex-col"
|
||||
className="flex min-h-0 flex-1 flex-col"
|
||||
contentClassName="flex min-h-0 flex-1 flex-col"
|
||||
>
|
||||
{!data ? (
|
||||
<NotFetched />
|
||||
) : sorted.length === 0 ? (
|
||||
<p className="text-sm text-muted-foreground">No neighbors reported</p>
|
||||
) : (
|
||||
<div className="flex-1 flex flex-col gap-2">
|
||||
<div className="overflow-x-auto">
|
||||
<div className="flex min-h-0 flex-1 flex-col gap-2">
|
||||
<div className="shrink-0 overflow-x-auto">
|
||||
<table className="w-full text-sm">
|
||||
<thead>
|
||||
<tr className="text-left text-muted-foreground text-xs">
|
||||
@@ -145,7 +166,7 @@ export function NeighborsPane({
|
||||
{hasValidRepeaterGps && (neighborsWithCoords.length > 0 || hasValidRepeaterGps) ? (
|
||||
<Suspense
|
||||
fallback={
|
||||
<div className="h-48 flex items-center justify-center text-xs text-muted-foreground">
|
||||
<div className="flex min-h-48 flex-1 items-center justify-center text-xs text-muted-foreground">
|
||||
Loading map...
|
||||
</div>
|
||||
}
|
||||
@@ -153,18 +174,17 @@ export function NeighborsPane({
|
||||
<NeighborsMiniMap
|
||||
key={neighborsWithCoords.map((n) => n.pubkey_prefix).join(',')}
|
||||
neighbors={neighborsWithCoords}
|
||||
radioLat={radioLat}
|
||||
radioLon={radioLon}
|
||||
radioLat={positionSource.lat}
|
||||
radioLon={positionSource.lon}
|
||||
radioName={radioName}
|
||||
/>
|
||||
</Suspense>
|
||||
) : showGpsUnavailableMessage ? (
|
||||
) : (
|
||||
<div className="rounded border border-border/70 bg-muted/20 px-3 py-2 text-xs text-muted-foreground">
|
||||
GPS info failed to fetch; map and distance data not available. This may be due to
|
||||
missing or zero-zero GPS data on the repeater, or due to transient fetch failure. Try
|
||||
refreshing.
|
||||
Map and distance data are unavailable until this repeater has a valid position from
|
||||
either its advert or a Node Info fetch.
|
||||
</div>
|
||||
) : null}
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</RepeaterPane>
|
||||
|
||||
@@ -16,8 +16,8 @@ export function NodeInfoPane({
|
||||
}) {
|
||||
const clockDrift = useMemo(() => {
|
||||
if (!data?.clock_utc) return null;
|
||||
return formatClockDrift(data.clock_utc);
|
||||
}, [data?.clock_utc]);
|
||||
return formatClockDrift(data.clock_utc, state.fetched_at ?? undefined);
|
||||
}, [data?.clock_utc, state.fetched_at]);
|
||||
|
||||
return (
|
||||
<RepeaterPane title="Node Info" state={state} onRefresh={onRefresh} disabled={disabled}>
|
||||
|
||||
@@ -39,7 +39,10 @@ export function formatDuration(seconds: number): string {
|
||||
return `${mins}m`;
|
||||
}
|
||||
|
||||
export function formatClockDrift(clockUtc: string): { text: string; isLarge: boolean } {
|
||||
export function formatClockDrift(
|
||||
clockUtc: string,
|
||||
referenceTimeMs: number = Date.now()
|
||||
): { text: string; isLarge: boolean } {
|
||||
// Firmware format: "HH:MM - D/M/YYYY UTC" or "HH:MM:SS - D/M/YYYY UTC"
|
||||
// Also handle ISO-like: "YYYY-MM-DD HH:MM:SS"
|
||||
let parsed: Date;
|
||||
@@ -56,7 +59,7 @@ export function formatClockDrift(clockUtc: string): { text: string; isLarge: boo
|
||||
}
|
||||
if (isNaN(parsed.getTime())) return { text: '(invalid)', isLarge: false };
|
||||
|
||||
const driftMs = Math.abs(Date.now() - parsed.getTime());
|
||||
const driftMs = Math.abs(referenceTimeMs - parsed.getTime());
|
||||
const driftSec = Math.floor(driftMs / 1000);
|
||||
|
||||
if (driftSec >= 86400) return { text: '>24 hours!', isLarge: true };
|
||||
@@ -106,6 +109,7 @@ function formatFetchedTime(fetchedAt: number): string {
|
||||
|
||||
export function RepeaterPane({
|
||||
title,
|
||||
headerNote,
|
||||
state,
|
||||
onRefresh,
|
||||
disabled,
|
||||
@@ -114,6 +118,7 @@ export function RepeaterPane({
|
||||
contentClassName,
|
||||
}: {
|
||||
title: string;
|
||||
headerNote?: ReactNode;
|
||||
state: PaneState;
|
||||
onRefresh?: () => void;
|
||||
disabled?: boolean;
|
||||
@@ -128,6 +133,7 @@ export function RepeaterPane({
|
||||
<div className="flex items-center justify-between px-3 py-2 bg-muted/50 border-b border-border">
|
||||
<div className="min-w-0">
|
||||
<h3 className="text-sm font-medium">{title}</h3>
|
||||
{headerNote && <p className="text-[11px] text-muted-foreground">{headerNote}</p>}
|
||||
{fetchedAt && (
|
||||
<p
|
||||
className="text-[11px] text-muted-foreground"
|
||||
|
||||
@@ -173,7 +173,8 @@ export function SettingsDatabaseSection({
|
||||
Deletes archival copies of raw packet bytes for messages that are already decrypted and
|
||||
visible in your chat history.{' '}
|
||||
<em className="text-muted-foreground/80">
|
||||
This will not affect any displayed messages or app functionality.
|
||||
This will not affect any displayed messages or app functionality, nor impact your
|
||||
ability to do historical decryption.
|
||||
</em>{' '}
|
||||
The raw bytes are only useful for manual packet analysis.
|
||||
</p>
|
||||
|
||||
@@ -333,6 +333,35 @@ export function SettingsRadioSection({
|
||||
? `Connection paused${health?.connection_info ? ` (${health.connection_info})` : ''}`
|
||||
: 'Not connected';
|
||||
|
||||
const deviceInfoLabel = useMemo(() => {
|
||||
const info = health?.radio_device_info;
|
||||
if (!info) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const model = info.model?.trim() || null;
|
||||
const firmwareParts = [info.firmware_build?.trim(), info.firmware_version?.trim()].filter(
|
||||
(value): value is string => Boolean(value)
|
||||
);
|
||||
const capacityParts = [
|
||||
typeof info.max_contacts === 'number' ? `${info.max_contacts} contacts` : null,
|
||||
typeof info.max_channels === 'number' ? `${info.max_channels} channels` : null,
|
||||
].filter((value): value is string => value !== null);
|
||||
|
||||
if (!model && firmwareParts.length === 0 && capacityParts.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let label = model ?? 'Radio';
|
||||
if (firmwareParts.length > 0) {
|
||||
label += ` running ${firmwareParts.join('/')}`;
|
||||
}
|
||||
if (capacityParts.length > 0) {
|
||||
label += ` (max: ${capacityParts.join(', ')})`;
|
||||
}
|
||||
return label;
|
||||
}, [health?.radio_device_info]);
|
||||
|
||||
const handleConnectionAction = async () => {
|
||||
setConnectionBusy(true);
|
||||
try {
|
||||
@@ -377,6 +406,7 @@ export function SettingsRadioSection({
|
||||
{connectionStatusLabel}
|
||||
</span>
|
||||
</div>
|
||||
{deviceInfoLabel && <p className="text-sm text-muted-foreground">{deviceInfoLabel}</p>}
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export { useUnreadCounts } from './useUnreadCounts';
|
||||
export { useConversationMessages, getMessageContentKey } from './useConversationMessages';
|
||||
export { useConversationMessages } from './useConversationMessages';
|
||||
export { useRadioControl } from './useRadioControl';
|
||||
export { useRepeaterDashboard } from './useRepeaterDashboard';
|
||||
export { useAppShell } from './useAppShell';
|
||||
|
||||
@@ -43,25 +43,6 @@ export function useAppSettings() {
|
||||
[fetchAppSettings]
|
||||
);
|
||||
|
||||
const handleSortOrderChange = useCallback(
|
||||
async (order: 'recent' | 'alpha') => {
|
||||
const previousOrder = appSettings?.sidebar_sort_order ?? 'recent';
|
||||
|
||||
// Optimistic update for responsive UI
|
||||
setAppSettings((prev) => (prev ? { ...prev, sidebar_sort_order: order } : prev));
|
||||
|
||||
try {
|
||||
const updatedSettings = await api.updateSettings({ sidebar_sort_order: order });
|
||||
setAppSettings(updatedSettings);
|
||||
} catch (err) {
|
||||
console.error('Failed to update sort order:', err);
|
||||
setAppSettings((prev) => (prev ? { ...prev, sidebar_sort_order: previousOrder } : prev));
|
||||
toast.error('Failed to save sort preference');
|
||||
}
|
||||
},
|
||||
[appSettings?.sidebar_sort_order]
|
||||
);
|
||||
|
||||
const handleToggleBlockedKey = useCallback(async (key: string) => {
|
||||
const normalizedKey = key.toLowerCase();
|
||||
setAppSettings((prev) => {
|
||||
@@ -198,7 +179,6 @@ export function useAppSettings() {
|
||||
favorites,
|
||||
fetchAppSettings,
|
||||
handleSaveAppSettings,
|
||||
handleSortOrderChange,
|
||||
handleToggleFavorite,
|
||||
handleToggleBlockedKey,
|
||||
handleToggleBlockedName,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { startTransition, useCallback, useState } from 'react';
|
||||
import { startTransition, useCallback, useEffect, useRef, useState } from 'react';
|
||||
|
||||
import { getLocalLabel, type LocalLabel } from '../utils/localLabel';
|
||||
import type { SettingsSection } from '../components/settings/settingsConstants';
|
||||
import { parseHashSettingsSection, updateSettingsHash } from '../utils/urlHash';
|
||||
|
||||
interface UseAppShellResult {
|
||||
showNewMessage: boolean;
|
||||
@@ -23,25 +24,47 @@ interface UseAppShellResult {
|
||||
}
|
||||
|
||||
export function useAppShell(): UseAppShellResult {
|
||||
const initialSettingsSection = typeof window === 'undefined' ? null : parseHashSettingsSection();
|
||||
const [showNewMessage, setShowNewMessage] = useState(false);
|
||||
const [showSettings, setShowSettings] = useState(false);
|
||||
const [settingsSection, setSettingsSection] = useState<SettingsSection>('radio');
|
||||
const [showSettings, setShowSettings] = useState(() => initialSettingsSection !== null);
|
||||
const [settingsSection, setSettingsSection] = useState<SettingsSection>(
|
||||
() => initialSettingsSection ?? 'radio'
|
||||
);
|
||||
const [sidebarOpen, setSidebarOpen] = useState(false);
|
||||
const [showCracker, setShowCracker] = useState(false);
|
||||
const [crackerRunning, setCrackerRunning] = useState(false);
|
||||
const [localLabel, setLocalLabel] = useState(getLocalLabel);
|
||||
const previousHashRef = useRef('');
|
||||
|
||||
useEffect(() => {
|
||||
if (showSettings) {
|
||||
updateSettingsHash(settingsSection);
|
||||
}
|
||||
}, [settingsSection, showSettings]);
|
||||
|
||||
const handleCloseSettingsView = useCallback(() => {
|
||||
if (typeof window !== 'undefined' && parseHashSettingsSection() !== null) {
|
||||
window.history.replaceState(null, '', previousHashRef.current || window.location.pathname);
|
||||
}
|
||||
startTransition(() => setShowSettings(false));
|
||||
setSidebarOpen(false);
|
||||
}, []);
|
||||
|
||||
const handleToggleSettingsView = useCallback(() => {
|
||||
if (showSettings) {
|
||||
handleCloseSettingsView();
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
previousHashRef.current =
|
||||
parseHashSettingsSection() === null ? window.location.hash : previousHashRef.current;
|
||||
}
|
||||
startTransition(() => {
|
||||
setShowSettings((prev) => !prev);
|
||||
setShowSettings(true);
|
||||
});
|
||||
setSidebarOpen(false);
|
||||
}, []);
|
||||
}, [handleCloseSettingsView, showSettings]);
|
||||
|
||||
const handleOpenNewMessage = useCallback(() => {
|
||||
setShowNewMessage(true);
|
||||
|
||||
@@ -2,22 +2,22 @@ import { useState, useCallback, type MutableRefObject } from 'react';
|
||||
import { api } from '../api';
|
||||
import { takePrefetchOrFetch } from '../prefetch';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
import * as messageCache from '../messageCache';
|
||||
import { getContactDisplayName } from '../utils/pubkey';
|
||||
import { findPublicChannel, PUBLIC_CHANNEL_KEY, PUBLIC_CHANNEL_NAME } from '../utils/publicChannel';
|
||||
import type { Channel, Contact, Conversation } from '../types';
|
||||
|
||||
const PUBLIC_CHANNEL_KEY = '8B3387E9C5CDEA6AC9E5EDBAA115CD72';
|
||||
|
||||
interface UseContactsAndChannelsArgs {
|
||||
setActiveConversation: (conv: Conversation | null) => void;
|
||||
pendingDeleteFallbackRef: MutableRefObject<boolean>;
|
||||
hasSetDefaultConversation: MutableRefObject<boolean>;
|
||||
removeConversationMessages: (conversationId: string) => void;
|
||||
}
|
||||
|
||||
export function useContactsAndChannels({
|
||||
setActiveConversation,
|
||||
pendingDeleteFallbackRef,
|
||||
hasSetDefaultConversation,
|
||||
removeConversationMessages,
|
||||
}: UseContactsAndChannelsArgs) {
|
||||
const [contacts, setContacts] = useState<Contact[]>([]);
|
||||
const [contactsLoaded, setContactsLoaded] = useState(false);
|
||||
@@ -118,17 +118,15 @@ export function useContactsAndChannels({
|
||||
try {
|
||||
pendingDeleteFallbackRef.current = true;
|
||||
await api.deleteChannel(key);
|
||||
messageCache.remove(key);
|
||||
removeConversationMessages(key);
|
||||
const refreshedChannels = await api.getChannels();
|
||||
setChannels(refreshedChannels);
|
||||
const publicChannel =
|
||||
refreshedChannels.find((c) => c.key === PUBLIC_CHANNEL_KEY) ||
|
||||
refreshedChannels.find((c) => c.name === 'Public');
|
||||
const publicChannel = findPublicChannel(refreshedChannels);
|
||||
hasSetDefaultConversation.current = true;
|
||||
setActiveConversation({
|
||||
type: 'channel',
|
||||
id: publicChannel?.key || PUBLIC_CHANNEL_KEY,
|
||||
name: publicChannel?.name || 'Public',
|
||||
name: publicChannel?.name || PUBLIC_CHANNEL_NAME,
|
||||
});
|
||||
toast.success('Channel deleted');
|
||||
} catch (err) {
|
||||
@@ -138,7 +136,12 @@ export function useContactsAndChannels({
|
||||
});
|
||||
}
|
||||
},
|
||||
[setActiveConversation, pendingDeleteFallbackRef, hasSetDefaultConversation]
|
||||
[
|
||||
hasSetDefaultConversation,
|
||||
pendingDeleteFallbackRef,
|
||||
removeConversationMessages,
|
||||
setActiveConversation,
|
||||
]
|
||||
);
|
||||
|
||||
const handleDeleteContact = useCallback(
|
||||
@@ -147,18 +150,16 @@ export function useContactsAndChannels({
|
||||
try {
|
||||
pendingDeleteFallbackRef.current = true;
|
||||
await api.deleteContact(publicKey);
|
||||
messageCache.remove(publicKey);
|
||||
removeConversationMessages(publicKey);
|
||||
setContacts((prev) => prev.filter((c) => c.public_key !== publicKey));
|
||||
const refreshedChannels = await api.getChannels();
|
||||
setChannels(refreshedChannels);
|
||||
const publicChannel =
|
||||
refreshedChannels.find((c) => c.key === PUBLIC_CHANNEL_KEY) ||
|
||||
refreshedChannels.find((c) => c.name === 'Public');
|
||||
const publicChannel = findPublicChannel(refreshedChannels);
|
||||
hasSetDefaultConversation.current = true;
|
||||
setActiveConversation({
|
||||
type: 'channel',
|
||||
id: publicChannel?.key || PUBLIC_CHANNEL_KEY,
|
||||
name: publicChannel?.name || 'Public',
|
||||
name: publicChannel?.name || PUBLIC_CHANNEL_NAME,
|
||||
});
|
||||
toast.success('Contact deleted');
|
||||
} catch (err) {
|
||||
@@ -168,7 +169,12 @@ export function useContactsAndChannels({
|
||||
});
|
||||
}
|
||||
},
|
||||
[setActiveConversation, pendingDeleteFallbackRef, hasSetDefaultConversation]
|
||||
[
|
||||
hasSetDefaultConversation,
|
||||
pendingDeleteFallbackRef,
|
||||
removeConversationMessages,
|
||||
setActiveConversation,
|
||||
]
|
||||
);
|
||||
|
||||
return {
|
||||
|
||||
@@ -10,7 +10,7 @@ interface UseConversationActionsArgs {
|
||||
activeConversationRef: MutableRefObject<Conversation | null>;
|
||||
setContacts: React.Dispatch<React.SetStateAction<Contact[]>>;
|
||||
setChannels: React.Dispatch<React.SetStateAction<Channel[]>>;
|
||||
addMessageIfNew: (msg: Message) => boolean;
|
||||
observeMessage: (msg: Message) => { added: boolean; activeConversation: boolean };
|
||||
messageInputRef: RefObject<MessageInputHandle | null>;
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ export function useConversationActions({
|
||||
activeConversationRef,
|
||||
setContacts,
|
||||
setChannels,
|
||||
addMessageIfNew,
|
||||
observeMessage,
|
||||
messageInputRef,
|
||||
}: UseConversationActionsArgs): UseConversationActionsResult {
|
||||
const mergeChannelIntoList = useCallback(
|
||||
@@ -60,10 +60,10 @@ export function useConversationActions({
|
||||
: await api.sendDirectMessage(activeConversation.id, text);
|
||||
|
||||
if (activeConversationRef.current?.id === conversationId) {
|
||||
addMessageIfNew(sent);
|
||||
observeMessage(sent);
|
||||
}
|
||||
},
|
||||
[activeConversation, activeConversationRef, addMessageIfNew]
|
||||
[activeConversation, activeConversationRef, observeMessage]
|
||||
);
|
||||
|
||||
const handleResendChannelMessage = useCallback(
|
||||
@@ -77,7 +77,7 @@ export function useConversationActions({
|
||||
activeConversationRef.current?.type === 'channel' &&
|
||||
activeConversationRef.current.id === resentMessage.conversation_key
|
||||
) {
|
||||
addMessageIfNew(resentMessage);
|
||||
observeMessage(resentMessage);
|
||||
}
|
||||
toast.success(newTimestamp ? 'Message resent with new timestamp' : 'Message resent');
|
||||
} catch (err) {
|
||||
@@ -86,7 +86,7 @@ export function useConversationActions({
|
||||
});
|
||||
}
|
||||
},
|
||||
[activeConversationRef, addMessageIfNew]
|
||||
[activeConversationRef, observeMessage]
|
||||
);
|
||||
|
||||
const handleSetChannelFloodScopeOverride = useCallback(
|
||||
|
||||
@@ -1,19 +1,174 @@
|
||||
import {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useRef,
|
||||
useState,
|
||||
type Dispatch,
|
||||
type MutableRefObject,
|
||||
type SetStateAction,
|
||||
} from 'react';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
import { api, isAbortError } from '../api';
|
||||
import * as messageCache from '../messageCache';
|
||||
import type { Conversation, Message, MessagePath } from '../types';
|
||||
import { getMessageContentKey } from '../utils/messageIdentity';
|
||||
|
||||
const MAX_PENDING_ACKS = 500;
|
||||
const MESSAGE_PAGE_SIZE = 200;
|
||||
export const MAX_CACHED_CONVERSATIONS = 20;
|
||||
export const MAX_MESSAGES_PER_ENTRY = 200;
|
||||
|
||||
interface CachedConversationEntry {
|
||||
messages: Message[];
|
||||
hasOlderMessages: boolean;
|
||||
}
|
||||
|
||||
interface InternalCachedConversationEntry extends CachedConversationEntry {
|
||||
contentKeys: Set<string>;
|
||||
}
|
||||
|
||||
export class ConversationMessageCache {
|
||||
private readonly cache = new Map<string, InternalCachedConversationEntry>();
|
||||
|
||||
get(id: string): CachedConversationEntry | undefined {
|
||||
const entry = this.cache.get(id);
|
||||
if (!entry) return undefined;
|
||||
this.cache.delete(id);
|
||||
this.cache.set(id, entry);
|
||||
return {
|
||||
messages: entry.messages,
|
||||
hasOlderMessages: entry.hasOlderMessages,
|
||||
};
|
||||
}
|
||||
|
||||
set(id: string, entry: CachedConversationEntry): void {
|
||||
const contentKeys = new Set(entry.messages.map((message) => getMessageContentKey(message)));
|
||||
if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
|
||||
const trimmed = [...entry.messages]
|
||||
.sort((a, b) => b.received_at - a.received_at)
|
||||
.slice(0, MAX_MESSAGES_PER_ENTRY);
|
||||
entry = { ...entry, messages: trimmed, hasOlderMessages: true };
|
||||
}
|
||||
const internalEntry: InternalCachedConversationEntry = {
|
||||
...entry,
|
||||
contentKeys,
|
||||
};
|
||||
this.cache.delete(id);
|
||||
this.cache.set(id, internalEntry);
|
||||
if (this.cache.size > MAX_CACHED_CONVERSATIONS) {
|
||||
const lruKey = this.cache.keys().next().value as string;
|
||||
this.cache.delete(lruKey);
|
||||
}
|
||||
}
|
||||
|
||||
addMessage(id: string, msg: Message): boolean {
|
||||
const entry = this.cache.get(id);
|
||||
const contentKey = getMessageContentKey(msg);
|
||||
if (!entry) {
|
||||
this.cache.set(id, {
|
||||
messages: [msg],
|
||||
hasOlderMessages: true,
|
||||
contentKeys: new Set([contentKey]),
|
||||
});
|
||||
if (this.cache.size > MAX_CACHED_CONVERSATIONS) {
|
||||
const lruKey = this.cache.keys().next().value as string;
|
||||
this.cache.delete(lruKey);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (entry.contentKeys.has(contentKey)) return false;
|
||||
if (entry.messages.some((message) => message.id === msg.id)) return false;
|
||||
entry.contentKeys.add(contentKey);
|
||||
entry.messages = [...entry.messages, msg];
|
||||
if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
|
||||
entry.messages = [...entry.messages]
|
||||
.sort((a, b) => b.received_at - a.received_at)
|
||||
.slice(0, MAX_MESSAGES_PER_ENTRY);
|
||||
}
|
||||
this.cache.delete(id);
|
||||
this.cache.set(id, entry);
|
||||
return true;
|
||||
}
|
||||
|
||||
updateAck(messageId: number, ackCount: number, paths?: MessagePath[]): void {
|
||||
for (const entry of this.cache.values()) {
|
||||
const index = entry.messages.findIndex((message) => message.id === messageId);
|
||||
if (index < 0) continue;
|
||||
const current = entry.messages[index];
|
||||
const updated = [...entry.messages];
|
||||
updated[index] = {
|
||||
...current,
|
||||
acked: Math.max(current.acked, ackCount),
|
||||
...(paths !== undefined && paths.length >= (current.paths?.length ?? 0) && { paths }),
|
||||
};
|
||||
entry.messages = updated;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
remove(id: string): void {
|
||||
this.cache.delete(id);
|
||||
}
|
||||
|
||||
rename(oldId: string, newId: string): void {
|
||||
if (oldId === newId) return;
|
||||
const oldEntry = this.cache.get(oldId);
|
||||
if (!oldEntry) return;
|
||||
|
||||
const newEntry = this.cache.get(newId);
|
||||
if (!newEntry) {
|
||||
this.cache.delete(oldId);
|
||||
this.cache.set(newId, oldEntry);
|
||||
return;
|
||||
}
|
||||
|
||||
const mergedMessages = [...newEntry.messages];
|
||||
const seenIds = new Set(mergedMessages.map((message) => message.id));
|
||||
for (const message of oldEntry.messages) {
|
||||
if (!seenIds.has(message.id)) {
|
||||
mergedMessages.push(message);
|
||||
seenIds.add(message.id);
|
||||
}
|
||||
}
|
||||
|
||||
this.cache.delete(oldId);
|
||||
this.cache.set(newId, {
|
||||
messages: mergedMessages,
|
||||
hasOlderMessages: newEntry.hasOlderMessages || oldEntry.hasOlderMessages,
|
||||
contentKeys: new Set([...newEntry.contentKeys, ...oldEntry.contentKeys]),
|
||||
});
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.cache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
export function reconcileConversationMessages(
|
||||
current: Message[],
|
||||
fetched: Message[]
|
||||
): Message[] | null {
|
||||
const currentById = new Map<number, { acked: number; pathsLen: number; text: string }>();
|
||||
for (const message of current) {
|
||||
currentById.set(message.id, {
|
||||
acked: message.acked,
|
||||
pathsLen: message.paths?.length ?? 0,
|
||||
text: message.text,
|
||||
});
|
||||
}
|
||||
|
||||
let needsUpdate = false;
|
||||
for (const message of fetched) {
|
||||
const currentMessage = currentById.get(message.id);
|
||||
if (
|
||||
!currentMessage ||
|
||||
currentMessage.acked !== message.acked ||
|
||||
currentMessage.pathsLen !== (message.paths?.length ?? 0) ||
|
||||
currentMessage.text !== message.text
|
||||
) {
|
||||
needsUpdate = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!needsUpdate) return null;
|
||||
|
||||
const fetchedIds = new Set(fetched.map((message) => message.id));
|
||||
const olderMessages = current.filter((message) => !fetchedIds.has(message.id));
|
||||
return [...fetched, ...olderMessages];
|
||||
}
|
||||
|
||||
export const conversationMessageCache = new ConversationMessageCache();
|
||||
|
||||
interface PendingAckUpdate {
|
||||
ackCount: number;
|
||||
@@ -56,15 +211,6 @@ export function mergePendingAck(
|
||||
return existing;
|
||||
}
|
||||
|
||||
// Generate a key for deduplicating messages by content
|
||||
export function getMessageContentKey(msg: Message): string {
|
||||
// When sender_timestamp exists, dedup by content (catches radio-path duplicates with different IDs).
|
||||
// When null, include msg.id so each message gets a unique key — avoids silently dropping
|
||||
// different messages that share the same text and received_at second.
|
||||
const ts = msg.sender_timestamp ?? `r${msg.received_at}-${msg.id}`;
|
||||
return `${msg.type}-${msg.conversation_key}-${msg.text}-${ts}`;
|
||||
}
|
||||
|
||||
interface UseConversationMessagesResult {
|
||||
messages: Message[];
|
||||
messagesLoading: boolean;
|
||||
@@ -72,21 +218,36 @@ interface UseConversationMessagesResult {
|
||||
hasOlderMessages: boolean;
|
||||
hasNewerMessages: boolean;
|
||||
loadingNewer: boolean;
|
||||
hasNewerMessagesRef: MutableRefObject<boolean>;
|
||||
setMessages: Dispatch<SetStateAction<Message[]>>;
|
||||
fetchOlderMessages: () => Promise<void>;
|
||||
fetchNewerMessages: () => Promise<void>;
|
||||
jumpToBottom: () => void;
|
||||
reloadCurrentConversation: () => void;
|
||||
addMessageIfNew: (msg: Message) => boolean;
|
||||
updateMessageAck: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
|
||||
triggerReconcile: () => void;
|
||||
observeMessage: (msg: Message) => { added: boolean; activeConversation: boolean };
|
||||
receiveMessageAck: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
|
||||
reconcileOnReconnect: () => void;
|
||||
renameConversationMessages: (oldId: string, newId: string) => void;
|
||||
removeConversationMessages: (conversationId: string) => void;
|
||||
clearConversationMessages: () => void;
|
||||
}
|
||||
|
||||
function isMessageConversation(conversation: Conversation | null): conversation is Conversation {
|
||||
return !!conversation && !['raw', 'map', 'visualizer', 'search'].includes(conversation.type);
|
||||
}
|
||||
|
||||
function isActiveConversationMessage(
|
||||
activeConversation: Conversation | null,
|
||||
msg: Message
|
||||
): boolean {
|
||||
if (!activeConversation) return false;
|
||||
if (msg.type === 'CHAN' && activeConversation.type === 'channel') {
|
||||
return msg.conversation_key === activeConversation.id;
|
||||
}
|
||||
if (msg.type === 'PRIV' && activeConversation.type === 'contact') {
|
||||
return msg.conversation_key === activeConversation.id;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function appendUniqueMessages(current: Message[], incoming: Message[]): Message[] {
|
||||
if (incoming.length === 0) return current;
|
||||
|
||||
@@ -161,10 +322,14 @@ export function useConversationMessages(
|
||||
const [loadingNewer, setLoadingNewer] = useState(false);
|
||||
|
||||
const abortControllerRef = useRef<AbortController | null>(null);
|
||||
const olderAbortControllerRef = useRef<AbortController | null>(null);
|
||||
const newerAbortControllerRef = useRef<AbortController | null>(null);
|
||||
const fetchingConversationIdRef = useRef<string | null>(null);
|
||||
const latestReconcileRequestIdRef = useRef(0);
|
||||
const pendingReconnectReconcileRef = useRef(false);
|
||||
const messagesRef = useRef<Message[]>([]);
|
||||
const loadingOlderRef = useRef(false);
|
||||
const loadingNewerRef = useRef(false);
|
||||
const hasOlderMessagesRef = useRef(false);
|
||||
const hasNewerMessagesRef = useRef(false);
|
||||
const prevConversationIdRef = useRef<string | null>(null);
|
||||
@@ -179,6 +344,10 @@ export function useConversationMessages(
|
||||
loadingOlderRef.current = loadingOlder;
|
||||
}, [loadingOlder]);
|
||||
|
||||
useEffect(() => {
|
||||
loadingNewerRef.current = loadingNewer;
|
||||
}, [loadingNewer]);
|
||||
|
||||
useEffect(() => {
|
||||
hasOlderMessagesRef.current = hasOlderMessages;
|
||||
}, [hasOlderMessages]);
|
||||
@@ -206,6 +375,7 @@ export function useConversationMessages(
|
||||
}
|
||||
|
||||
const conversationId = activeConversation.id;
|
||||
pendingReconnectReconcileRef.current = false;
|
||||
|
||||
if (showLoading) {
|
||||
setMessagesLoading(true);
|
||||
@@ -227,7 +397,7 @@ export function useConversationMessages(
|
||||
}
|
||||
|
||||
const messagesWithPendingAck = data.map((msg) => applyPendingAck(msg));
|
||||
const merged = messageCache.reconcile(messagesRef.current, messagesWithPendingAck);
|
||||
const merged = reconcileConversationMessages(messagesRef.current, messagesWithPendingAck);
|
||||
const nextMessages = merged ?? messagesRef.current;
|
||||
if (merged) {
|
||||
setMessages(merged);
|
||||
@@ -269,7 +439,7 @@ export function useConversationMessages(
|
||||
|
||||
const dataWithPendingAck = data.map((msg) => applyPendingAck(msg));
|
||||
setHasOlderMessages(dataWithPendingAck.length >= MESSAGE_PAGE_SIZE);
|
||||
const merged = messageCache.reconcile(messagesRef.current, dataWithPendingAck);
|
||||
const merged = reconcileConversationMessages(messagesRef.current, dataWithPendingAck);
|
||||
if (!merged) return;
|
||||
|
||||
setMessages(merged);
|
||||
@@ -293,7 +463,7 @@ export function useConversationMessages(
|
||||
}
|
||||
|
||||
const conversationId = activeConversation.id;
|
||||
const oldestMessage = messages.reduce(
|
||||
const oldestMessage = messagesRef.current.reduce(
|
||||
(oldest, msg) => {
|
||||
if (!oldest) return msg;
|
||||
if (msg.received_at < oldest.received_at) return msg;
|
||||
@@ -306,14 +476,19 @@ export function useConversationMessages(
|
||||
|
||||
loadingOlderRef.current = true;
|
||||
setLoadingOlder(true);
|
||||
const controller = new AbortController();
|
||||
olderAbortControllerRef.current = controller;
|
||||
try {
|
||||
const data = await api.getMessages({
|
||||
type: activeConversation.type === 'channel' ? 'CHAN' : 'PRIV',
|
||||
conversation_key: conversationId,
|
||||
limit: MESSAGE_PAGE_SIZE,
|
||||
before: oldestMessage.received_at,
|
||||
before_id: oldestMessage.id,
|
||||
});
|
||||
const data = await api.getMessages(
|
||||
{
|
||||
type: activeConversation.type === 'channel' ? 'CHAN' : 'PRIV',
|
||||
conversation_key: conversationId,
|
||||
limit: MESSAGE_PAGE_SIZE,
|
||||
before: oldestMessage.received_at,
|
||||
before_id: oldestMessage.id,
|
||||
},
|
||||
controller.signal
|
||||
);
|
||||
|
||||
if (fetchingConversationIdRef.current !== conversationId) return;
|
||||
|
||||
@@ -335,21 +510,33 @@ export function useConversationMessages(
|
||||
}
|
||||
setHasOlderMessages(dataWithPendingAck.length >= MESSAGE_PAGE_SIZE);
|
||||
} catch (err) {
|
||||
if (isAbortError(err)) {
|
||||
return;
|
||||
}
|
||||
console.error('Failed to fetch older messages:', err);
|
||||
toast.error('Failed to load older messages', {
|
||||
description: err instanceof Error ? err.message : 'Check your connection',
|
||||
});
|
||||
} finally {
|
||||
if (olderAbortControllerRef.current === controller) {
|
||||
olderAbortControllerRef.current = null;
|
||||
}
|
||||
loadingOlderRef.current = false;
|
||||
setLoadingOlder(false);
|
||||
}
|
||||
}, [activeConversation, applyPendingAck, messages, syncSeenContent]);
|
||||
}, [activeConversation, applyPendingAck, syncSeenContent]);
|
||||
|
||||
const fetchNewerMessages = useCallback(async () => {
|
||||
if (!isMessageConversation(activeConversation) || loadingNewer || !hasNewerMessages) return;
|
||||
if (
|
||||
!isMessageConversation(activeConversation) ||
|
||||
loadingNewerRef.current ||
|
||||
!hasNewerMessagesRef.current
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const conversationId = activeConversation.id;
|
||||
const newestMessage = messages.reduce(
|
||||
const newestMessage = messagesRef.current.reduce(
|
||||
(newest, msg) => {
|
||||
if (!newest) return msg;
|
||||
if (msg.received_at > newest.received_at) return msg;
|
||||
@@ -360,15 +547,21 @@ export function useConversationMessages(
|
||||
);
|
||||
if (!newestMessage) return;
|
||||
|
||||
loadingNewerRef.current = true;
|
||||
setLoadingNewer(true);
|
||||
const controller = new AbortController();
|
||||
newerAbortControllerRef.current = controller;
|
||||
try {
|
||||
const data = await api.getMessages({
|
||||
type: activeConversation.type === 'channel' ? 'CHAN' : 'PRIV',
|
||||
conversation_key: conversationId,
|
||||
limit: MESSAGE_PAGE_SIZE,
|
||||
after: newestMessage.received_at,
|
||||
after_id: newestMessage.id,
|
||||
});
|
||||
const data = await api.getMessages(
|
||||
{
|
||||
type: activeConversation.type === 'channel' ? 'CHAN' : 'PRIV',
|
||||
conversation_key: conversationId,
|
||||
limit: MESSAGE_PAGE_SIZE,
|
||||
after: newestMessage.received_at,
|
||||
after_id: newestMessage.id,
|
||||
},
|
||||
controller.signal
|
||||
);
|
||||
|
||||
if (fetchingConversationIdRef.current !== conversationId) return;
|
||||
|
||||
@@ -383,33 +576,57 @@ export function useConversationMessages(
|
||||
seenMessageContent.current.add(getMessageContentKey(msg));
|
||||
}
|
||||
}
|
||||
setHasNewerMessages(dataWithPendingAck.length >= MESSAGE_PAGE_SIZE);
|
||||
const stillHasNewerMessages = dataWithPendingAck.length >= MESSAGE_PAGE_SIZE;
|
||||
setHasNewerMessages(stillHasNewerMessages);
|
||||
if (!stillHasNewerMessages && pendingReconnectReconcileRef.current) {
|
||||
pendingReconnectReconcileRef.current = false;
|
||||
const requestId = latestReconcileRequestIdRef.current + 1;
|
||||
latestReconcileRequestIdRef.current = requestId;
|
||||
const reconcileController = new AbortController();
|
||||
reconcileFromBackend(activeConversation, reconcileController.signal, requestId);
|
||||
}
|
||||
} catch (err) {
|
||||
if (isAbortError(err)) {
|
||||
return;
|
||||
}
|
||||
console.error('Failed to fetch newer messages:', err);
|
||||
toast.error('Failed to load newer messages', {
|
||||
description: err instanceof Error ? err.message : 'Check your connection',
|
||||
});
|
||||
} finally {
|
||||
if (newerAbortControllerRef.current === controller) {
|
||||
newerAbortControllerRef.current = null;
|
||||
}
|
||||
loadingNewerRef.current = false;
|
||||
setLoadingNewer(false);
|
||||
}
|
||||
}, [activeConversation, applyPendingAck, hasNewerMessages, loadingNewer, messages]);
|
||||
}, [activeConversation, applyPendingAck, reconcileFromBackend]);
|
||||
|
||||
const jumpToBottom = useCallback(() => {
|
||||
if (!activeConversation) return;
|
||||
setHasNewerMessages(false);
|
||||
messageCache.remove(activeConversation.id);
|
||||
conversationMessageCache.remove(activeConversation.id);
|
||||
void fetchLatestMessages(true);
|
||||
}, [activeConversation, fetchLatestMessages]);
|
||||
|
||||
const reloadCurrentConversation = useCallback(() => {
|
||||
if (!isMessageConversation(activeConversation)) return;
|
||||
setHasNewerMessages(false);
|
||||
messageCache.remove(activeConversation.id);
|
||||
conversationMessageCache.remove(activeConversation.id);
|
||||
setReloadVersion((current) => current + 1);
|
||||
}, [activeConversation]);
|
||||
|
||||
const triggerReconcile = useCallback(() => {
|
||||
if (!isMessageConversation(activeConversation)) return;
|
||||
const reconcileOnReconnect = useCallback(() => {
|
||||
if (!isMessageConversation(activeConversation)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (hasNewerMessagesRef.current) {
|
||||
pendingReconnectReconcileRef.current = true;
|
||||
return;
|
||||
}
|
||||
|
||||
pendingReconnectReconcileRef.current = false;
|
||||
const controller = new AbortController();
|
||||
const requestId = latestReconcileRequestIdRef.current + 1;
|
||||
latestReconcileRequestIdRef.current = requestId;
|
||||
@@ -420,6 +637,14 @@ export function useConversationMessages(
|
||||
if (abortControllerRef.current) {
|
||||
abortControllerRef.current.abort();
|
||||
}
|
||||
if (olderAbortControllerRef.current) {
|
||||
olderAbortControllerRef.current.abort();
|
||||
olderAbortControllerRef.current = null;
|
||||
}
|
||||
if (newerAbortControllerRef.current) {
|
||||
newerAbortControllerRef.current.abort();
|
||||
newerAbortControllerRef.current = null;
|
||||
}
|
||||
|
||||
const prevId = prevConversationIdRef.current;
|
||||
const newId = activeConversation?.id ?? null;
|
||||
@@ -429,6 +654,7 @@ export function useConversationMessages(
|
||||
prevConversationIdRef.current = newId;
|
||||
prevReloadVersionRef.current = reloadVersion;
|
||||
latestReconcileRequestIdRef.current = 0;
|
||||
pendingReconnectReconcileRef.current = false;
|
||||
|
||||
// Preserve around-loaded context on the same conversation when search clears targetMessageId.
|
||||
if (!conversationChanged && !targetMessageId && !reloadRequested) {
|
||||
@@ -448,9 +674,8 @@ export function useConversationMessages(
|
||||
messagesRef.current.length > 0 &&
|
||||
!hasNewerMessagesRef.current
|
||||
) {
|
||||
messageCache.set(prevId, {
|
||||
conversationMessageCache.set(prevId, {
|
||||
messages: messagesRef.current,
|
||||
seenContent: new Set(seenMessageContent.current),
|
||||
hasOlderMessages: hasOlderMessagesRef.current,
|
||||
});
|
||||
}
|
||||
@@ -492,10 +717,12 @@ export function useConversationMessages(
|
||||
setMessagesLoading(false);
|
||||
});
|
||||
} else {
|
||||
const cached = messageCache.get(activeConversation.id);
|
||||
const cached = conversationMessageCache.get(activeConversation.id);
|
||||
if (cached) {
|
||||
setMessages(cached.messages);
|
||||
seenMessageContent.current = new Set(cached.seenContent);
|
||||
seenMessageContent.current = new Set(
|
||||
cached.messages.map((message) => getMessageContentKey(message))
|
||||
);
|
||||
setHasOlderMessages(cached.hasOlderMessages);
|
||||
setMessagesLoading(false);
|
||||
const requestId = latestReconcileRequestIdRef.current + 1;
|
||||
@@ -512,9 +739,8 @@ export function useConversationMessages(
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [activeConversation?.id, activeConversation?.type, targetMessageId, reloadVersion]);
|
||||
|
||||
// Add a message if it's new (deduplication)
|
||||
// Returns true if the message was added, false if it was a duplicate
|
||||
const addMessageIfNew = useCallback(
|
||||
// Add a message to the active conversation if it is new.
|
||||
const appendActiveMessageIfNew = useCallback(
|
||||
(msg: Message): boolean => {
|
||||
const msgWithPendingAck = applyPendingAck(msg);
|
||||
const contentKey = getMessageContentKey(msgWithPendingAck);
|
||||
@@ -584,6 +810,56 @@ export function useConversationMessages(
|
||||
[messagesRef, setMessages, setPendingAck]
|
||||
);
|
||||
|
||||
const receiveMessageAck = useCallback(
|
||||
(messageId: number, ackCount: number, paths?: MessagePath[]) => {
|
||||
updateMessageAck(messageId, ackCount, paths);
|
||||
conversationMessageCache.updateAck(messageId, ackCount, paths);
|
||||
},
|
||||
[updateMessageAck]
|
||||
);
|
||||
|
||||
const observeMessage = useCallback(
|
||||
(msg: Message): { added: boolean; activeConversation: boolean } => {
|
||||
const msgWithPendingAck = applyPendingAck(msg);
|
||||
const activeConversationMessage = isActiveConversationMessage(
|
||||
activeConversation,
|
||||
msgWithPendingAck
|
||||
);
|
||||
|
||||
if (activeConversationMessage) {
|
||||
if (hasNewerMessagesRef.current) {
|
||||
return { added: false, activeConversation: true };
|
||||
}
|
||||
|
||||
return {
|
||||
added: appendActiveMessageIfNew(msgWithPendingAck),
|
||||
activeConversation: true,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
added: conversationMessageCache.addMessage(
|
||||
msgWithPendingAck.conversation_key,
|
||||
msgWithPendingAck
|
||||
),
|
||||
activeConversation: false,
|
||||
};
|
||||
},
|
||||
[activeConversation, appendActiveMessageIfNew, applyPendingAck, hasNewerMessagesRef]
|
||||
);
|
||||
|
||||
const renameConversationMessages = useCallback((oldId: string, newId: string) => {
|
||||
conversationMessageCache.rename(oldId, newId);
|
||||
}, []);
|
||||
|
||||
const removeConversationMessages = useCallback((conversationId: string) => {
|
||||
conversationMessageCache.remove(conversationId);
|
||||
}, []);
|
||||
|
||||
const clearConversationMessages = useCallback(() => {
|
||||
conversationMessageCache.clear();
|
||||
}, []);
|
||||
|
||||
return {
|
||||
messages,
|
||||
messagesLoading,
|
||||
@@ -591,14 +867,15 @@ export function useConversationMessages(
|
||||
hasOlderMessages,
|
||||
hasNewerMessages,
|
||||
loadingNewer,
|
||||
hasNewerMessagesRef,
|
||||
setMessages,
|
||||
fetchOlderMessages,
|
||||
fetchNewerMessages,
|
||||
jumpToBottom,
|
||||
reloadCurrentConversation,
|
||||
addMessageIfNew,
|
||||
updateMessageAck,
|
||||
triggerReconcile,
|
||||
observeMessage,
|
||||
receiveMessageAck,
|
||||
reconcileOnReconnect,
|
||||
renameConversationMessages,
|
||||
removeConversationMessages,
|
||||
clearConversationMessages,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useState, useCallback, useEffect, useRef, type MutableRefObject } from 'react';
|
||||
import {
|
||||
parseHashConversation,
|
||||
parseHashSettingsSection,
|
||||
updateUrlHash,
|
||||
resolveChannelFromHashToken,
|
||||
resolveContactFromHashToken,
|
||||
@@ -10,15 +11,15 @@ import {
|
||||
getReopenLastConversationEnabled,
|
||||
saveLastViewedConversation,
|
||||
} from '../utils/lastViewedConversation';
|
||||
import { findPublicChannel } from '../utils/publicChannel';
|
||||
import { getContactDisplayName } from '../utils/pubkey';
|
||||
import type { Channel, Contact, Conversation } from '../types';
|
||||
|
||||
const PUBLIC_CHANNEL_KEY = '8B3387E9C5CDEA6AC9E5EDBAA115CD72';
|
||||
|
||||
interface UseConversationRouterArgs {
|
||||
channels: Channel[];
|
||||
contacts: Contact[];
|
||||
contactsLoaded: boolean;
|
||||
suspendHashSync: boolean;
|
||||
setSidebarOpen: (open: boolean) => void;
|
||||
pendingDeleteFallbackRef: MutableRefObject<boolean>;
|
||||
hasSetDefaultConversation: MutableRefObject<boolean>;
|
||||
@@ -28,6 +29,7 @@ export function useConversationRouter({
|
||||
channels,
|
||||
contacts,
|
||||
contactsLoaded,
|
||||
suspendHashSync,
|
||||
setSidebarOpen,
|
||||
pendingDeleteFallbackRef,
|
||||
hasSetDefaultConversation,
|
||||
@@ -35,7 +37,9 @@ export function useConversationRouter({
|
||||
const [activeConversation, setActiveConversationState] = useState<Conversation | null>(null);
|
||||
const activeConversationRef = useRef<Conversation | null>(null);
|
||||
const hashSyncEnabledRef = useRef(
|
||||
typeof window !== 'undefined' ? window.location.hash.length > 0 : false
|
||||
typeof window !== 'undefined'
|
||||
? window.location.hash.length > 0 && parseHashSettingsSection() === null
|
||||
: false
|
||||
);
|
||||
|
||||
const setActiveConversation = useCallback((conv: Conversation | null) => {
|
||||
@@ -44,7 +48,7 @@ export function useConversationRouter({
|
||||
}, []);
|
||||
|
||||
const getPublicChannelConversation = useCallback((): Conversation | null => {
|
||||
const publicChannel = channels.find((c) => c.name === 'Public');
|
||||
const publicChannel = findPublicChannel(channels);
|
||||
if (!publicChannel) return null;
|
||||
return {
|
||||
type: 'channel',
|
||||
@@ -59,7 +63,7 @@ export function useConversationRouter({
|
||||
if (hasSetDefaultConversation.current || activeConversation) return;
|
||||
if (channels.length === 0) return;
|
||||
|
||||
const hashConv = parseHashConversation();
|
||||
const hashConv = parseHashSettingsSection() ? null : parseHashConversation();
|
||||
|
||||
// Handle non-data views immediately
|
||||
if (hashConv?.type === 'raw') {
|
||||
@@ -142,7 +146,7 @@ export function useConversationRouter({
|
||||
useEffect(() => {
|
||||
if (hasSetDefaultConversation.current || activeConversation) return;
|
||||
|
||||
const hashConv = parseHashConversation();
|
||||
const hashConv = parseHashSettingsSection() ? null : parseHashConversation();
|
||||
if (hashConv?.type === 'contact') {
|
||||
if (!contactsLoaded) return;
|
||||
|
||||
@@ -204,14 +208,14 @@ export function useConversationRouter({
|
||||
useEffect(() => {
|
||||
activeConversationRef.current = activeConversation;
|
||||
if (activeConversation) {
|
||||
if (hashSyncEnabledRef.current) {
|
||||
if (hashSyncEnabledRef.current && !suspendHashSync) {
|
||||
updateUrlHash(activeConversation);
|
||||
}
|
||||
if (getReopenLastConversationEnabled() && activeConversation.type !== 'search') {
|
||||
if (activeConversation.type !== 'search') {
|
||||
saveLastViewedConversation(activeConversation);
|
||||
}
|
||||
}
|
||||
}, [activeConversation]);
|
||||
}, [activeConversation, suspendHashSync]);
|
||||
|
||||
// If a delete action left us without an active conversation, recover to Public
|
||||
useEffect(() => {
|
||||
@@ -221,9 +225,7 @@ export function useConversationRouter({
|
||||
return;
|
||||
}
|
||||
|
||||
const publicChannel =
|
||||
channels.find((c) => c.key === PUBLIC_CHANNEL_KEY) ||
|
||||
channels.find((c) => c.name === 'Public');
|
||||
const publicChannel = findPublicChannel(channels);
|
||||
if (!publicChannel) return;
|
||||
|
||||
hasSetDefaultConversation.current = true;
|
||||
|
||||
@@ -6,14 +6,12 @@ import {
|
||||
type SetStateAction,
|
||||
} from 'react';
|
||||
import { api } from '../api';
|
||||
import * as messageCache from '../messageCache';
|
||||
import type { UseWebSocketOptions } from '../useWebSocket';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
import { getStateKey } from '../utils/conversationState';
|
||||
import { mergeContactIntoList } from '../utils/contactMerge';
|
||||
import { getContactDisplayName } from '../utils/pubkey';
|
||||
import { appendRawPacketUnique } from '../utils/rawPacketIdentity';
|
||||
import { getMessageContentKey } from './useConversationMessages';
|
||||
import type {
|
||||
Channel,
|
||||
Contact,
|
||||
@@ -29,7 +27,7 @@ interface UseRealtimeAppStateArgs {
|
||||
setHealth: Dispatch<SetStateAction<HealthStatus | null>>;
|
||||
fetchConfig: () => void | Promise<void>;
|
||||
setRawPackets: Dispatch<SetStateAction<RawPacket[]>>;
|
||||
triggerReconcile: () => void;
|
||||
reconcileOnReconnect: () => void;
|
||||
refreshUnreads: () => Promise<void>;
|
||||
setChannels: Dispatch<SetStateAction<Channel[]>>;
|
||||
fetchAllContacts: () => Promise<Contact[]>;
|
||||
@@ -37,15 +35,20 @@ interface UseRealtimeAppStateArgs {
|
||||
blockedKeysRef: MutableRefObject<string[]>;
|
||||
blockedNamesRef: MutableRefObject<string[]>;
|
||||
activeConversationRef: MutableRefObject<Conversation | null>;
|
||||
hasNewerMessagesRef: MutableRefObject<boolean>;
|
||||
addMessageIfNew: (msg: Message) => boolean;
|
||||
trackNewMessage: (msg: Message) => void;
|
||||
incrementUnread: (stateKey: string, hasMention?: boolean) => void;
|
||||
observeMessage: (msg: Message) => { added: boolean; activeConversation: boolean };
|
||||
recordMessageEvent: (args: {
|
||||
msg: Message;
|
||||
activeConversation: boolean;
|
||||
isNewMessage: boolean;
|
||||
hasMention?: boolean;
|
||||
}) => void;
|
||||
renameConversationState: (oldStateKey: string, newStateKey: string) => void;
|
||||
checkMention: (text: string) => boolean;
|
||||
pendingDeleteFallbackRef: MutableRefObject<boolean>;
|
||||
setActiveConversation: (conv: Conversation | null) => void;
|
||||
updateMessageAck: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
|
||||
renameConversationMessages: (oldId: string, newId: string) => void;
|
||||
removeConversationMessages: (conversationId: string) => void;
|
||||
receiveMessageAck: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
|
||||
notifyIncomingMessage?: (msg: Message) => void;
|
||||
maxRawPackets?: number;
|
||||
}
|
||||
@@ -71,26 +74,12 @@ function isMessageBlocked(msg: Message, blockedKeys: string[], blockedNames: str
|
||||
return blockedNames.length > 0 && !!msg.sender_name && blockedNames.includes(msg.sender_name);
|
||||
}
|
||||
|
||||
function isActiveConversationMessage(
|
||||
activeConversation: Conversation | null,
|
||||
msg: Message
|
||||
): boolean {
|
||||
if (!activeConversation) return false;
|
||||
if (msg.type === 'CHAN' && activeConversation.type === 'channel') {
|
||||
return msg.conversation_key === activeConversation.id;
|
||||
}
|
||||
if (msg.type === 'PRIV' && activeConversation.type === 'contact') {
|
||||
return msg.conversation_key === activeConversation.id;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function useRealtimeAppState({
|
||||
prevHealthRef,
|
||||
setHealth,
|
||||
fetchConfig,
|
||||
setRawPackets,
|
||||
triggerReconcile,
|
||||
reconcileOnReconnect,
|
||||
refreshUnreads,
|
||||
setChannels,
|
||||
fetchAllContacts,
|
||||
@@ -98,15 +87,15 @@ export function useRealtimeAppState({
|
||||
blockedKeysRef,
|
||||
blockedNamesRef,
|
||||
activeConversationRef,
|
||||
hasNewerMessagesRef,
|
||||
addMessageIfNew,
|
||||
trackNewMessage,
|
||||
incrementUnread,
|
||||
observeMessage,
|
||||
recordMessageEvent,
|
||||
renameConversationState,
|
||||
checkMention,
|
||||
pendingDeleteFallbackRef,
|
||||
setActiveConversation,
|
||||
updateMessageAck,
|
||||
renameConversationMessages,
|
||||
removeConversationMessages,
|
||||
receiveMessageAck,
|
||||
notifyIncomingMessage,
|
||||
maxRawPackets = 500,
|
||||
}: UseRealtimeAppStateArgs): UseWebSocketOptions {
|
||||
@@ -180,7 +169,7 @@ export function useRealtimeAppState({
|
||||
},
|
||||
onReconnect: () => {
|
||||
setRawPackets([]);
|
||||
triggerReconcile();
|
||||
reconcileOnReconnect();
|
||||
refreshUnreads();
|
||||
api.getChannels().then(setChannels).catch(console.error);
|
||||
fetchAllContacts()
|
||||
@@ -192,34 +181,14 @@ export function useRealtimeAppState({
|
||||
return;
|
||||
}
|
||||
|
||||
const isForActiveConversation = isActiveConversationMessage(
|
||||
activeConversationRef.current,
|
||||
msg
|
||||
);
|
||||
let isNewMessage = false;
|
||||
|
||||
if (isForActiveConversation && !hasNewerMessagesRef.current) {
|
||||
isNewMessage = addMessageIfNew(msg);
|
||||
}
|
||||
|
||||
trackNewMessage(msg);
|
||||
|
||||
const contentKey = getMessageContentKey(msg);
|
||||
if (!isForActiveConversation) {
|
||||
isNewMessage = messageCache.addMessage(msg.conversation_key, msg, contentKey);
|
||||
|
||||
if (!msg.outgoing && isNewMessage) {
|
||||
let stateKey: string | null = null;
|
||||
if (msg.type === 'CHAN' && msg.conversation_key) {
|
||||
stateKey = getStateKey('channel', msg.conversation_key);
|
||||
} else if (msg.type === 'PRIV' && msg.conversation_key) {
|
||||
stateKey = getStateKey('contact', msg.conversation_key);
|
||||
}
|
||||
if (stateKey) {
|
||||
incrementUnread(stateKey, checkMention(msg.text));
|
||||
}
|
||||
}
|
||||
}
|
||||
const { added: isNewMessage, activeConversation: isForActiveConversation } =
|
||||
observeMessage(msg);
|
||||
recordMessageEvent({
|
||||
msg,
|
||||
activeConversation: isForActiveConversation,
|
||||
isNewMessage,
|
||||
hasMention: checkMention(msg.text),
|
||||
});
|
||||
|
||||
if (!msg.outgoing && isNewMessage) {
|
||||
notifyIncomingMessage?.(msg);
|
||||
@@ -235,7 +204,7 @@ export function useRealtimeAppState({
|
||||
contact
|
||||
)
|
||||
);
|
||||
messageCache.rename(previousPublicKey, contact.public_key);
|
||||
renameConversationMessages(previousPublicKey, contact.public_key);
|
||||
renameConversationState(
|
||||
getStateKey('contact', previousPublicKey),
|
||||
getStateKey('contact', contact.public_key)
|
||||
@@ -255,7 +224,7 @@ export function useRealtimeAppState({
|
||||
},
|
||||
onContactDeleted: (publicKey: string) => {
|
||||
setContacts((prev) => prev.filter((c) => c.public_key !== publicKey));
|
||||
messageCache.remove(publicKey);
|
||||
removeConversationMessages(publicKey);
|
||||
const active = activeConversationRef.current;
|
||||
if (active?.type === 'contact' && active.id === publicKey) {
|
||||
pendingDeleteFallbackRef.current = true;
|
||||
@@ -264,7 +233,7 @@ export function useRealtimeAppState({
|
||||
},
|
||||
onChannelDeleted: (key: string) => {
|
||||
setChannels((prev) => prev.filter((c) => c.key !== key));
|
||||
messageCache.remove(key);
|
||||
removeConversationMessages(key);
|
||||
const active = activeConversationRef.current;
|
||||
if (active?.type === 'channel' && active.id === key) {
|
||||
pendingDeleteFallbackRef.current = true;
|
||||
@@ -275,34 +244,33 @@ export function useRealtimeAppState({
|
||||
setRawPackets((prev) => appendRawPacketUnique(prev, packet, maxRawPackets));
|
||||
},
|
||||
onMessageAcked: (messageId: number, ackCount: number, paths?: MessagePath[]) => {
|
||||
updateMessageAck(messageId, ackCount, paths);
|
||||
messageCache.updateAck(messageId, ackCount, paths);
|
||||
receiveMessageAck(messageId, ackCount, paths);
|
||||
},
|
||||
}),
|
||||
[
|
||||
activeConversationRef,
|
||||
addMessageIfNew,
|
||||
blockedKeysRef,
|
||||
blockedNamesRef,
|
||||
checkMention,
|
||||
fetchAllContacts,
|
||||
fetchConfig,
|
||||
hasNewerMessagesRef,
|
||||
incrementUnread,
|
||||
renameConversationState,
|
||||
renameConversationMessages,
|
||||
maxRawPackets,
|
||||
mergeChannelIntoList,
|
||||
pendingDeleteFallbackRef,
|
||||
prevHealthRef,
|
||||
recordMessageEvent,
|
||||
receiveMessageAck,
|
||||
observeMessage,
|
||||
refreshUnreads,
|
||||
reconcileOnReconnect,
|
||||
removeConversationMessages,
|
||||
setActiveConversation,
|
||||
setChannels,
|
||||
setContacts,
|
||||
setHealth,
|
||||
setRawPackets,
|
||||
trackNewMessage,
|
||||
triggerReconcile,
|
||||
updateMessageAck,
|
||||
notifyIncomingMessage,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -76,6 +76,17 @@ function createInitialPaneData(): PaneData {
|
||||
|
||||
const repeaterDashboardCache = new Map<string, RepeaterDashboardCacheEntry>();
|
||||
|
||||
function getLoginToastTitle(status: string): string {
|
||||
switch (status) {
|
||||
case 'timeout':
|
||||
return 'Login confirmation not heard';
|
||||
case 'error':
|
||||
return 'Login not confirmed';
|
||||
default:
|
||||
return 'Repeater login not confirmed';
|
||||
}
|
||||
}
|
||||
|
||||
function clonePaneData(data: PaneData): PaneData {
|
||||
return { ...data };
|
||||
}
|
||||
@@ -177,8 +188,13 @@ export interface UseRepeaterDashboardResult {
|
||||
syncClock: () => Promise<void>;
|
||||
}
|
||||
|
||||
interface UseRepeaterDashboardOptions {
|
||||
hasAdvertLocation?: boolean;
|
||||
}
|
||||
|
||||
export function useRepeaterDashboard(
|
||||
activeConversation: Conversation | null
|
||||
activeConversation: Conversation | null,
|
||||
options: UseRepeaterDashboardOptions = {}
|
||||
): UseRepeaterDashboardResult {
|
||||
const conversationId =
|
||||
activeConversation && activeConversation.type === 'contact' ? activeConversation.id : null;
|
||||
@@ -255,13 +271,22 @@ export function useRepeaterDashboard(
|
||||
setLoginLoading(true);
|
||||
setLoginError(null);
|
||||
try {
|
||||
await api.repeaterLogin(publicKey, password);
|
||||
const result = await api.repeaterLogin(publicKey, password);
|
||||
if (activeIdRef.current !== conversationId) return;
|
||||
setLoggedIn(true);
|
||||
if (!result.authenticated) {
|
||||
const msg = result.message ?? 'Repeater login was not confirmed';
|
||||
setLoginError(msg);
|
||||
toast.error(getLoginToastTitle(result.status), { description: msg });
|
||||
}
|
||||
} catch (err) {
|
||||
if (activeIdRef.current !== conversationId) return;
|
||||
const msg = err instanceof Error ? err.message : 'Login failed';
|
||||
setLoggedIn(true);
|
||||
setLoginError(msg);
|
||||
toast.error('Login request failed', {
|
||||
description: `${msg}. The dashboard is still available, but repeater operations may fail until a login succeeds.`,
|
||||
});
|
||||
} finally {
|
||||
if (activeIdRef.current === conversationId) {
|
||||
setLoginLoading(false);
|
||||
@@ -281,7 +306,7 @@ export function useRepeaterDashboard(
|
||||
if (!publicKey) return;
|
||||
const conversationId = publicKey;
|
||||
|
||||
if (pane === 'neighbors') {
|
||||
if (pane === 'neighbors' && !options.hasAdvertLocation) {
|
||||
const nodeInfoState = paneStatesRef.current.nodeInfo;
|
||||
const nodeInfoData = paneDataRef.current.nodeInfo;
|
||||
const needsNodeInfoPrefetch =
|
||||
@@ -365,7 +390,7 @@ export function useRepeaterDashboard(
|
||||
}
|
||||
}
|
||||
},
|
||||
[getPublicKey]
|
||||
[getPublicKey, options.hasAdvertLocation]
|
||||
);
|
||||
|
||||
const loadAll = useCallback(async () => {
|
||||
|
||||
@@ -16,10 +16,14 @@ interface UseUnreadCountsResult {
|
||||
mentions: Record<string, boolean>;
|
||||
lastMessageTimes: ConversationTimes;
|
||||
unreadLastReadAts: Record<string, number | null>;
|
||||
incrementUnread: (stateKey: string, hasMention?: boolean) => void;
|
||||
recordMessageEvent: (args: {
|
||||
msg: Message;
|
||||
activeConversation: boolean;
|
||||
isNewMessage: boolean;
|
||||
hasMention?: boolean;
|
||||
}) => void;
|
||||
renameConversationState: (oldStateKey: string, newStateKey: string) => void;
|
||||
markAllRead: () => void;
|
||||
trackNewMessage: (msg: Message) => void;
|
||||
refreshUnreads: () => Promise<void>;
|
||||
}
|
||||
|
||||
@@ -162,7 +166,6 @@ export function useUnreadCounts(
|
||||
}
|
||||
}, [activeConversation]);
|
||||
|
||||
// Increment unread count for a conversation
|
||||
const incrementUnread = useCallback((stateKey: string, hasMention?: boolean) => {
|
||||
setUnreadCounts((prev) => ({
|
||||
...prev,
|
||||
@@ -176,6 +179,40 @@ export function useUnreadCounts(
|
||||
}
|
||||
}, []);
|
||||
|
||||
const recordMessageEvent = useCallback(
|
||||
({
|
||||
msg,
|
||||
activeConversation: isActiveConversation,
|
||||
isNewMessage,
|
||||
hasMention,
|
||||
}: {
|
||||
msg: Message;
|
||||
activeConversation: boolean;
|
||||
isNewMessage: boolean;
|
||||
hasMention?: boolean;
|
||||
}) => {
|
||||
let stateKey: string | null = null;
|
||||
if (msg.type === 'CHAN' && msg.conversation_key) {
|
||||
stateKey = getStateKey('channel', msg.conversation_key);
|
||||
} else if (msg.type === 'PRIV' && msg.conversation_key) {
|
||||
stateKey = getStateKey('contact', msg.conversation_key);
|
||||
}
|
||||
|
||||
if (!stateKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
const timestamp = msg.received_at || Math.floor(Date.now() / 1000);
|
||||
const updated = setLastMessageTime(stateKey, timestamp);
|
||||
setLastMessageTimes(updated);
|
||||
|
||||
if (!isActiveConversation && !msg.outgoing && isNewMessage) {
|
||||
incrementUnread(stateKey, hasMention);
|
||||
}
|
||||
},
|
||||
[incrementUnread]
|
||||
);
|
||||
|
||||
const renameConversationState = useCallback((oldStateKey: string, newStateKey: string) => {
|
||||
if (oldStateKey === newStateKey) return;
|
||||
|
||||
@@ -212,31 +249,14 @@ export function useUnreadCounts(
|
||||
});
|
||||
}, []);
|
||||
|
||||
// Track a new incoming message for unread counts
|
||||
const trackNewMessage = useCallback((msg: Message) => {
|
||||
let conversationKey: string | null = null;
|
||||
if (msg.type === 'CHAN' && msg.conversation_key) {
|
||||
conversationKey = getStateKey('channel', msg.conversation_key);
|
||||
} else if (msg.type === 'PRIV' && msg.conversation_key) {
|
||||
conversationKey = getStateKey('contact', msg.conversation_key);
|
||||
}
|
||||
|
||||
if (conversationKey) {
|
||||
const timestamp = msg.received_at || Math.floor(Date.now() / 1000);
|
||||
const updated = setLastMessageTime(conversationKey, timestamp);
|
||||
setLastMessageTimes(updated);
|
||||
}
|
||||
}, []);
|
||||
|
||||
return {
|
||||
unreadCounts,
|
||||
mentions,
|
||||
lastMessageTimes,
|
||||
unreadLastReadAts,
|
||||
incrementUnread,
|
||||
recordMessageEvent,
|
||||
renameConversationState,
|
||||
markAllRead,
|
||||
trackNewMessage,
|
||||
refreshUnreads: fetchUnreads,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
/**
|
||||
* LRU message cache for recently-visited conversations.
|
||||
*
|
||||
* Uses Map insertion-order semantics: the most recently used entry
|
||||
* is always at the end. Eviction removes the first (least-recently-used) entry.
|
||||
*
|
||||
* Cache size: 20 conversations, 200 messages each (~2.4MB worst case).
|
||||
*/
|
||||
|
||||
import type { Message, MessagePath } from './types';
|
||||
|
||||
export const MAX_CACHED_CONVERSATIONS = 20;
|
||||
export const MAX_MESSAGES_PER_ENTRY = 200;
|
||||
|
||||
interface CacheEntry {
|
||||
messages: Message[];
|
||||
seenContent: Set<string>;
|
||||
hasOlderMessages: boolean;
|
||||
}
|
||||
|
||||
const cache = new Map<string, CacheEntry>();
|
||||
|
||||
/** Get a cached entry and promote it to most-recently-used. */
|
||||
export function get(id: string): CacheEntry | undefined {
|
||||
const entry = cache.get(id);
|
||||
if (!entry) return undefined;
|
||||
// Promote to MRU: delete and re-insert
|
||||
cache.delete(id);
|
||||
cache.set(id, entry);
|
||||
return entry;
|
||||
}
|
||||
|
||||
/** Insert or update an entry at MRU position, evicting LRU if over capacity. */
|
||||
export function set(id: string, entry: CacheEntry): void {
|
||||
// Trim to most recent messages to bound memory
|
||||
if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
|
||||
const trimmed = [...entry.messages]
|
||||
.sort((a, b) => b.received_at - a.received_at)
|
||||
.slice(0, MAX_MESSAGES_PER_ENTRY);
|
||||
entry = { ...entry, messages: trimmed, hasOlderMessages: true };
|
||||
}
|
||||
// Remove first so re-insert moves to end
|
||||
cache.delete(id);
|
||||
cache.set(id, entry);
|
||||
// Evict LRU (first entry) if over capacity
|
||||
if (cache.size > MAX_CACHED_CONVERSATIONS) {
|
||||
const lruKey = cache.keys().next().value as string;
|
||||
cache.delete(lruKey);
|
||||
}
|
||||
}
|
||||
|
||||
/** Add a message to a cached conversation with dedup. Returns true if new, false if duplicate. */
|
||||
export function addMessage(id: string, msg: Message, contentKey: string): boolean {
|
||||
const entry = cache.get(id);
|
||||
if (!entry) {
|
||||
// Auto-create a minimal entry for never-visited conversations
|
||||
cache.set(id, {
|
||||
messages: [msg],
|
||||
seenContent: new Set([contentKey]),
|
||||
hasOlderMessages: true,
|
||||
});
|
||||
// Evict LRU if over capacity
|
||||
if (cache.size > MAX_CACHED_CONVERSATIONS) {
|
||||
const lruKey = cache.keys().next().value as string;
|
||||
cache.delete(lruKey);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (entry.seenContent.has(contentKey)) return false;
|
||||
if (entry.messages.some((m) => m.id === msg.id)) return false;
|
||||
entry.seenContent.add(contentKey);
|
||||
entry.messages = [...entry.messages, msg];
|
||||
// Trim if over limit (drop oldest by received_at)
|
||||
if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
|
||||
entry.messages = [...entry.messages]
|
||||
.sort((a, b) => b.received_at - a.received_at)
|
||||
.slice(0, MAX_MESSAGES_PER_ENTRY);
|
||||
}
|
||||
// Promote to MRU so actively-messaged conversations aren't evicted
|
||||
cache.delete(id);
|
||||
cache.set(id, entry);
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Scan all cached entries for a message ID and update its ack/paths. */
|
||||
export function updateAck(messageId: number, ackCount: number, paths?: MessagePath[]): void {
|
||||
for (const entry of cache.values()) {
|
||||
const idx = entry.messages.findIndex((m) => m.id === messageId);
|
||||
if (idx >= 0) {
|
||||
const current = entry.messages[idx];
|
||||
const updated = [...entry.messages];
|
||||
updated[idx] = {
|
||||
...current,
|
||||
acked: Math.max(current.acked, ackCount),
|
||||
...(paths !== undefined && paths.length >= (current.paths?.length ?? 0) && { paths }),
|
||||
};
|
||||
entry.messages = updated;
|
||||
return; // Message IDs are unique, stop after first match
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare fetched messages against current state.
|
||||
* Returns merged array if there are differences (new messages or ack changes),
|
||||
* or null if the cache is already consistent (happy path — no rerender needed).
|
||||
* Preserves any older paginated messages not present in the fetched page.
|
||||
*/
|
||||
export function reconcile(current: Message[], fetched: Message[]): Message[] | null {
|
||||
const currentById = new Map<number, { acked: number; pathsLen: number; text: string }>();
|
||||
for (const m of current) {
|
||||
currentById.set(m.id, { acked: m.acked, pathsLen: m.paths?.length ?? 0, text: m.text });
|
||||
}
|
||||
|
||||
let needsUpdate = false;
|
||||
for (const m of fetched) {
|
||||
const cur = currentById.get(m.id);
|
||||
if (
|
||||
!cur ||
|
||||
cur.acked !== m.acked ||
|
||||
cur.pathsLen !== (m.paths?.length ?? 0) ||
|
||||
cur.text !== m.text
|
||||
) {
|
||||
needsUpdate = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!needsUpdate) return null;
|
||||
|
||||
// Merge: fresh recent page + any older paginated messages not in the fetch
|
||||
const fetchedIds = new Set(fetched.map((m) => m.id));
|
||||
const olderMessages = current.filter((m) => !fetchedIds.has(m.id));
|
||||
return [...fetched, ...olderMessages];
|
||||
}
|
||||
|
||||
/** Evict a specific conversation from the cache. */
|
||||
export function remove(id: string): void {
|
||||
cache.delete(id);
|
||||
}
|
||||
|
||||
/** Move cached conversation state to a new conversation id. */
|
||||
export function rename(oldId: string, newId: string): void {
|
||||
if (oldId === newId) return;
|
||||
const oldEntry = cache.get(oldId);
|
||||
if (!oldEntry) return;
|
||||
|
||||
const newEntry = cache.get(newId);
|
||||
if (!newEntry) {
|
||||
cache.delete(oldId);
|
||||
cache.set(newId, oldEntry);
|
||||
return;
|
||||
}
|
||||
|
||||
const mergedMessages = [...newEntry.messages];
|
||||
const seenIds = new Set(mergedMessages.map((message) => message.id));
|
||||
for (const message of oldEntry.messages) {
|
||||
if (!seenIds.has(message.id)) {
|
||||
mergedMessages.push(message);
|
||||
seenIds.add(message.id);
|
||||
}
|
||||
}
|
||||
|
||||
cache.delete(oldId);
|
||||
cache.set(newId, {
|
||||
messages: mergedMessages,
|
||||
seenContent: new Set([...newEntry.seenContent, ...oldEntry.seenContent]),
|
||||
hasOlderMessages: newEntry.hasOlderMessages || oldEntry.hasOlderMessages,
|
||||
});
|
||||
}
|
||||
|
||||
/** Clear the entire cache. */
|
||||
export function clear(): void {
|
||||
cache.clear();
|
||||
}
|
||||
@@ -31,15 +31,15 @@ const mocks = vi.hoisted(() => ({
|
||||
error: vi.fn(),
|
||||
},
|
||||
hookFns: {
|
||||
setMessages: vi.fn(),
|
||||
fetchMessages: vi.fn(async () => {}),
|
||||
fetchOlderMessages: vi.fn(async () => {}),
|
||||
addMessageIfNew: vi.fn(),
|
||||
updateMessageAck: vi.fn(),
|
||||
triggerReconcile: vi.fn(),
|
||||
incrementUnread: vi.fn(),
|
||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||
receiveMessageAck: vi.fn(),
|
||||
reconcileOnReconnect: vi.fn(),
|
||||
renameConversationMessages: vi.fn(),
|
||||
removeConversationMessages: vi.fn(),
|
||||
clearConversationMessages: vi.fn(),
|
||||
recordMessageEvent: vi.fn(),
|
||||
markAllRead: vi.fn(),
|
||||
trackNewMessage: vi.fn(),
|
||||
refreshUnreads: vi.fn(async () => {}),
|
||||
},
|
||||
}));
|
||||
@@ -63,38 +63,30 @@ vi.mock('../hooks', async (importOriginal) => {
|
||||
hasOlderMessages: false,
|
||||
hasNewerMessages: false,
|
||||
loadingNewer: false,
|
||||
hasNewerMessagesRef: { current: false },
|
||||
setMessages: mocks.hookFns.setMessages,
|
||||
fetchMessages: mocks.hookFns.fetchMessages,
|
||||
fetchOlderMessages: mocks.hookFns.fetchOlderMessages,
|
||||
fetchNewerMessages: vi.fn(async () => {}),
|
||||
jumpToBottom: vi.fn(),
|
||||
reloadCurrentConversation: vi.fn(),
|
||||
addMessageIfNew: mocks.hookFns.addMessageIfNew,
|
||||
updateMessageAck: mocks.hookFns.updateMessageAck,
|
||||
triggerReconcile: mocks.hookFns.triggerReconcile,
|
||||
observeMessage: mocks.hookFns.observeMessage,
|
||||
receiveMessageAck: mocks.hookFns.receiveMessageAck,
|
||||
reconcileOnReconnect: mocks.hookFns.reconcileOnReconnect,
|
||||
renameConversationMessages: mocks.hookFns.renameConversationMessages,
|
||||
removeConversationMessages: mocks.hookFns.removeConversationMessages,
|
||||
clearConversationMessages: mocks.hookFns.clearConversationMessages,
|
||||
}),
|
||||
useUnreadCounts: () => ({
|
||||
unreadCounts: {},
|
||||
mentions: {},
|
||||
lastMessageTimes: {},
|
||||
unreadLastReadAts: {},
|
||||
incrementUnread: mocks.hookFns.incrementUnread,
|
||||
recordMessageEvent: mocks.hookFns.recordMessageEvent,
|
||||
renameConversationState: vi.fn(),
|
||||
markAllRead: mocks.hookFns.markAllRead,
|
||||
trackNewMessage: mocks.hookFns.trackNewMessage,
|
||||
refreshUnreads: mocks.hookFns.refreshUnreads,
|
||||
}),
|
||||
getMessageContentKey: () => 'content-key',
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../messageCache', () => ({
|
||||
addMessage: vi.fn(),
|
||||
updateAck: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../components/StatusBar', () => ({
|
||||
StatusBar: ({
|
||||
settingsMode,
|
||||
@@ -172,7 +164,10 @@ vi.mock('../components/ui/sonner', () => ({
|
||||
|
||||
vi.mock('../utils/urlHash', () => ({
|
||||
parseHashConversation: () => null,
|
||||
parseHashSettingsSection: () => null,
|
||||
updateUrlHash: vi.fn(),
|
||||
updateSettingsHash: vi.fn(),
|
||||
getSettingsHash: (section: string) => `#settings/${section}`,
|
||||
getMapFocusHash: () => '#map',
|
||||
}));
|
||||
|
||||
@@ -295,7 +290,7 @@ describe('App favorite toggle flow', () => {
|
||||
await waitFor(() => {
|
||||
expect(mocks.api.getChannels).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
expect(mocks.hookFns.triggerReconcile).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.hookFns.reconcileOnReconnect).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.hookFns.refreshUnreads).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
|
||||
@@ -37,15 +37,16 @@ vi.mock('../hooks', async (importOriginal) => {
|
||||
hasOlderMessages: false,
|
||||
hasNewerMessages: false,
|
||||
loadingNewer: false,
|
||||
hasNewerMessagesRef: { current: false },
|
||||
setMessages: vi.fn(),
|
||||
fetchOlderMessages: vi.fn(async () => {}),
|
||||
fetchNewerMessages: vi.fn(async () => {}),
|
||||
jumpToBottom: vi.fn(),
|
||||
reloadCurrentConversation: vi.fn(),
|
||||
addMessageIfNew: vi.fn(),
|
||||
updateMessageAck: vi.fn(),
|
||||
triggerReconcile: vi.fn(),
|
||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||
receiveMessageAck: vi.fn(),
|
||||
reconcileOnReconnect: vi.fn(),
|
||||
renameConversationMessages: vi.fn(),
|
||||
removeConversationMessages: vi.fn(),
|
||||
clearConversationMessages: vi.fn(),
|
||||
};
|
||||
},
|
||||
useUnreadCounts: () => ({
|
||||
@@ -53,22 +54,14 @@ vi.mock('../hooks', async (importOriginal) => {
|
||||
mentions: {},
|
||||
lastMessageTimes: {},
|
||||
unreadLastReadAts: {},
|
||||
incrementUnread: vi.fn(),
|
||||
recordMessageEvent: vi.fn(),
|
||||
renameConversationState: vi.fn(),
|
||||
markAllRead: vi.fn(),
|
||||
trackNewMessage: vi.fn(),
|
||||
refreshUnreads: vi.fn(),
|
||||
}),
|
||||
getMessageContentKey: () => 'content-key',
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../messageCache', () => ({
|
||||
addMessage: vi.fn(),
|
||||
updateAck: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../components/StatusBar', () => ({
|
||||
StatusBar: () => <div data-testid="status-bar" />,
|
||||
}));
|
||||
|
||||
@@ -32,38 +32,30 @@ vi.mock('../hooks', async (importOriginal) => {
|
||||
hasOlderMessages: false,
|
||||
hasNewerMessages: false,
|
||||
loadingNewer: false,
|
||||
hasNewerMessagesRef: { current: false },
|
||||
setMessages: vi.fn(),
|
||||
fetchMessages: vi.fn(async () => {}),
|
||||
fetchOlderMessages: vi.fn(async () => {}),
|
||||
fetchNewerMessages: vi.fn(async () => {}),
|
||||
jumpToBottom: vi.fn(),
|
||||
reloadCurrentConversation: vi.fn(),
|
||||
addMessageIfNew: vi.fn(),
|
||||
updateMessageAck: vi.fn(),
|
||||
triggerReconcile: vi.fn(),
|
||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||
receiveMessageAck: vi.fn(),
|
||||
reconcileOnReconnect: vi.fn(),
|
||||
renameConversationMessages: vi.fn(),
|
||||
removeConversationMessages: vi.fn(),
|
||||
clearConversationMessages: vi.fn(),
|
||||
}),
|
||||
useUnreadCounts: () => ({
|
||||
unreadCounts: {},
|
||||
mentions: {},
|
||||
lastMessageTimes: {},
|
||||
unreadLastReadAts: {},
|
||||
incrementUnread: vi.fn(),
|
||||
recordMessageEvent: vi.fn(),
|
||||
renameConversationState: vi.fn(),
|
||||
markAllRead: vi.fn(),
|
||||
trackNewMessage: vi.fn(),
|
||||
refreshUnreads: vi.fn(async () => {}),
|
||||
}),
|
||||
getMessageContentKey: () => 'content-key',
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../messageCache', () => ({
|
||||
addMessage: vi.fn(),
|
||||
updateAck: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../components/StatusBar', () => ({
|
||||
StatusBar: () => <div data-testid="status-bar" />,
|
||||
}));
|
||||
@@ -98,7 +90,9 @@ vi.mock('../components/NewMessageModal', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../components/SettingsModal', () => ({
|
||||
SettingsModal: () => null,
|
||||
SettingsModal: ({ desktopSection }: { desktopSection?: string }) => (
|
||||
<div data-testid="settings-modal-section">{desktopSection ?? 'none'}</div>
|
||||
),
|
||||
SETTINGS_SECTION_ORDER: ['radio', 'local', 'database', 'bot'],
|
||||
SETTINGS_SECTION_LABELS: {
|
||||
radio: 'Radio',
|
||||
@@ -262,6 +256,37 @@ describe('App startup hash resolution', () => {
|
||||
expect(window.location.hash).toBe('');
|
||||
});
|
||||
|
||||
it('tracks the current conversation in local storage even before reopen is enabled', async () => {
|
||||
const chatChannel = {
|
||||
key: '11111111111111111111111111111111',
|
||||
name: 'Ops',
|
||||
is_hashtag: false,
|
||||
on_radio: false,
|
||||
last_read_at: null,
|
||||
};
|
||||
|
||||
window.location.hash = '';
|
||||
mocks.api.getChannels.mockResolvedValue([publicChannel, chatChannel]);
|
||||
localStorage.setItem(
|
||||
LAST_VIEWED_CONVERSATION_KEY,
|
||||
JSON.stringify({
|
||||
type: 'channel',
|
||||
id: chatChannel.key,
|
||||
name: chatChannel.name,
|
||||
})
|
||||
);
|
||||
|
||||
render(<App />);
|
||||
|
||||
await waitFor(() => {
|
||||
for (const node of screen.getAllByTestId('active-conversation')) {
|
||||
expect(node).toHaveTextContent(`channel:${publicChannel.key}:Public`);
|
||||
}
|
||||
});
|
||||
|
||||
expect(localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY)).toContain(publicChannel.key);
|
||||
});
|
||||
|
||||
it('restores last viewed contact from legacy name token when hash is empty and reopen is enabled', async () => {
|
||||
const aliceContact = {
|
||||
public_key: 'b'.repeat(64),
|
||||
@@ -301,4 +326,20 @@ describe('App startup hash resolution', () => {
|
||||
});
|
||||
expect(window.location.hash).toBe('');
|
||||
});
|
||||
|
||||
it('opens settings from a settings hash and falls back away from radio when disconnected', async () => {
|
||||
window.location.hash = '#settings/radio';
|
||||
mocks.api.getRadioConfig.mockRejectedValue(new Error('radio offline'));
|
||||
|
||||
render(<App />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('settings-modal-section')).toHaveTextContent('local');
|
||||
});
|
||||
|
||||
for (const button of screen.getAllByRole('button', { name: 'Radio' })) {
|
||||
expect(button).toBeDisabled();
|
||||
}
|
||||
expect(window.location.hash).toBe('#settings/local');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,6 +3,7 @@ import { describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ChatHeader } from '../components/ChatHeader';
|
||||
import type { Channel, Contact, Conversation, Favorite, PathDiscoveryResponse } from '../types';
|
||||
import { PUBLIC_CHANNEL_KEY } from '../utils/publicChannel';
|
||||
|
||||
function makeChannel(key: string, name: string, isHashtag: boolean): Channel {
|
||||
return { key, name, is_hashtag: isHashtag, on_radio: false, last_read_at: null };
|
||||
@@ -169,6 +170,25 @@ describe('ChatHeader key visibility', () => {
|
||||
expect(onToggleNotifications).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('hides the delete button for the canonical Public channel', () => {
|
||||
const channel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public', false);
|
||||
const conversation: Conversation = { type: 'channel', id: PUBLIC_CHANNEL_KEY, name: 'Public' };
|
||||
|
||||
render(<ChatHeader {...baseProps} conversation={conversation} channels={[channel]} />);
|
||||
|
||||
expect(screen.queryByRole('button', { name: 'Delete' })).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('still shows the delete button for non-canonical channels named Public', () => {
|
||||
const key = 'AB'.repeat(16);
|
||||
const channel = makeChannel(key, 'Public', false);
|
||||
const conversation: Conversation = { type: 'channel', id: key, name: 'Public' };
|
||||
|
||||
render(<ChatHeader {...baseProps} conversation={conversation} channels={[channel]} />);
|
||||
|
||||
expect(screen.getByRole('button', { name: 'Delete' })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens path discovery modal for contacts and runs the request on demand', async () => {
|
||||
const pubKey = '21'.repeat(32);
|
||||
const contact: Contact = {
|
||||
|
||||
@@ -8,12 +8,12 @@
|
||||
* between backend and frontend - both sides test against the same data.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import fixtures from './fixtures/websocket_events.json';
|
||||
import { getMessageContentKey } from '../hooks/useConversationMessages';
|
||||
import { getStateKey } from '../utils/conversationState';
|
||||
import { mergeContactIntoList } from '../utils/contactMerge';
|
||||
import * as messageCache from '../messageCache';
|
||||
import { ConversationMessageCache } from '../hooks/useConversationMessages';
|
||||
import { getMessageContentKey } from '../utils/messageIdentity';
|
||||
import type { Contact, Message } from '../types';
|
||||
|
||||
/**
|
||||
@@ -25,6 +25,7 @@ interface MockState {
|
||||
unreadCounts: Record<string, number>;
|
||||
lastMessageTimes: Record<string, number>;
|
||||
seenActiveContent: Set<string>;
|
||||
messageCache: ConversationMessageCache;
|
||||
}
|
||||
|
||||
function createMockState(): MockState {
|
||||
@@ -33,6 +34,7 @@ function createMockState(): MockState {
|
||||
unreadCounts: {},
|
||||
lastMessageTimes: {},
|
||||
seenActiveContent: new Set(),
|
||||
messageCache: new ConversationMessageCache(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -68,7 +70,7 @@ function handleMessageEvent(
|
||||
state.lastMessageTimes[stateKey] = msg.received_at;
|
||||
|
||||
if (!isForActiveConversation) {
|
||||
const isNew = messageCache.addMessage(msg.conversation_key, msg, contentKey);
|
||||
const isNew = state.messageCache.addMessage(msg.conversation_key, msg);
|
||||
if (!msg.outgoing && isNew) {
|
||||
state.unreadCounts[stateKey] = (state.unreadCounts[stateKey] || 0) + 1;
|
||||
unreadIncremented = true;
|
||||
@@ -78,11 +80,6 @@ function handleMessageEvent(
|
||||
return { added, unreadIncremented };
|
||||
}
|
||||
|
||||
// Clear messageCache between tests to avoid cross-test contamination
|
||||
beforeEach(() => {
|
||||
messageCache.clear();
|
||||
});
|
||||
|
||||
describe('Integration: Channel Message Events', () => {
|
||||
const fixture = fixtures.channel_message;
|
||||
|
||||
@@ -180,7 +177,7 @@ describe('Integration: No phantom unreads from mesh echoes (hitlist #8 regressio
|
||||
// dual-set design the global set would drop msg-0's key during pruning,
|
||||
// so a later mesh echo of msg-0 would pass the global check and
|
||||
// phantom-increment unread. With the fix, messageCache's per-conversation
|
||||
// seenContent is the single source of truth and is never pruned.
|
||||
// Cached messages remain the source of truth for inactive-conversation dedup.
|
||||
const MESSAGE_COUNT = 1001;
|
||||
for (let i = 0; i < MESSAGE_COUNT; i++) {
|
||||
const msg: Message = {
|
||||
@@ -342,11 +339,8 @@ describe('Integration: Contact Merge', () => {
|
||||
// --- ACK + messageCache propagation tests ---
|
||||
|
||||
describe('Integration: ACK + messageCache propagation', () => {
|
||||
beforeEach(() => {
|
||||
messageCache.clear();
|
||||
});
|
||||
|
||||
it('updateAck updates acked count on cached message', () => {
|
||||
const messageCache = new ConversationMessageCache();
|
||||
const msg: Message = {
|
||||
id: 100,
|
||||
type: 'PRIV',
|
||||
@@ -362,7 +356,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
acked: 0,
|
||||
sender_name: null,
|
||||
};
|
||||
messageCache.addMessage('pk_abc', msg, 'key-100');
|
||||
messageCache.addMessage('pk_abc', msg);
|
||||
|
||||
messageCache.updateAck(100, 1);
|
||||
|
||||
@@ -372,6 +366,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
});
|
||||
|
||||
it('updateAck updates paths when longer', () => {
|
||||
const messageCache = new ConversationMessageCache();
|
||||
const msg: Message = {
|
||||
id: 101,
|
||||
type: 'PRIV',
|
||||
@@ -387,7 +382,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
acked: 1,
|
||||
sender_name: null,
|
||||
};
|
||||
messageCache.addMessage('pk_abc', msg, 'key-101');
|
||||
messageCache.addMessage('pk_abc', msg);
|
||||
|
||||
const longerPaths = [
|
||||
{ path: 'aa', received_at: 1700000001 },
|
||||
@@ -401,6 +396,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
});
|
||||
|
||||
it('preserves higher existing ack count (max semantics)', () => {
|
||||
const messageCache = new ConversationMessageCache();
|
||||
const msg: Message = {
|
||||
id: 102,
|
||||
type: 'PRIV',
|
||||
@@ -416,7 +412,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
acked: 5,
|
||||
sender_name: null,
|
||||
};
|
||||
messageCache.addMessage('pk_abc', msg, 'key-102');
|
||||
messageCache.addMessage('pk_abc', msg);
|
||||
|
||||
// Try to update with a lower ack count
|
||||
messageCache.updateAck(102, 3);
|
||||
@@ -426,6 +422,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
});
|
||||
|
||||
it('is a no-op for unknown message ID', () => {
|
||||
const messageCache = new ConversationMessageCache();
|
||||
const msg: Message = {
|
||||
id: 103,
|
||||
type: 'PRIV',
|
||||
@@ -441,7 +438,7 @@ describe('Integration: ACK + messageCache propagation', () => {
|
||||
acked: 0,
|
||||
sender_name: null,
|
||||
};
|
||||
messageCache.addMessage('pk_abc', msg, 'key-103');
|
||||
messageCache.addMessage('pk_abc', msg);
|
||||
|
||||
// Update a non-existent message ID — should not throw or modify anything
|
||||
messageCache.updateAck(999, 1);
|
||||
|
||||
@@ -52,4 +52,43 @@ describe('MapView', () => {
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText('Last heard: Never heard by this server')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('keeps the 7-day cutoff stable for the lifetime of the mounted map', () => {
|
||||
vi.useFakeTimers();
|
||||
try {
|
||||
vi.setSystemTime(new Date('2026-03-15T12:00:00Z'));
|
||||
|
||||
const contact: Contact = {
|
||||
public_key: 'bb'.repeat(32),
|
||||
name: 'Almost Stale',
|
||||
type: 1,
|
||||
flags: 0,
|
||||
last_path: null,
|
||||
last_path_len: -1,
|
||||
out_path_hash_mode: -1,
|
||||
route_override_path: null,
|
||||
route_override_len: null,
|
||||
route_override_hash_mode: null,
|
||||
last_advert: null,
|
||||
lat: 41,
|
||||
lon: -73,
|
||||
last_seen: Math.floor(Date.now() / 1000) - 7 * 24 * 60 * 60 + 60,
|
||||
on_radio: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
};
|
||||
|
||||
const { rerender } = render(<MapView contacts={[contact]} focusedKey={null} />);
|
||||
|
||||
expect(screen.getByText(/showing 1 contact heard in the last 7 days/i)).toBeInTheDocument();
|
||||
|
||||
vi.advanceTimersByTime(2 * 60 * 1000);
|
||||
rerender(<MapView contacts={[contact]} focusedKey={null} />);
|
||||
|
||||
expect(screen.getByText(/showing 1 contact heard in the last 7 days/i)).toBeInTheDocument();
|
||||
} finally {
|
||||
vi.useRealTimers();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,8 +3,12 @@
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import * as messageCache from '../messageCache';
|
||||
import { MAX_CACHED_CONVERSATIONS, MAX_MESSAGES_PER_ENTRY } from '../messageCache';
|
||||
import {
|
||||
ConversationMessageCache,
|
||||
MAX_CACHED_CONVERSATIONS,
|
||||
MAX_MESSAGES_PER_ENTRY,
|
||||
reconcileConversationMessages,
|
||||
} from '../hooks/useConversationMessages';
|
||||
import type { Message } from '../types';
|
||||
|
||||
function createMessage(overrides: Partial<Message> = {}): Message {
|
||||
@@ -27,16 +31,14 @@ function createMessage(overrides: Partial<Message> = {}): Message {
|
||||
}
|
||||
|
||||
function createEntry(messages: Message[] = [], hasOlderMessages = false) {
|
||||
const seenContent = new Set<string>();
|
||||
for (const msg of messages) {
|
||||
seenContent.add(`${msg.type}-${msg.conversation_key}-${msg.text}-${msg.sender_timestamp}`);
|
||||
}
|
||||
return { messages, seenContent, hasOlderMessages };
|
||||
return { messages, hasOlderMessages };
|
||||
}
|
||||
|
||||
describe('messageCache', () => {
|
||||
let messageCache: ConversationMessageCache;
|
||||
|
||||
beforeEach(() => {
|
||||
messageCache.clear();
|
||||
messageCache = new ConversationMessageCache();
|
||||
});
|
||||
|
||||
describe('get/set', () => {
|
||||
@@ -155,11 +157,7 @@ describe('messageCache', () => {
|
||||
messageCache.set('conv1', createEntry([]));
|
||||
|
||||
const msg = createMessage({ id: 10, text: 'New message' });
|
||||
const result = messageCache.addMessage(
|
||||
'conv1',
|
||||
msg,
|
||||
'CHAN-channel123-New message-1700000000'
|
||||
);
|
||||
const result = messageCache.addMessage('conv1', msg);
|
||||
|
||||
expect(result).toBe(true);
|
||||
const entry = messageCache.get('conv1');
|
||||
@@ -171,12 +169,11 @@ describe('messageCache', () => {
|
||||
messageCache.set('conv1', createEntry([]));
|
||||
|
||||
const msg1 = createMessage({ id: 10, text: 'Hello' });
|
||||
const contentKey = 'CHAN-channel123-Hello-1700000000';
|
||||
expect(messageCache.addMessage('conv1', msg1, contentKey)).toBe(true);
|
||||
expect(messageCache.addMessage('conv1', msg1)).toBe(true);
|
||||
|
||||
// Same content key, different message id
|
||||
const msg2 = createMessage({ id: 11, text: 'Hello' });
|
||||
expect(messageCache.addMessage('conv1', msg2, contentKey)).toBe(false);
|
||||
expect(messageCache.addMessage('conv1', msg2)).toBe(false);
|
||||
|
||||
const entry = messageCache.get('conv1');
|
||||
expect(entry!.messages).toHaveLength(1);
|
||||
@@ -187,9 +184,7 @@ describe('messageCache', () => {
|
||||
|
||||
// Same id, different content key
|
||||
const msg = createMessage({ id: 10, text: 'Different' });
|
||||
expect(messageCache.addMessage('conv1', msg, 'CHAN-channel123-Different-1700000000')).toBe(
|
||||
false
|
||||
);
|
||||
expect(messageCache.addMessage('conv1', msg)).toBe(false);
|
||||
|
||||
const entry = messageCache.get('conv1');
|
||||
expect(entry!.messages).toHaveLength(1);
|
||||
@@ -208,11 +203,7 @@ describe('messageCache', () => {
|
||||
text: 'newest',
|
||||
received_at: 1700000000 + MAX_MESSAGES_PER_ENTRY,
|
||||
});
|
||||
const result = messageCache.addMessage(
|
||||
'conv1',
|
||||
newMsg,
|
||||
`CHAN-channel123-newest-${newMsg.sender_timestamp}`
|
||||
);
|
||||
const result = messageCache.addMessage('conv1', newMsg);
|
||||
|
||||
expect(result).toBe(true);
|
||||
const entry = messageCache.get('conv1');
|
||||
@@ -225,11 +216,7 @@ describe('messageCache', () => {
|
||||
|
||||
it('auto-creates a minimal entry for never-visited conversations and returns true', () => {
|
||||
const msg = createMessage({ id: 10, text: 'First contact' });
|
||||
const result = messageCache.addMessage(
|
||||
'new_conv',
|
||||
msg,
|
||||
'CHAN-channel123-First contact-1700000000'
|
||||
);
|
||||
const result = messageCache.addMessage('new_conv', msg);
|
||||
|
||||
expect(result).toBe(true);
|
||||
const entry = messageCache.get('new_conv');
|
||||
@@ -237,7 +224,6 @@ describe('messageCache', () => {
|
||||
expect(entry!.messages).toHaveLength(1);
|
||||
expect(entry!.messages[0].text).toBe('First contact');
|
||||
expect(entry!.hasOlderMessages).toBe(true);
|
||||
expect(entry!.seenContent.has('CHAN-channel123-First contact-1700000000')).toBe(true);
|
||||
});
|
||||
|
||||
it('promotes entry to MRU on addMessage', () => {
|
||||
@@ -248,7 +234,7 @@ describe('messageCache', () => {
|
||||
|
||||
// addMessage to conv0 (currently LRU) should promote it
|
||||
const msg = createMessage({ id: 999, text: 'Incoming WS message' });
|
||||
messageCache.addMessage('conv0', msg, 'CHAN-channel123-Incoming WS message-1700000000');
|
||||
messageCache.addMessage('conv0', msg);
|
||||
|
||||
// Add one more — conv1 should now be LRU and get evicted, not conv0
|
||||
messageCache.set('conv_new', createEntry());
|
||||
@@ -259,11 +245,10 @@ describe('messageCache', () => {
|
||||
|
||||
it('returns false for duplicate delivery to auto-created entry', () => {
|
||||
const msg = createMessage({ id: 10, text: 'Echo' });
|
||||
const contentKey = 'CHAN-channel123-Echo-1700000000';
|
||||
|
||||
expect(messageCache.addMessage('new_conv', msg, contentKey)).toBe(true);
|
||||
expect(messageCache.addMessage('new_conv', msg)).toBe(true);
|
||||
// Duplicate via mesh echo
|
||||
expect(messageCache.addMessage('new_conv', msg, contentKey)).toBe(false);
|
||||
expect(messageCache.addMessage('new_conv', msg)).toBe(false);
|
||||
|
||||
const entry = messageCache.get('new_conv');
|
||||
expect(entry!.messages).toHaveLength(1);
|
||||
@@ -358,7 +343,7 @@ describe('messageCache', () => {
|
||||
createMessage({ id: 3, acked: 1 }),
|
||||
];
|
||||
|
||||
expect(messageCache.reconcile(msgs, fetched)).toBeNull();
|
||||
expect(reconcileConversationMessages(msgs, fetched)).toBeNull();
|
||||
});
|
||||
|
||||
it('detects new messages missing from cache', () => {
|
||||
@@ -369,7 +354,7 @@ describe('messageCache', () => {
|
||||
createMessage({ id: 3, text: 'missed via WS' }),
|
||||
];
|
||||
|
||||
const merged = messageCache.reconcile(current, fetched);
|
||||
const merged = reconcileConversationMessages(current, fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
expect(merged!.map((m) => m.id)).toEqual([1, 2, 3]);
|
||||
});
|
||||
@@ -378,7 +363,7 @@ describe('messageCache', () => {
|
||||
const current = [createMessage({ id: 1, acked: 0 })];
|
||||
const fetched = [createMessage({ id: 1, acked: 3 })];
|
||||
|
||||
const merged = messageCache.reconcile(current, fetched);
|
||||
const merged = reconcileConversationMessages(current, fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
expect(merged![0].acked).toBe(3);
|
||||
});
|
||||
@@ -397,20 +382,20 @@ describe('messageCache', () => {
|
||||
createMessage({ id: 2 }),
|
||||
];
|
||||
|
||||
const merged = messageCache.reconcile(current, fetched);
|
||||
const merged = reconcileConversationMessages(current, fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
// Should have fetched page + older paginated message
|
||||
expect(merged!.map((m) => m.id)).toEqual([4, 3, 2, 1]);
|
||||
});
|
||||
|
||||
it('returns null for empty fetched and empty current', () => {
|
||||
expect(messageCache.reconcile([], [])).toBeNull();
|
||||
expect(reconcileConversationMessages([], [])).toBeNull();
|
||||
});
|
||||
|
||||
it('detects difference when current is empty but fetch has messages', () => {
|
||||
const fetched = [createMessage({ id: 1 })];
|
||||
|
||||
const merged = messageCache.reconcile([], fetched);
|
||||
const merged = reconcileConversationMessages([], fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
expect(merged!).toHaveLength(1);
|
||||
});
|
||||
@@ -430,7 +415,7 @@ describe('messageCache', () => {
|
||||
}),
|
||||
];
|
||||
|
||||
const merged = messageCache.reconcile(current, fetched);
|
||||
const merged = reconcileConversationMessages(current, fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
expect(merged![0].paths).toHaveLength(2);
|
||||
});
|
||||
@@ -439,7 +424,7 @@ describe('messageCache', () => {
|
||||
const current = [createMessage({ id: 1, text: '[encrypted]' })];
|
||||
const fetched = [createMessage({ id: 1, text: 'Hello world' })];
|
||||
|
||||
const merged = messageCache.reconcile(current, fetched);
|
||||
const merged = reconcileConversationMessages(current, fetched);
|
||||
expect(merged).not.toBeNull();
|
||||
expect(merged![0].text).toBe('Hello world');
|
||||
});
|
||||
@@ -449,7 +434,7 @@ describe('messageCache', () => {
|
||||
const current = [createMessage({ id: 1, acked: 2, paths, text: 'Hello' })];
|
||||
const fetched = [createMessage({ id: 1, acked: 2, paths, text: 'Hello' })];
|
||||
|
||||
expect(messageCache.reconcile(current, fetched)).toBeNull();
|
||||
expect(reconcileConversationMessages(current, fetched)).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,12 +9,18 @@ import { render, screen, fireEvent } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import { MessageInput } from '../components/MessageInput';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
|
||||
// Mock sonner (toast)
|
||||
vi.mock('../components/ui/sonner', () => ({
|
||||
toast: { success: vi.fn(), error: vi.fn() },
|
||||
}));
|
||||
|
||||
const mockToast = toast as unknown as {
|
||||
success: ReturnType<typeof vi.fn>;
|
||||
error: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
|
||||
const textEncoder = new TextEncoder();
|
||||
|
||||
function byteLen(s: string): number {
|
||||
@@ -182,4 +188,24 @@ describe('MessageInput', () => {
|
||||
expect(getSendButton()).toBeEnabled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('send failure toasts', () => {
|
||||
it('shows the radio no-response toast when the send outcome is unknown', async () => {
|
||||
onSend.mockRejectedValueOnce(
|
||||
new Error(
|
||||
'Send command was issued to the radio, but no response was heard back. The message may or may not have sent successfully.'
|
||||
)
|
||||
);
|
||||
renderInput({ conversationType: 'contact' });
|
||||
|
||||
fireEvent.change(getInput(), { target: { value: 'Hello' } });
|
||||
fireEvent.click(getSendButton());
|
||||
|
||||
expect(await screen.findByDisplayValue('Hello')).toBeTruthy();
|
||||
expect(mockToast.error).toHaveBeenCalledWith('Radio did not confirm send', {
|
||||
description:
|
||||
'Send command was issued to the radio, but no response was heard back. The message may or may not have sent successfully.',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -11,6 +11,7 @@ import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import { NewMessageModal } from '../components/NewMessageModal';
|
||||
import type { Contact } from '../types';
|
||||
import { toast } from '../components/ui/sonner';
|
||||
|
||||
// Mock sonner (toast)
|
||||
vi.mock('../components/ui/sonner', () => ({
|
||||
@@ -35,6 +36,11 @@ const mockContact: Contact = {
|
||||
first_seen: null,
|
||||
};
|
||||
|
||||
const mockToast = toast as unknown as {
|
||||
success: ReturnType<typeof vi.fn>;
|
||||
error: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
|
||||
describe('NewMessageModal form reset', () => {
|
||||
const onClose = vi.fn();
|
||||
const onSelectConversation = vi.fn();
|
||||
@@ -137,6 +143,24 @@ describe('NewMessageModal form reset', () => {
|
||||
});
|
||||
expect(onClose).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('toasts when creation fails', async () => {
|
||||
const user = userEvent.setup();
|
||||
onCreateChannel.mockRejectedValueOnce(new Error('Bad key'));
|
||||
renderModal();
|
||||
await switchToTab(user, 'Room');
|
||||
|
||||
await user.type(screen.getByPlaceholderText('Room name'), 'MyRoom');
|
||||
await user.type(screen.getByPlaceholderText('Pre-shared key (hex)'), 'cc'.repeat(16));
|
||||
await user.click(screen.getByRole('button', { name: 'Create' }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockToast.error).toHaveBeenCalledWith('Failed to create conversation', {
|
||||
description: 'Bad key',
|
||||
});
|
||||
});
|
||||
expect(screen.getByText('Bad key')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('tab switching resets form', () => {
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
formatRouteLabel,
|
||||
formatRoutingOverrideInput,
|
||||
getEffectiveContactRoute,
|
||||
isValidLocation,
|
||||
resolvePath,
|
||||
formatDistance,
|
||||
formatHopCounts,
|
||||
@@ -665,6 +666,24 @@ describe('resolvePath', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('isValidLocation', () => {
|
||||
it('rejects null and unset coordinates', () => {
|
||||
expect(isValidLocation(null, -122.3)).toBe(false);
|
||||
expect(isValidLocation(47.6, null)).toBe(false);
|
||||
expect(isValidLocation(0, 0)).toBe(false);
|
||||
});
|
||||
|
||||
it('rejects out-of-range coordinates', () => {
|
||||
expect(isValidLocation(-593.497573, -1659.939204)).toBe(false);
|
||||
expect(isValidLocation(91, 0)).toBe(false);
|
||||
expect(isValidLocation(0, 181)).toBe(false);
|
||||
});
|
||||
|
||||
it('accepts sane coordinates', () => {
|
||||
expect(isValidLocation(47.6062, -122.3321)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatDistance', () => {
|
||||
it('formats distances under 1km in meters', () => {
|
||||
expect(formatDistance(0.5)).toBe('500m');
|
||||
|
||||
@@ -265,13 +265,14 @@ describe('RepeaterDashboard', () => {
|
||||
|
||||
expect(
|
||||
screen.getByText(
|
||||
'GPS info failed to fetch; map and distance data not available. This may be due to missing or zero-zero GPS data on the repeater, or due to transient fetch failure. Try refreshing.'
|
||||
'Map and distance data are unavailable until this repeater has a valid position from either its advert or a Node Info fetch.'
|
||||
)
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText('No repeater position available')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Dist')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows neighbor distance when repeater radio settings include valid coords', () => {
|
||||
it('shows neighbor distance when repeater node info includes valid coords', () => {
|
||||
mockHook.loggedIn = true;
|
||||
mockHook.paneData.neighbors = {
|
||||
neighbors: [
|
||||
@@ -324,13 +325,69 @@ describe('RepeaterDashboard', () => {
|
||||
render(<RepeaterDashboard {...defaultProps} contacts={contactsWithNeighbor} />);
|
||||
|
||||
expect(screen.getByText('Dist')).toBeInTheDocument();
|
||||
expect(screen.getByText('Using repeater-reported position')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText(
|
||||
'GPS info failed to fetch; map and distance data not available. This may be due to missing or zero-zero GPS data on the repeater, or due to transient fetch failure. Try refreshing.'
|
||||
'Map and distance data are unavailable until this repeater has a valid position from either its advert or a Node Info fetch.'
|
||||
)
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('uses advert coords for neighbor distance when node info is unavailable', () => {
|
||||
mockHook.loggedIn = true;
|
||||
mockHook.paneData.neighbors = {
|
||||
neighbors: [
|
||||
{ pubkey_prefix: 'bbbbbbbbbbbb', name: 'Neighbor', snr: 7.2, last_heard_seconds: 9 },
|
||||
],
|
||||
};
|
||||
mockHook.paneData.nodeInfo = null;
|
||||
mockHook.paneStates.neighbors = {
|
||||
loading: false,
|
||||
attempt: 1,
|
||||
error: null,
|
||||
fetched_at: Date.now(),
|
||||
};
|
||||
mockHook.paneStates.nodeInfo = {
|
||||
loading: false,
|
||||
attempt: 0,
|
||||
error: null,
|
||||
fetched_at: null,
|
||||
};
|
||||
|
||||
const contactsWithAdvertAndNeighbor = [
|
||||
{
|
||||
...contacts[0],
|
||||
lat: -31.95,
|
||||
lon: 115.86,
|
||||
},
|
||||
{
|
||||
public_key: 'bbbbbbbbbbbb0000000000000000000000000000000000000000000000000000',
|
||||
name: 'Neighbor',
|
||||
type: 1,
|
||||
flags: 0,
|
||||
last_path: null,
|
||||
last_path_len: 0,
|
||||
out_path_hash_mode: 0,
|
||||
route_override_path: null,
|
||||
route_override_len: null,
|
||||
route_override_hash_mode: null,
|
||||
last_advert: null,
|
||||
lat: -31.94,
|
||||
lon: 115.87,
|
||||
last_seen: null,
|
||||
on_radio: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
},
|
||||
];
|
||||
|
||||
render(<RepeaterDashboard {...defaultProps} contacts={contactsWithAdvertAndNeighbor} />);
|
||||
|
||||
expect(screen.getByText('Dist')).toBeInTheDocument();
|
||||
expect(screen.getByText('Using advert position')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows fetching state with attempt counter', () => {
|
||||
mockHook.loggedIn = true;
|
||||
mockHook.paneStates.status = { loading: true, attempt: 2, error: null };
|
||||
@@ -401,6 +458,40 @@ describe('RepeaterDashboard', () => {
|
||||
expect(screen.getByText(/Fetched .*Just now/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('keeps repeater clock drift anchored to fetch time across remounts', () => {
|
||||
vi.useFakeTimers();
|
||||
try {
|
||||
const fetchedAt = Date.UTC(2024, 0, 1, 12, 0, 0);
|
||||
vi.setSystemTime(fetchedAt);
|
||||
|
||||
mockHook.loggedIn = true;
|
||||
mockHook.paneData.nodeInfo = {
|
||||
name: 'TestRepeater',
|
||||
lat: null,
|
||||
lon: null,
|
||||
clock_utc: '11:59:30 - 1/1/2024 UTC',
|
||||
};
|
||||
mockHook.paneStates.nodeInfo = {
|
||||
loading: false,
|
||||
attempt: 1,
|
||||
error: null,
|
||||
fetched_at: fetchedAt,
|
||||
};
|
||||
|
||||
const firstRender = render(<RepeaterDashboard {...defaultProps} />);
|
||||
expect(screen.getByText(/\(drift: 30s\)/)).toBeInTheDocument();
|
||||
|
||||
vi.setSystemTime(fetchedAt + 10 * 60 * 1000);
|
||||
firstRender.unmount();
|
||||
|
||||
render(<RepeaterDashboard {...defaultProps} />);
|
||||
expect(screen.getByText(/\(drift: 30s\)/)).toBeInTheDocument();
|
||||
expect(screen.queryByText(/\(drift: 10m30s\)/)).not.toBeInTheDocument();
|
||||
} finally {
|
||||
vi.useRealTimers();
|
||||
}
|
||||
});
|
||||
|
||||
it('renders action buttons', () => {
|
||||
mockHook.loggedIn = true;
|
||||
|
||||
|
||||
@@ -63,6 +63,7 @@ const baseSettings: AppSettings = {
|
||||
};
|
||||
|
||||
function renderModal(overrides?: {
|
||||
config?: RadioConfig | null;
|
||||
appSettings?: AppSettings;
|
||||
health?: HealthStatus;
|
||||
onSaveAppSettings?: (update: AppSettingsUpdate) => Promise<void>;
|
||||
@@ -97,7 +98,7 @@ function renderModal(overrides?: {
|
||||
const commonProps = {
|
||||
open: overrides?.open ?? true,
|
||||
pageMode: overrides?.pageMode,
|
||||
config: baseConfig,
|
||||
config: overrides?.config === undefined ? baseConfig : overrides.config,
|
||||
health: overrides?.health ?? baseHealth,
|
||||
appSettings: overrides?.appSettings ?? baseSettings,
|
||||
onClose,
|
||||
@@ -205,6 +206,52 @@ describe('SettingsModal', () => {
|
||||
expect(screen.getByText(/Configured radio contact capacity/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('keeps non-radio settings available when radio config is unavailable', () => {
|
||||
renderModal({ config: null });
|
||||
|
||||
const radioToggle = screen.getByRole('button', { name: /Radio/i });
|
||||
expect(radioToggle).toBeDisabled();
|
||||
|
||||
openLocalSection();
|
||||
expect(screen.getByLabelText('Local label text')).toBeInTheDocument();
|
||||
|
||||
openDatabaseSection();
|
||||
expect(screen.getByText('Delete Undecrypted Packets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows a radio-unavailable message instead of blocking the whole settings page', () => {
|
||||
renderModal({
|
||||
config: null,
|
||||
externalSidebarNav: true,
|
||||
desktopSection: 'radio',
|
||||
});
|
||||
|
||||
expect(
|
||||
screen.getByText('Radio settings are unavailable until a radio connects.')
|
||||
).toBeInTheDocument();
|
||||
expect(screen.queryByText('Loading configuration...')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows cached radio firmware and capacity info under the connection status', () => {
|
||||
renderModal({
|
||||
health: {
|
||||
...baseHealth,
|
||||
radio_device_info: {
|
||||
model: 'T-Echo',
|
||||
firmware_build: '2025-02-01',
|
||||
firmware_version: '1.2.3',
|
||||
max_contacts: 350,
|
||||
max_channels: 64,
|
||||
},
|
||||
},
|
||||
});
|
||||
openRadioSection();
|
||||
|
||||
expect(
|
||||
screen.getByText('T-Echo running 2025-02-01/1.2.3 (max: 350 contacts, 64 channels)')
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows reconnect action when radio connection is paused', () => {
|
||||
renderModal({
|
||||
health: { ...baseHealth, radio_state: 'paused' },
|
||||
|
||||
@@ -4,6 +4,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { Sidebar } from '../components/Sidebar';
|
||||
import { CONTACT_TYPE_REPEATER, type Channel, type Contact, type Favorite } from '../types';
|
||||
import { getStateKey, type ConversationTimes } from '../utils/conversationState';
|
||||
import { PUBLIC_CHANNEL_KEY } from '../utils/publicChannel';
|
||||
|
||||
function makeChannel(key: string, name: string): Channel {
|
||||
return {
|
||||
@@ -75,8 +76,7 @@ function renderSidebar(overrides?: {
|
||||
onToggleCracker={vi.fn()}
|
||||
onMarkAllRead={vi.fn()}
|
||||
favorites={favorites}
|
||||
sortOrder="recent"
|
||||
onSortOrderChange={vi.fn()}
|
||||
legacySortOrder="recent"
|
||||
isConversationNotificationsEnabled={overrides?.isConversationNotificationsEnabled}
|
||||
/>
|
||||
);
|
||||
@@ -85,7 +85,7 @@ function renderSidebar(overrides?: {
|
||||
}
|
||||
|
||||
function getSectionHeaderContainer(title: string): HTMLElement {
|
||||
const btn = screen.getByRole('button', { name: new RegExp(title, 'i') });
|
||||
const btn = screen.getByRole('button', { name: title });
|
||||
const container = btn.closest('div');
|
||||
if (!container) throw new Error(`Missing header container for section ${title}`);
|
||||
return container;
|
||||
@@ -142,9 +142,9 @@ describe('Sidebar section summaries', () => {
|
||||
it('expands collapsed sections during search and restores collapse state after clearing search', async () => {
|
||||
const { opsChannel, aliceName } = renderSidebar();
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /Tools/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: /Channels/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: /Contacts/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Tools' }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Channels' }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Contacts' }));
|
||||
|
||||
expect(screen.queryByText('Packet Feed')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText(opsChannel.name)).not.toBeInTheDocument();
|
||||
@@ -169,9 +169,9 @@ describe('Sidebar section summaries', () => {
|
||||
it('persists collapsed section state across unmount and remount', () => {
|
||||
const { opsChannel, aliceName, unmount } = renderSidebar();
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /Tools/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: /Channels/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: /Contacts/i }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Tools' }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Channels' }));
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Contacts' }));
|
||||
|
||||
expect(screen.queryByText('Packet Feed')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText(opsChannel.name)).not.toBeInTheDocument();
|
||||
@@ -206,8 +206,7 @@ describe('Sidebar section summaries', () => {
|
||||
onToggleCracker={vi.fn()}
|
||||
onMarkAllRead={vi.fn()}
|
||||
favorites={[]}
|
||||
sortOrder="recent"
|
||||
onSortOrderChange={vi.fn()}
|
||||
legacySortOrder="recent"
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -253,4 +252,103 @@ describe('Sidebar section summaries', () => {
|
||||
const unread = within(aliceRow).getByText('3');
|
||||
expect(bell.compareDocumentPosition(unread) & Node.DOCUMENT_POSITION_FOLLOWING).toBeTruthy();
|
||||
});
|
||||
|
||||
it('sorts each section independently and persists per-section sort preferences', () => {
|
||||
const publicChannel = makeChannel('AA'.repeat(16), 'Public');
|
||||
const zebraChannel = makeChannel('BB'.repeat(16), '#zebra');
|
||||
const alphaChannel = makeChannel('CC'.repeat(16), '#alpha');
|
||||
const zed = makeContact('11'.repeat(32), 'Zed');
|
||||
const amy = makeContact('22'.repeat(32), 'Amy');
|
||||
const relayZulu = makeContact('33'.repeat(32), 'Zulu Relay', CONTACT_TYPE_REPEATER);
|
||||
const relayAlpha = makeContact('44'.repeat(32), 'Alpha Relay', CONTACT_TYPE_REPEATER);
|
||||
|
||||
const props = {
|
||||
contacts: [zed, amy, relayZulu, relayAlpha],
|
||||
channels: [publicChannel, zebraChannel, alphaChannel],
|
||||
activeConversation: null,
|
||||
onSelectConversation: vi.fn(),
|
||||
onNewMessage: vi.fn(),
|
||||
lastMessageTimes: {
|
||||
[getStateKey('channel', zebraChannel.key)]: 300,
|
||||
[getStateKey('channel', alphaChannel.key)]: 100,
|
||||
[getStateKey('contact', zed.public_key)]: 200,
|
||||
[getStateKey('contact', amy.public_key)]: 100,
|
||||
[getStateKey('contact', relayZulu.public_key)]: 300,
|
||||
[getStateKey('contact', relayAlpha.public_key)]: 100,
|
||||
},
|
||||
unreadCounts: {},
|
||||
mentions: {},
|
||||
showCracker: false,
|
||||
crackerRunning: false,
|
||||
onToggleCracker: vi.fn(),
|
||||
onMarkAllRead: vi.fn(),
|
||||
favorites: [],
|
||||
legacySortOrder: 'recent' as const,
|
||||
};
|
||||
|
||||
const getChannelsOrder = () => screen.getAllByText(/^#/).map((node) => node.textContent);
|
||||
const getContactsOrder = () =>
|
||||
screen
|
||||
.getAllByText(/^(Amy|Zed)$/)
|
||||
.map((node) => node.textContent)
|
||||
.filter((text): text is string => Boolean(text));
|
||||
const getRepeatersOrder = () =>
|
||||
screen
|
||||
.getAllByText(/Relay$/)
|
||||
.map((node) => node.textContent)
|
||||
.filter((text): text is string => Boolean(text));
|
||||
|
||||
const { unmount } = render(<Sidebar {...props} />);
|
||||
|
||||
expect(getChannelsOrder()).toEqual(['#zebra', '#alpha']);
|
||||
expect(getContactsOrder()).toEqual(['Zed', 'Amy']);
|
||||
expect(getRepeatersOrder()).toEqual(['Zulu Relay', 'Alpha Relay']);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Sort Channels alphabetically' }));
|
||||
|
||||
expect(getChannelsOrder()).toEqual(['#alpha', '#zebra']);
|
||||
expect(getContactsOrder()).toEqual(['Zed', 'Amy']);
|
||||
expect(getRepeatersOrder()).toEqual(['Zulu Relay', 'Alpha Relay']);
|
||||
|
||||
unmount();
|
||||
render(<Sidebar {...props} />);
|
||||
|
||||
expect(getChannelsOrder()).toEqual(['#alpha', '#zebra']);
|
||||
expect(getContactsOrder()).toEqual(['Zed', 'Amy']);
|
||||
expect(getRepeatersOrder()).toEqual(['Zulu Relay', 'Alpha Relay']);
|
||||
});
|
||||
|
||||
it('pins only the canonical Public channel to the top of channel sorting', () => {
|
||||
const publicChannel = makeChannel(PUBLIC_CHANNEL_KEY, 'Public');
|
||||
const fakePublic = makeChannel('DD'.repeat(16), 'Public');
|
||||
const alphaChannel = makeChannel('CC'.repeat(16), '#alpha');
|
||||
const onSelectConversation = vi.fn();
|
||||
|
||||
render(
|
||||
<Sidebar
|
||||
contacts={[]}
|
||||
channels={[fakePublic, alphaChannel, publicChannel]}
|
||||
activeConversation={null}
|
||||
onSelectConversation={onSelectConversation}
|
||||
onNewMessage={vi.fn()}
|
||||
lastMessageTimes={{}}
|
||||
unreadCounts={{}}
|
||||
mentions={{}}
|
||||
showCracker={false}
|
||||
crackerRunning={false}
|
||||
onToggleCracker={vi.fn()}
|
||||
onMarkAllRead={vi.fn()}
|
||||
favorites={[]}
|
||||
legacySortOrder="alpha"
|
||||
/>
|
||||
);
|
||||
|
||||
fireEvent.click(screen.getAllByText('Public')[0]);
|
||||
|
||||
expect(onSelectConversation).toHaveBeenCalledWith({
|
||||
type: 'channel',
|
||||
id: PUBLIC_CHANNEL_KEY,
|
||||
name: 'Public',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,11 +8,14 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import {
|
||||
parseHashConversation,
|
||||
parseHashSettingsSection,
|
||||
getSettingsHash,
|
||||
getMapFocusHash,
|
||||
resolveChannelFromHashToken,
|
||||
resolveContactFromHashToken,
|
||||
} from '../utils/urlHash';
|
||||
import type { Channel, Contact } from '../types';
|
||||
import { PUBLIC_CHANNEL_KEY } from '../utils/publicChannel';
|
||||
|
||||
describe('parseHashConversation', () => {
|
||||
let originalHash: string;
|
||||
@@ -146,10 +149,38 @@ describe('parseHashConversation', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('settings URL hashes', () => {
|
||||
let originalHash: string;
|
||||
|
||||
beforeEach(() => {
|
||||
originalHash = window.location.hash;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
window.location.hash = originalHash;
|
||||
});
|
||||
|
||||
it('parses a valid settings section hash', () => {
|
||||
window.location.hash = '#settings/database';
|
||||
|
||||
expect(parseHashSettingsSection()).toBe('database');
|
||||
});
|
||||
|
||||
it('returns null for an invalid settings section hash', () => {
|
||||
window.location.hash = '#settings/not-a-section';
|
||||
|
||||
expect(parseHashSettingsSection()).toBeNull();
|
||||
});
|
||||
|
||||
it('builds a stable settings hash', () => {
|
||||
expect(getSettingsHash('local')).toBe('#settings/local');
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveChannelFromHashToken', () => {
|
||||
const channels: Channel[] = [
|
||||
{
|
||||
key: 'ABCDEF0123456789ABCDEF0123456789',
|
||||
key: PUBLIC_CHANNEL_KEY,
|
||||
name: 'Public',
|
||||
is_hashtag: false,
|
||||
on_radio: true,
|
||||
@@ -172,13 +203,13 @@ describe('resolveChannelFromHashToken', () => {
|
||||
];
|
||||
|
||||
it('prefers stable key lookup (case-insensitive)', () => {
|
||||
const result = resolveChannelFromHashToken('abcdef0123456789abcdef0123456789', channels);
|
||||
expect(result?.key).toBe('ABCDEF0123456789ABCDEF0123456789');
|
||||
const result = resolveChannelFromHashToken(PUBLIC_CHANNEL_KEY.toLowerCase(), channels);
|
||||
expect(result?.key).toBe(PUBLIC_CHANNEL_KEY);
|
||||
});
|
||||
|
||||
it('supports legacy name-based hash lookup', () => {
|
||||
it('resolves legacy Public hashes to the canonical Public key', () => {
|
||||
const result = resolveChannelFromHashToken('Public', channels);
|
||||
expect(result?.key).toBe('ABCDEF0123456789ABCDEF0123456789');
|
||||
expect(result?.key).toBe(PUBLIC_CHANNEL_KEY);
|
||||
});
|
||||
|
||||
it('supports legacy hashtag hash without leading #', () => {
|
||||
|
||||
@@ -1,9 +1,19 @@
|
||||
import { act, renderHook } from '@testing-library/react';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { act, renderHook, waitFor } from '@testing-library/react';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { useAppShell } from '../hooks/useAppShell';
|
||||
|
||||
describe('useAppShell', () => {
|
||||
let originalHash: string;
|
||||
|
||||
beforeEach(() => {
|
||||
originalHash = window.location.hash;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
window.location.hash = originalHash;
|
||||
});
|
||||
|
||||
it('opens new-message modal and closes the sidebar', () => {
|
||||
const { result } = renderHook(() => useAppShell());
|
||||
|
||||
@@ -34,6 +44,55 @@ describe('useAppShell', () => {
|
||||
expect(result.current.showSettings).toBe(false);
|
||||
});
|
||||
|
||||
it('initializes settings mode from the URL hash', () => {
|
||||
window.location.hash = '#settings/database';
|
||||
|
||||
const { result } = renderHook(() => useAppShell());
|
||||
|
||||
expect(result.current.showSettings).toBe(true);
|
||||
expect(result.current.settingsSection).toBe('database');
|
||||
});
|
||||
|
||||
it('syncs the selected settings section into the URL hash', async () => {
|
||||
const { result } = renderHook(() => useAppShell());
|
||||
|
||||
act(() => {
|
||||
result.current.handleToggleSettingsView();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(window.location.hash).toBe('#settings/radio');
|
||||
});
|
||||
|
||||
act(() => {
|
||||
result.current.setSettingsSection('fanout');
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(window.location.hash).toBe('#settings/fanout');
|
||||
});
|
||||
});
|
||||
|
||||
it('restores the previous hash when settings close', async () => {
|
||||
window.location.hash = '#channel/test/Public';
|
||||
|
||||
const { result } = renderHook(() => useAppShell());
|
||||
|
||||
act(() => {
|
||||
result.current.handleToggleSettingsView();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(window.location.hash).toBe('#settings/radio');
|
||||
});
|
||||
|
||||
act(() => {
|
||||
result.current.handleCloseSettingsView();
|
||||
});
|
||||
|
||||
expect(window.location.hash).toBe('#channel/test/Public');
|
||||
});
|
||||
|
||||
it('toggles the cracker shell without affecting sidebar state', () => {
|
||||
const { result } = renderHook(() => useAppShell());
|
||||
|
||||
|
||||
@@ -35,11 +35,6 @@ vi.mock('../components/ui/sonner', () => ({
|
||||
toast: { success: vi.fn(), error: vi.fn() },
|
||||
}));
|
||||
|
||||
// Mock messageCache
|
||||
vi.mock('../messageCache', () => ({
|
||||
remove: vi.fn(),
|
||||
}));
|
||||
|
||||
function makeContact(suffix: string): Contact {
|
||||
const key = suffix.padStart(64, '0');
|
||||
return {
|
||||
@@ -69,6 +64,7 @@ function makeContacts(count: number, startIndex = 0): Contact[] {
|
||||
|
||||
describe('useContactsAndChannels', () => {
|
||||
const setActiveConversation = vi.fn();
|
||||
const removeConversationMessages = vi.fn();
|
||||
const pendingDeleteFallbackRef = { current: false };
|
||||
const hasSetDefaultConversation = { current: false };
|
||||
|
||||
@@ -88,6 +84,7 @@ describe('useContactsAndChannels', () => {
|
||||
setActiveConversation,
|
||||
pendingDeleteFallbackRef,
|
||||
hasSetDefaultConversation,
|
||||
removeConversationMessages,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
@@ -63,7 +63,7 @@ function createArgs(overrides: Partial<Parameters<typeof useConversationActions>
|
||||
activeConversationRef: { current: activeConversation },
|
||||
setContacts: vi.fn(),
|
||||
setChannels: vi.fn(),
|
||||
addMessageIfNew: vi.fn(() => true),
|
||||
observeMessage: vi.fn(() => ({ added: true, activeConversation: true })),
|
||||
messageInputRef: { current: { appendText: vi.fn() } },
|
||||
...overrides,
|
||||
};
|
||||
@@ -85,7 +85,7 @@ describe('useConversationActions', () => {
|
||||
});
|
||||
|
||||
expect(mocks.api.sendChannelMessage).toHaveBeenCalledWith(publicChannel.key, sentMessage.text);
|
||||
expect(args.addMessageIfNew).toHaveBeenCalledWith(sentMessage);
|
||||
expect(args.observeMessage).toHaveBeenCalledWith(sentMessage);
|
||||
});
|
||||
|
||||
it('does not append a sent message after the active conversation changes', async () => {
|
||||
@@ -111,7 +111,7 @@ describe('useConversationActions', () => {
|
||||
await sendPromise;
|
||||
});
|
||||
|
||||
expect(args.addMessageIfNew).not.toHaveBeenCalled();
|
||||
expect(args.observeMessage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('appends sender mentions into the message input', () => {
|
||||
@@ -146,7 +146,7 @@ describe('useConversationActions', () => {
|
||||
});
|
||||
|
||||
expect(mocks.api.resendChannelMessage).toHaveBeenCalledWith(sentMessage.id, true);
|
||||
expect(args.addMessageIfNew).toHaveBeenCalledWith(resentMessage);
|
||||
expect(args.observeMessage).toHaveBeenCalledWith(resentMessage);
|
||||
});
|
||||
|
||||
it('does not append a byte-perfect resend locally', async () => {
|
||||
@@ -162,7 +162,7 @@ describe('useConversationActions', () => {
|
||||
await result.current.handleResendChannelMessage(sentMessage.id, false);
|
||||
});
|
||||
|
||||
expect(args.addMessageIfNew).not.toHaveBeenCalled();
|
||||
expect(args.observeMessage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('does not append a resend if the user has switched conversations', async () => {
|
||||
@@ -190,7 +190,7 @@ describe('useConversationActions', () => {
|
||||
await resendPromise;
|
||||
});
|
||||
|
||||
expect(args.addMessageIfNew).not.toHaveBeenCalled();
|
||||
expect(args.observeMessage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('merges returned contact data after path discovery', async () => {
|
||||
|
||||
@@ -1,21 +1,31 @@
|
||||
import { act, renderHook, waitFor } from '@testing-library/react';
|
||||
import { beforeEach, describe, expect, it, vi, type Mock } from 'vitest';
|
||||
|
||||
import * as messageCache from '../messageCache';
|
||||
import { useConversationMessages } from '../hooks/useConversationMessages';
|
||||
import { api } from '../api';
|
||||
import {
|
||||
conversationMessageCache,
|
||||
useConversationMessages,
|
||||
} from '../hooks/useConversationMessages';
|
||||
import type { Conversation, Message } from '../types';
|
||||
|
||||
const mockGetMessages = vi.fn<(...args: unknown[]) => Promise<Message[]>>();
|
||||
const mockGetMessages = vi.fn<typeof api.getMessages>();
|
||||
const mockGetMessagesAround = vi.fn();
|
||||
|
||||
vi.mock('../api', () => ({
|
||||
api: {
|
||||
getMessages: (...args: unknown[]) => mockGetMessages(...args),
|
||||
getMessages: (...args: Parameters<typeof api.getMessages>) => mockGetMessages(...args),
|
||||
getMessagesAround: (...args: unknown[]) => mockGetMessagesAround(...args),
|
||||
},
|
||||
isAbortError: (err: unknown) => err instanceof DOMException && err.name === 'AbortError',
|
||||
}));
|
||||
|
||||
const mockToastError = vi.fn();
|
||||
vi.mock('../components/ui/sonner', () => ({
|
||||
toast: {
|
||||
error: (...args: unknown[]) => mockToastError(...args),
|
||||
},
|
||||
}));
|
||||
|
||||
function createConversation(): Conversation {
|
||||
return {
|
||||
type: 'contact',
|
||||
@@ -54,7 +64,8 @@ function createDeferred<T>() {
|
||||
describe('useConversationMessages ACK ordering', () => {
|
||||
beforeEach(() => {
|
||||
mockGetMessages.mockReset();
|
||||
messageCache.clear();
|
||||
conversationMessageCache.clear();
|
||||
mockToastError.mockReset();
|
||||
});
|
||||
|
||||
it('applies buffered ACK when message is added after ACK event', async () => {
|
||||
@@ -67,11 +78,11 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
|
||||
const paths = [{ path: 'A1B2', received_at: 1700000010 }];
|
||||
act(() => {
|
||||
result.current.updateMessageAck(42, 2, paths);
|
||||
result.current.receiveMessageAck(42, 2, paths);
|
||||
});
|
||||
|
||||
act(() => {
|
||||
const added = result.current.addMessageIfNew(
|
||||
const { added } = result.current.observeMessage(
|
||||
createMessage({ id: 42, acked: 0, paths: null })
|
||||
);
|
||||
expect(added).toBe(true);
|
||||
@@ -91,7 +102,7 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
|
||||
const paths = [{ path: 'C3D4', received_at: 1700000011 }];
|
||||
act(() => {
|
||||
result.current.updateMessageAck(42, 1, paths);
|
||||
result.current.receiveMessageAck(42, 1, paths);
|
||||
});
|
||||
|
||||
deferred.resolve([createMessage({ id: 42, acked: 0, paths: null })]);
|
||||
@@ -109,7 +120,7 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
await waitFor(() => expect(mockGetMessages).toHaveBeenCalledTimes(1));
|
||||
|
||||
act(() => {
|
||||
const added = result.current.addMessageIfNew(
|
||||
const { added } = result.current.observeMessage(
|
||||
createMessage({
|
||||
id: 99,
|
||||
text: 'ws-arrived',
|
||||
@@ -144,7 +155,7 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
|
||||
act(() => {
|
||||
result.current.addMessageIfNew(createMessage({ id: 42, acked: 0, paths: null }));
|
||||
result.current.observeMessage(createMessage({ id: 42, acked: 0, paths: null }));
|
||||
});
|
||||
|
||||
const highAckPaths = [
|
||||
@@ -154,8 +165,8 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
const staleAckPaths = [{ path: 'A1B2', received_at: 1700000010 }];
|
||||
|
||||
act(() => {
|
||||
result.current.updateMessageAck(42, 3, highAckPaths);
|
||||
result.current.updateMessageAck(42, 2, staleAckPaths);
|
||||
result.current.receiveMessageAck(42, 3, highAckPaths);
|
||||
result.current.receiveMessageAck(42, 2, staleAckPaths);
|
||||
});
|
||||
|
||||
expect(result.current.messages[0].acked).toBe(3);
|
||||
@@ -166,7 +177,7 @@ describe('useConversationMessages ACK ordering', () => {
|
||||
describe('useConversationMessages conversation switch', () => {
|
||||
beforeEach(() => {
|
||||
mockGetMessages.mockReset();
|
||||
messageCache.clear();
|
||||
conversationMessageCache.clear();
|
||||
});
|
||||
|
||||
it('resets loadingOlder when switching conversations mid-fetch', async () => {
|
||||
@@ -291,7 +302,7 @@ describe('useConversationMessages conversation switch', () => {
|
||||
describe('useConversationMessages background reconcile ordering', () => {
|
||||
beforeEach(() => {
|
||||
mockGetMessages.mockReset();
|
||||
messageCache.clear();
|
||||
conversationMessageCache.clear();
|
||||
});
|
||||
|
||||
it('ignores stale reconnect reconcile responses that finish after newer ones', async () => {
|
||||
@@ -312,8 +323,8 @@ describe('useConversationMessages background reconcile ordering', () => {
|
||||
.mockReturnValueOnce(secondReconcile.promise);
|
||||
|
||||
act(() => {
|
||||
result.current.triggerReconcile();
|
||||
result.current.triggerReconcile();
|
||||
result.current.reconcileOnReconnect();
|
||||
result.current.reconcileOnReconnect();
|
||||
});
|
||||
|
||||
secondReconcile.resolve([createMessage({ id: 42, text: 'newer snapshot', acked: 2 })]);
|
||||
@@ -333,11 +344,8 @@ describe('useConversationMessages background reconcile ordering', () => {
|
||||
const conv = createConversation();
|
||||
const cachedMessage = createMessage({ id: 42, text: 'cached snapshot' });
|
||||
|
||||
messageCache.set(conv.id, {
|
||||
conversationMessageCache.set(conv.id, {
|
||||
messages: [cachedMessage],
|
||||
seenContent: new Set([
|
||||
`PRIV-${cachedMessage.conversation_key}-${cachedMessage.text}-${cachedMessage.sender_timestamp}`,
|
||||
]),
|
||||
hasOlderMessages: true,
|
||||
});
|
||||
|
||||
@@ -356,7 +364,7 @@ describe('useConversationMessages background reconcile ordering', () => {
|
||||
describe('useConversationMessages older-page dedup and reentry', () => {
|
||||
beforeEach(() => {
|
||||
mockGetMessages.mockReset();
|
||||
messageCache.clear();
|
||||
conversationMessageCache.clear();
|
||||
});
|
||||
|
||||
it('prevents duplicate overlapping older-page fetches in the same tick', async () => {
|
||||
@@ -447,13 +455,63 @@ describe('useConversationMessages older-page dedup and reentry', () => {
|
||||
expect(result.current.messages.filter((msg) => msg.id === 0)).toHaveLength(1);
|
||||
expect(result.current.messages).toHaveLength(201);
|
||||
});
|
||||
|
||||
it('aborts stale older-page requests on conversation switch without toasting', async () => {
|
||||
const convA: Conversation = { type: 'contact', id: 'conv_a', name: 'Contact A' };
|
||||
const convB: Conversation = { type: 'contact', id: 'conv_b', name: 'Contact B' };
|
||||
|
||||
const fullPage = Array.from({ length: 200 }, (_, i) =>
|
||||
createMessage({
|
||||
id: i + 1,
|
||||
conversation_key: 'conv_a',
|
||||
text: `msg-${i + 1}`,
|
||||
sender_timestamp: 1700000000 + i,
|
||||
received_at: 1700000000 + i,
|
||||
})
|
||||
);
|
||||
mockGetMessages.mockResolvedValueOnce(fullPage);
|
||||
|
||||
const olderDeferred = createDeferred<Message[]>();
|
||||
let olderSignal: AbortSignal | undefined;
|
||||
mockGetMessages.mockImplementationOnce((_, signal?: AbortSignal) => {
|
||||
olderSignal = signal;
|
||||
signal?.addEventListener('abort', () => {
|
||||
olderDeferred.resolve([]);
|
||||
});
|
||||
return new Promise<Message[]>((_, reject) => {
|
||||
signal?.addEventListener('abort', () => {
|
||||
reject(new DOMException('The operation was aborted', 'AbortError'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ conv }: { conv: Conversation }) => useConversationMessages(conv),
|
||||
{ initialProps: { conv: convA } }
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
act(() => {
|
||||
void result.current.fetchOlderMessages();
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.loadingOlder).toBe(true));
|
||||
|
||||
mockGetMessages.mockResolvedValueOnce([createMessage({ id: 999, conversation_key: 'conv_b' })]);
|
||||
rerender({ conv: convB });
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
expect(olderSignal?.aborted).toBe(true);
|
||||
expect(mockToastError).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('useConversationMessages forward pagination', () => {
|
||||
beforeEach(() => {
|
||||
mockGetMessages.mockReset();
|
||||
mockGetMessagesAround.mockReset();
|
||||
messageCache.clear();
|
||||
conversationMessageCache.clear();
|
||||
mockToastError.mockReset();
|
||||
});
|
||||
|
||||
it('fetchNewerMessages loads newer messages and appends them', async () => {
|
||||
@@ -537,7 +595,7 @@ describe('useConversationMessages forward pagination', () => {
|
||||
|
||||
// Simulate WS adding a message with the same content key
|
||||
act(() => {
|
||||
result.current.addMessageIfNew(
|
||||
result.current.observeMessage(
|
||||
createMessage({
|
||||
id: 2,
|
||||
conversation_key: 'ch1',
|
||||
@@ -568,6 +626,70 @@ describe('useConversationMessages forward pagination', () => {
|
||||
expect(dupes).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('defers reconnect reconcile until forward pagination reaches the live tail', async () => {
|
||||
const conv: Conversation = { type: 'channel', id: 'ch1', name: 'Channel' };
|
||||
|
||||
mockGetMessagesAround.mockResolvedValueOnce({
|
||||
messages: [
|
||||
createMessage({
|
||||
id: 1,
|
||||
conversation_key: 'ch1',
|
||||
text: 'older-context',
|
||||
sender_timestamp: 1700000000,
|
||||
received_at: 1700000000,
|
||||
}),
|
||||
],
|
||||
has_older: false,
|
||||
has_newer: true,
|
||||
});
|
||||
|
||||
const { result } = renderHook(
|
||||
({ conv, target }: { conv: Conversation; target: number | null }) =>
|
||||
useConversationMessages(conv, target),
|
||||
{ initialProps: { conv, target: 1 } }
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
expect(result.current.hasNewerMessages).toBe(true);
|
||||
|
||||
act(() => {
|
||||
result.current.reconcileOnReconnect();
|
||||
});
|
||||
|
||||
expect(mockGetMessages).not.toHaveBeenCalled();
|
||||
|
||||
mockGetMessages
|
||||
.mockResolvedValueOnce([
|
||||
createMessage({
|
||||
id: 2,
|
||||
conversation_key: 'ch1',
|
||||
text: 'newer-page',
|
||||
sender_timestamp: 1700000001,
|
||||
received_at: 1700000001,
|
||||
}),
|
||||
])
|
||||
.mockResolvedValueOnce([
|
||||
createMessage({
|
||||
id: 2,
|
||||
conversation_key: 'ch1',
|
||||
text: 'newer-page',
|
||||
sender_timestamp: 1700000001,
|
||||
received_at: 1700000001,
|
||||
acked: 3,
|
||||
}),
|
||||
]);
|
||||
|
||||
await act(async () => {
|
||||
await result.current.fetchNewerMessages();
|
||||
});
|
||||
|
||||
await waitFor(() => expect(mockGetMessages).toHaveBeenCalledTimes(2));
|
||||
await waitFor(() =>
|
||||
expect(result.current.messages.find((message) => message.id === 2)?.acked).toBe(3)
|
||||
);
|
||||
expect(result.current.hasNewerMessages).toBe(false);
|
||||
});
|
||||
|
||||
it('jumpToBottom clears hasNewerMessages and refetches latest', async () => {
|
||||
const conv: Conversation = { type: 'channel', id: 'ch1', name: 'Channel' };
|
||||
|
||||
@@ -618,6 +740,118 @@ describe('useConversationMessages forward pagination', () => {
|
||||
expect(result.current.messages[0].text).toBe('latest-msg');
|
||||
});
|
||||
|
||||
it('jumpToBottom clears deferred reconnect reconcile without an extra reconcile fetch', async () => {
|
||||
const conv: Conversation = { type: 'channel', id: 'ch1', name: 'Channel' };
|
||||
|
||||
mockGetMessagesAround.mockResolvedValueOnce({
|
||||
messages: [
|
||||
createMessage({
|
||||
id: 5,
|
||||
conversation_key: 'ch1',
|
||||
text: 'around-msg',
|
||||
sender_timestamp: 1700000005,
|
||||
received_at: 1700000005,
|
||||
}),
|
||||
],
|
||||
has_older: true,
|
||||
has_newer: true,
|
||||
});
|
||||
|
||||
const { result } = renderHook(
|
||||
({ conv, target }: { conv: Conversation; target: number | null }) =>
|
||||
useConversationMessages(conv, target),
|
||||
{ initialProps: { conv, target: 5 } }
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
|
||||
act(() => {
|
||||
result.current.reconcileOnReconnect();
|
||||
});
|
||||
|
||||
mockGetMessages.mockResolvedValueOnce([
|
||||
createMessage({
|
||||
id: 10,
|
||||
conversation_key: 'ch1',
|
||||
text: 'latest-msg',
|
||||
sender_timestamp: 1700000010,
|
||||
received_at: 1700000010,
|
||||
}),
|
||||
]);
|
||||
|
||||
act(() => {
|
||||
result.current.jumpToBottom();
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
await waitFor(() => expect(mockGetMessages).toHaveBeenCalledTimes(1));
|
||||
expect(result.current.messages[0].text).toBe('latest-msg');
|
||||
expect(result.current.hasNewerMessages).toBe(false);
|
||||
});
|
||||
|
||||
it('aborts stale newer-page requests on conversation switch without toasting', async () => {
|
||||
const convA: Conversation = { type: 'channel', id: 'ch1', name: 'Channel A' };
|
||||
const convB: Conversation = { type: 'channel', id: 'ch2', name: 'Channel B' };
|
||||
|
||||
mockGetMessagesAround.mockResolvedValueOnce({
|
||||
messages: [
|
||||
createMessage({
|
||||
id: 1,
|
||||
type: 'CHAN',
|
||||
conversation_key: 'ch1',
|
||||
text: 'msg-0',
|
||||
sender_timestamp: 1700000000,
|
||||
received_at: 1700000000,
|
||||
}),
|
||||
],
|
||||
has_older: false,
|
||||
has_newer: true,
|
||||
});
|
||||
|
||||
let newerSignal: AbortSignal | undefined;
|
||||
mockGetMessages.mockImplementationOnce((_, signal?: AbortSignal) => {
|
||||
newerSignal = signal;
|
||||
return new Promise<Message[]>((_, reject) => {
|
||||
signal?.addEventListener('abort', () => {
|
||||
reject(new DOMException('The operation was aborted', 'AbortError'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const initialProps: { conv: Conversation; target: number | null } = {
|
||||
conv: convA,
|
||||
target: 1,
|
||||
};
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ conv, target }: { conv: Conversation; target: number | null }) =>
|
||||
useConversationMessages(conv, target),
|
||||
{ initialProps }
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
|
||||
act(() => {
|
||||
void result.current.fetchNewerMessages();
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.loadingNewer).toBe(true));
|
||||
|
||||
mockGetMessages.mockResolvedValueOnce([
|
||||
createMessage({
|
||||
id: 999,
|
||||
type: 'CHAN',
|
||||
conversation_key: 'ch2',
|
||||
text: 'conv-b',
|
||||
}),
|
||||
]);
|
||||
rerender({ conv: convB, target: null });
|
||||
|
||||
await waitFor(() => expect(result.current.messagesLoading).toBe(false));
|
||||
expect(newerSignal?.aborted).toBe(true);
|
||||
expect(mockToastError).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('preserves around-loaded messages when the jump target is cleared in the same conversation', async () => {
|
||||
const conv: Conversation = { type: 'channel', id: 'ch1', name: 'Channel' };
|
||||
|
||||
|
||||
@@ -5,7 +5,8 @@
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { getMessageContentKey, mergePendingAck } from '../hooks/useConversationMessages';
|
||||
import { mergePendingAck } from '../hooks/useConversationMessages';
|
||||
import { getMessageContentKey } from '../utils/messageIdentity';
|
||||
import type { Message } from '../types';
|
||||
|
||||
function createMessage(overrides: Partial<Message> = {}): Message {
|
||||
|
||||
@@ -12,12 +12,6 @@ const mocks = vi.hoisted(() => ({
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
messageCache: {
|
||||
addMessage: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
rename: vi.fn(),
|
||||
updateAck: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../api', () => ({
|
||||
@@ -28,8 +22,6 @@ vi.mock('../components/ui/sonner', () => ({
|
||||
toast: mocks.toast,
|
||||
}));
|
||||
|
||||
vi.mock('../messageCache', () => mocks.messageCache);
|
||||
|
||||
const publicChannel: Channel = {
|
||||
key: '8B3387E9C5CDEA6AC9E5EDBAA115CD72',
|
||||
name: 'Public',
|
||||
@@ -66,7 +58,7 @@ function createRealtimeArgs(overrides: Partial<Parameters<typeof useRealtimeAppS
|
||||
setHealth,
|
||||
fetchConfig: vi.fn(),
|
||||
setRawPackets,
|
||||
triggerReconcile: vi.fn(),
|
||||
reconcileOnReconnect: vi.fn(),
|
||||
refreshUnreads: vi.fn(async () => {}),
|
||||
setChannels,
|
||||
fetchAllContacts: vi.fn(async () => [] as Contact[]),
|
||||
@@ -74,15 +66,15 @@ function createRealtimeArgs(overrides: Partial<Parameters<typeof useRealtimeAppS
|
||||
blockedKeysRef: { current: [] as string[] },
|
||||
blockedNamesRef: { current: [] as string[] },
|
||||
activeConversationRef: { current: null as Conversation | null },
|
||||
hasNewerMessagesRef: { current: false },
|
||||
addMessageIfNew: vi.fn(),
|
||||
trackNewMessage: vi.fn(),
|
||||
incrementUnread: vi.fn(),
|
||||
observeMessage: vi.fn(() => ({ added: false, activeConversation: false })),
|
||||
recordMessageEvent: vi.fn(),
|
||||
renameConversationState: vi.fn(),
|
||||
checkMention: vi.fn(() => false),
|
||||
pendingDeleteFallbackRef: { current: false },
|
||||
setActiveConversation: vi.fn(),
|
||||
updateMessageAck: vi.fn(),
|
||||
renameConversationMessages: vi.fn(),
|
||||
removeConversationMessages: vi.fn(),
|
||||
receiveMessageAck: vi.fn(),
|
||||
notifyIncomingMessage: vi.fn(),
|
||||
...overrides,
|
||||
},
|
||||
@@ -133,7 +125,49 @@ describe('useRealtimeAppState', () => {
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(args.triggerReconcile).toHaveBeenCalledTimes(1);
|
||||
expect(args.reconcileOnReconnect).toHaveBeenCalledTimes(1);
|
||||
expect(args.refreshUnreads).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.api.getChannels).toHaveBeenCalledTimes(1);
|
||||
expect(args.fetchAllContacts).toHaveBeenCalledTimes(1);
|
||||
expect(fns.setRawPackets).toHaveBeenCalledWith([]);
|
||||
expect(fns.setChannels).toHaveBeenCalledWith([publicChannel]);
|
||||
expect(fns.setContacts).toHaveBeenCalledWith(contacts);
|
||||
});
|
||||
});
|
||||
|
||||
it('reconnect skips active-conversation reconcile while browsing mid-history', async () => {
|
||||
const contacts: Contact[] = [
|
||||
{
|
||||
public_key: 'bb'.repeat(32),
|
||||
name: 'Bob',
|
||||
type: 1,
|
||||
flags: 0,
|
||||
last_path: null,
|
||||
last_path_len: 0,
|
||||
out_path_hash_mode: 0,
|
||||
last_advert: null,
|
||||
lat: null,
|
||||
lon: null,
|
||||
last_seen: null,
|
||||
on_radio: false,
|
||||
last_contacted: null,
|
||||
last_read_at: null,
|
||||
first_seen: null,
|
||||
},
|
||||
];
|
||||
|
||||
const { args, fns } = createRealtimeArgs({
|
||||
fetchAllContacts: vi.fn(async () => contacts),
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useRealtimeAppState(args));
|
||||
|
||||
act(() => {
|
||||
result.current.onReconnect?.();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(args.reconcileOnReconnect).toHaveBeenCalledTimes(1);
|
||||
expect(args.refreshUnreads).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.api.getChannels).toHaveBeenCalledTimes(1);
|
||||
expect(args.fetchAllContacts).toHaveBeenCalledTimes(1);
|
||||
@@ -144,9 +178,9 @@ describe('useRealtimeAppState', () => {
|
||||
});
|
||||
|
||||
it('tracks unread state for a new non-active incoming message', () => {
|
||||
mocks.messageCache.addMessage.mockReturnValue(true);
|
||||
const { args } = createRealtimeArgs({
|
||||
checkMention: vi.fn(() => true),
|
||||
observeMessage: vi.fn(() => ({ added: true, activeConversation: false })),
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useRealtimeAppState(args));
|
||||
@@ -155,17 +189,13 @@ describe('useRealtimeAppState', () => {
|
||||
result.current.onMessage?.(incomingDm);
|
||||
});
|
||||
|
||||
expect(args.addMessageIfNew).not.toHaveBeenCalled();
|
||||
expect(args.trackNewMessage).toHaveBeenCalledWith(incomingDm);
|
||||
expect(mocks.messageCache.addMessage).toHaveBeenCalledWith(
|
||||
incomingDm.conversation_key,
|
||||
incomingDm,
|
||||
expect.any(String)
|
||||
);
|
||||
expect(args.incrementUnread).toHaveBeenCalledWith(
|
||||
`contact-${incomingDm.conversation_key}`,
|
||||
true
|
||||
);
|
||||
expect(args.observeMessage).toHaveBeenCalledWith(incomingDm);
|
||||
expect(args.recordMessageEvent).toHaveBeenCalledWith({
|
||||
msg: incomingDm,
|
||||
activeConversation: false,
|
||||
isNewMessage: true,
|
||||
hasMention: true,
|
||||
});
|
||||
expect(args.notifyIncomingMessage).toHaveBeenCalledWith(incomingDm);
|
||||
});
|
||||
|
||||
@@ -190,7 +220,7 @@ describe('useRealtimeAppState', () => {
|
||||
});
|
||||
|
||||
expect(fns.setContacts).toHaveBeenCalledWith(expect.any(Function));
|
||||
expect(mocks.messageCache.remove).toHaveBeenCalledWith(incomingDm.conversation_key);
|
||||
expect(args.removeConversationMessages).toHaveBeenCalledWith(incomingDm.conversation_key);
|
||||
expect(args.setActiveConversation).toHaveBeenCalledWith(null);
|
||||
expect(pendingDeleteFallbackRef.current).toBe(true);
|
||||
});
|
||||
@@ -232,7 +262,7 @@ describe('useRealtimeAppState', () => {
|
||||
});
|
||||
|
||||
expect(fns.setContacts).toHaveBeenCalledWith(expect.any(Function));
|
||||
expect(mocks.messageCache.rename).toHaveBeenCalledWith(
|
||||
expect(args.renameConversationMessages).toHaveBeenCalledWith(
|
||||
previousPublicKey,
|
||||
resolvedContact.public_key
|
||||
);
|
||||
|
||||
@@ -35,6 +35,8 @@ vi.mock('../components/ui/sonner', () => ({
|
||||
// Get mock reference — cast to Record<string, Mock> for type-safe mock method access
|
||||
const { api: _rawApi } = await import('../api');
|
||||
const mockApi = _rawApi as unknown as Record<string, Mock>;
|
||||
const { toast } = await import('../components/ui/sonner');
|
||||
const mockToast = toast as unknown as Record<string, Mock>;
|
||||
|
||||
const REPEATER_KEY = 'aa'.repeat(32);
|
||||
|
||||
@@ -58,7 +60,11 @@ describe('useRepeaterDashboard', () => {
|
||||
});
|
||||
|
||||
it('login sets loggedIn on success', async () => {
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({ status: 'ok' });
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({
|
||||
status: 'ok',
|
||||
authenticated: true,
|
||||
message: null,
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useRepeaterDashboard(repeaterConversation));
|
||||
|
||||
@@ -72,7 +78,11 @@ describe('useRepeaterDashboard', () => {
|
||||
});
|
||||
|
||||
it('login sets error on failure', async () => {
|
||||
mockApi.repeaterLogin.mockRejectedValueOnce(new Error('Auth failed'));
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({
|
||||
status: 'error',
|
||||
authenticated: false,
|
||||
message: 'Auth failed',
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useRepeaterDashboard(repeaterConversation));
|
||||
|
||||
@@ -80,12 +90,19 @@ describe('useRepeaterDashboard', () => {
|
||||
await result.current.login('bad');
|
||||
});
|
||||
|
||||
expect(result.current.loggedIn).toBe(false);
|
||||
expect(result.current.loggedIn).toBe(true);
|
||||
expect(result.current.loginError).toBe('Auth failed');
|
||||
expect(mockToast.error).toHaveBeenCalledWith('Login not confirmed', {
|
||||
description: 'Auth failed',
|
||||
});
|
||||
});
|
||||
|
||||
it('loginAsGuest calls login with empty password', async () => {
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({ status: 'ok' });
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({
|
||||
status: 'ok',
|
||||
authenticated: true,
|
||||
message: null,
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useRepeaterDashboard(repeaterConversation));
|
||||
|
||||
@@ -97,6 +114,23 @@ describe('useRepeaterDashboard', () => {
|
||||
expect(result.current.loggedIn).toBe(true);
|
||||
});
|
||||
|
||||
it('login still opens dashboard when request rejects', async () => {
|
||||
mockApi.repeaterLogin.mockRejectedValueOnce(new Error('Network error'));
|
||||
|
||||
const { result } = renderHook(() => useRepeaterDashboard(repeaterConversation));
|
||||
|
||||
await act(async () => {
|
||||
await result.current.login('secret');
|
||||
});
|
||||
|
||||
expect(result.current.loggedIn).toBe(true);
|
||||
expect(result.current.loginError).toBe('Network error');
|
||||
expect(mockToast.error).toHaveBeenCalledWith('Login request failed', {
|
||||
description:
|
||||
'Network error. The dashboard is still available, but repeater operations may fail until a login succeeds.',
|
||||
});
|
||||
});
|
||||
|
||||
it('refreshPane stores data on success', async () => {
|
||||
const statusData = {
|
||||
battery_volts: 4.2,
|
||||
@@ -374,9 +408,29 @@ describe('useRepeaterDashboard', () => {
|
||||
expect(mockApi.repeaterNeighbors).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('refreshing neighbors skips node info prefetch when advert location already exists', async () => {
|
||||
mockApi.repeaterNeighbors.mockResolvedValueOnce({ neighbors: [] });
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useRepeaterDashboard(repeaterConversation, { hasAdvertLocation: true })
|
||||
);
|
||||
|
||||
await act(async () => {
|
||||
await result.current.refreshPane('neighbors');
|
||||
});
|
||||
|
||||
expect(mockApi.repeaterNodeInfo).not.toHaveBeenCalled();
|
||||
expect(mockApi.repeaterNeighbors).toHaveBeenCalledTimes(1);
|
||||
expect(result.current.paneData.neighbors).toEqual({ neighbors: [] });
|
||||
});
|
||||
|
||||
it('restores dashboard state when navigating away and back to the same repeater', async () => {
|
||||
const statusData = { battery_volts: 4.2 };
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({ status: 'ok' });
|
||||
mockApi.repeaterLogin.mockResolvedValueOnce({
|
||||
status: 'ok',
|
||||
authenticated: true,
|
||||
message: null,
|
||||
});
|
||||
mockApi.repeaterStatus.mockResolvedValueOnce(statusData);
|
||||
mockApi.sendRepeaterCommand.mockResolvedValueOnce({
|
||||
command: 'ver',
|
||||
|
||||
@@ -10,7 +10,8 @@ import { act, renderHook } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
import { useUnreadCounts } from '../hooks/useUnreadCounts';
|
||||
import type { Channel, Contact, Conversation } from '../types';
|
||||
import type { Channel, Contact, Conversation, Message } from '../types';
|
||||
import { getStateKey } from '../utils/conversationState';
|
||||
|
||||
// Mock api module
|
||||
vi.mock('../api', () => ({
|
||||
@@ -57,6 +58,25 @@ function makeContact(pubkey: string): Contact {
|
||||
};
|
||||
}
|
||||
|
||||
function makeMessage(overrides: Partial<Message> = {}): Message {
|
||||
return {
|
||||
id: 1,
|
||||
type: 'PRIV',
|
||||
conversation_key: CONTACT_KEY,
|
||||
text: 'hello',
|
||||
sender_timestamp: 1700000000,
|
||||
received_at: 1700000001,
|
||||
paths: null,
|
||||
txt_type: 0,
|
||||
signature: null,
|
||||
sender_key: null,
|
||||
outgoing: false,
|
||||
acked: 0,
|
||||
sender_name: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
const CHANNEL_KEY = 'AABB00112233445566778899AABBCCDD';
|
||||
const CONTACT_KEY = '00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff';
|
||||
|
||||
@@ -332,4 +352,74 @@ describe('useUnreadCounts', () => {
|
||||
// Raw view doesn't filter any conversation's unreads
|
||||
expect(result.current.unreadCounts[`channel-${CHANNEL_KEY}`]).toBe(5);
|
||||
});
|
||||
|
||||
it('recordMessageEvent updates last-message time and unread count for new inactive incoming messages', async () => {
|
||||
const mocks = await getMockedApi();
|
||||
const { result } = renderWith({});
|
||||
|
||||
await act(async () => {
|
||||
await vi.waitFor(() => expect(mocks.getUnreads).toHaveBeenCalled());
|
||||
});
|
||||
|
||||
const msg = makeMessage({
|
||||
id: 5,
|
||||
type: 'CHAN',
|
||||
conversation_key: CHANNEL_KEY,
|
||||
received_at: 1700001234,
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
result.current.recordMessageEvent({
|
||||
msg,
|
||||
activeConversation: false,
|
||||
isNewMessage: true,
|
||||
hasMention: true,
|
||||
});
|
||||
});
|
||||
|
||||
expect(result.current.unreadCounts[getStateKey('channel', CHANNEL_KEY)]).toBe(1);
|
||||
expect(result.current.mentions[getStateKey('channel', CHANNEL_KEY)]).toBe(true);
|
||||
expect(result.current.lastMessageTimes[getStateKey('channel', CHANNEL_KEY)]).toBe(1700001234);
|
||||
});
|
||||
|
||||
it('recordMessageEvent skips unread increment for active or non-new messages but still tracks time', async () => {
|
||||
const mocks = await getMockedApi();
|
||||
const { result } = renderWith({});
|
||||
|
||||
await act(async () => {
|
||||
await vi.waitFor(() => expect(mocks.getUnreads).toHaveBeenCalled());
|
||||
});
|
||||
|
||||
const activeMsg = makeMessage({
|
||||
id: 6,
|
||||
type: 'PRIV',
|
||||
conversation_key: CONTACT_KEY,
|
||||
received_at: 1700002000,
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
result.current.recordMessageEvent({
|
||||
msg: activeMsg,
|
||||
activeConversation: true,
|
||||
isNewMessage: true,
|
||||
hasMention: true,
|
||||
});
|
||||
result.current.recordMessageEvent({
|
||||
msg: makeMessage({
|
||||
id: 7,
|
||||
type: 'CHAN',
|
||||
conversation_key: CHANNEL_KEY,
|
||||
received_at: 1700002001,
|
||||
}),
|
||||
activeConversation: false,
|
||||
isNewMessage: false,
|
||||
hasMention: true,
|
||||
});
|
||||
});
|
||||
|
||||
expect(result.current.unreadCounts[getStateKey('contact', CONTACT_KEY)]).toBeUndefined();
|
||||
expect(result.current.unreadCounts[getStateKey('channel', CHANNEL_KEY)]).toBeUndefined();
|
||||
expect(result.current.lastMessageTimes[getStateKey('contact', CONTACT_KEY)]).toBe(1700002000);
|
||||
expect(result.current.lastMessageTimes[getStateKey('channel', CHANNEL_KEY)]).toBe(1700002001);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -57,6 +57,13 @@ export interface HealthStatus {
|
||||
radio_initializing: boolean;
|
||||
radio_state?: 'connected' | 'initializing' | 'connecting' | 'disconnected' | 'paused';
|
||||
connection_info: string | null;
|
||||
radio_device_info?: {
|
||||
model: string | null;
|
||||
firmware_build: string | null;
|
||||
firmware_version: string | null;
|
||||
max_contacts: number | null;
|
||||
max_channels: number | null;
|
||||
} | null;
|
||||
database_size_mb: number;
|
||||
oldest_undecrypted_timestamp: number | null;
|
||||
fanout_statuses: Record<string, FanoutStatusEntry>;
|
||||
@@ -349,6 +356,8 @@ export interface CommandResponse {
|
||||
|
||||
export interface RepeaterLoginResponse {
|
||||
status: string;
|
||||
authenticated: boolean;
|
||||
message: string | null;
|
||||
}
|
||||
|
||||
export interface RepeaterStatusResponse {
|
||||
|
||||
@@ -11,9 +11,12 @@
|
||||
|
||||
const LAST_MESSAGE_KEY = 'remoteterm-lastMessageTime';
|
||||
const SORT_ORDER_KEY = 'remoteterm-sortOrder';
|
||||
const SIDEBAR_SECTION_SORT_ORDERS_KEY = 'remoteterm-sidebar-section-sort-orders';
|
||||
|
||||
export type ConversationTimes = Record<string, number>;
|
||||
export type SortOrder = 'recent' | 'alpha';
|
||||
export type SidebarSortableSection = 'channels' | 'contacts' | 'repeaters';
|
||||
export type SidebarSectionSortOrders = Record<SidebarSortableSection, SortOrder>;
|
||||
|
||||
// In-memory cache of last message times (loaded from server on init)
|
||||
let lastMessageTimesCache: ConversationTimes = {};
|
||||
@@ -93,6 +96,56 @@ export function loadLocalStorageSortOrder(): SortOrder {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the legacy single sidebar sort order from localStorage, if present.
|
||||
*/
|
||||
export function loadLegacyLocalStorageSortOrder(): SortOrder | null {
|
||||
try {
|
||||
const stored = localStorage.getItem(SORT_ORDER_KEY);
|
||||
if (!stored) return null;
|
||||
return stored === 'alpha' ? 'alpha' : 'recent';
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function buildSidebarSectionSortOrders(
|
||||
defaultOrder: SortOrder = 'recent'
|
||||
): SidebarSectionSortOrders {
|
||||
return {
|
||||
channels: defaultOrder,
|
||||
contacts: defaultOrder,
|
||||
repeaters: defaultOrder,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Load per-section sidebar sort orders from localStorage.
|
||||
*/
|
||||
export function loadLocalStorageSidebarSectionSortOrders(): SidebarSectionSortOrders | null {
|
||||
try {
|
||||
const stored = localStorage.getItem(SIDEBAR_SECTION_SORT_ORDERS_KEY);
|
||||
if (!stored) return null;
|
||||
|
||||
const parsed = JSON.parse(stored) as Partial<SidebarSectionSortOrders>;
|
||||
return {
|
||||
channels: parsed.channels === 'alpha' ? 'alpha' : 'recent',
|
||||
contacts: parsed.contacts === 'alpha' ? 'alpha' : 'recent',
|
||||
repeaters: parsed.repeaters === 'alpha' ? 'alpha' : 'recent',
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function saveLocalStorageSidebarSectionSortOrders(orders: SidebarSectionSortOrders): void {
|
||||
try {
|
||||
localStorage.setItem(SIDEBAR_SECTION_SORT_ORDERS_KEY, JSON.stringify(orders));
|
||||
} catch {
|
||||
// localStorage might be disabled
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear conversation state from localStorage (after migration)
|
||||
*/
|
||||
|
||||
10
frontend/src/utils/messageIdentity.ts
Normal file
10
frontend/src/utils/messageIdentity.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import type { Message } from '../types';
|
||||
|
||||
// Content identity matches the frontend's message-level dedup contract.
|
||||
export function getMessageContentKey(msg: Message): string {
|
||||
// When sender_timestamp exists, dedup by content (catches radio-path duplicates with different IDs).
|
||||
// When null, include msg.id so each message gets a unique key — avoids silently dropping
|
||||
// different messages that share the same text and received_at second.
|
||||
const ts = msg.sender_timestamp ?? `r${msg.received_at}-${msg.id}`;
|
||||
return `${msg.type}-${msg.conversation_key}-${msg.text}-${ts}`;
|
||||
}
|
||||
@@ -273,6 +273,9 @@ export function isValidLocation(lat: number | null, lon: number | null): boolean
|
||||
if (lat === null || lon === null) {
|
||||
return false;
|
||||
}
|
||||
if (lat < -90 || lat > 90 || lon < -180 || lon > 180) {
|
||||
return false;
|
||||
}
|
||||
// (0, 0) is in the Atlantic Ocean - treat as unset
|
||||
if (lat === 0 && lon === 0) {
|
||||
return false;
|
||||
|
||||
12
frontend/src/utils/publicChannel.ts
Normal file
12
frontend/src/utils/publicChannel.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import type { Channel } from '../types';
|
||||
|
||||
export const PUBLIC_CHANNEL_KEY = '8B3387E9C5CDEA6AC9E5EDBAA115CD72';
|
||||
export const PUBLIC_CHANNEL_NAME = 'Public';
|
||||
|
||||
export function isPublicChannelKey(key: string): boolean {
|
||||
return key.toUpperCase() === PUBLIC_CHANNEL_KEY;
|
||||
}
|
||||
|
||||
export function findPublicChannel(channels: Channel[]): Channel | undefined {
|
||||
return channels.find((channel) => isPublicChannelKey(channel.key));
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
import type { Channel, Contact, Conversation } from '../types';
|
||||
import { findPublicChannel, PUBLIC_CHANNEL_NAME } from './publicChannel';
|
||||
import { getContactDisplayName } from './pubkey';
|
||||
import type { SettingsSection } from '../components/settings/settingsConstants';
|
||||
|
||||
interface ParsedHashConversation {
|
||||
type: 'channel' | 'contact' | 'raw' | 'map' | 'visualizer' | 'search';
|
||||
@@ -11,6 +13,15 @@ interface ParsedHashConversation {
|
||||
mapFocusKey?: string;
|
||||
}
|
||||
|
||||
const SETTINGS_SECTIONS: SettingsSection[] = [
|
||||
'radio',
|
||||
'local',
|
||||
'fanout',
|
||||
'database',
|
||||
'statistics',
|
||||
'about',
|
||||
];
|
||||
|
||||
// Parse URL hash to get conversation
|
||||
// (e.g., #channel/ABCDEF0123456789ABCDEF0123456789 or #contact/<64-char-pubkey>).
|
||||
export function parseHashConversation(): ParsedHashConversation | null {
|
||||
@@ -69,6 +80,20 @@ export function parseHashConversation(): ParsedHashConversation | null {
|
||||
};
|
||||
}
|
||||
|
||||
export function parseHashSettingsSection(): SettingsSection | null {
|
||||
const hash = window.location.hash.slice(1);
|
||||
if (!hash.startsWith('settings/')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const section = decodeURIComponent(hash.slice('settings/'.length)) as SettingsSection;
|
||||
return SETTINGS_SECTIONS.includes(section) ? section : null;
|
||||
}
|
||||
|
||||
export function getSettingsHash(section: SettingsSection): string {
|
||||
return `#settings/${encodeURIComponent(section)}`;
|
||||
}
|
||||
|
||||
export function resolveChannelFromHashToken(token: string, channels: Channel[]): Channel | null {
|
||||
const normalizedToken = token.trim();
|
||||
if (!normalizedToken) return null;
|
||||
@@ -77,6 +102,13 @@ export function resolveChannelFromHashToken(token: string, channels: Channel[]):
|
||||
const byKey = channels.find((c) => c.key.toLowerCase() === normalizedToken.toLowerCase());
|
||||
if (byKey) return byKey;
|
||||
|
||||
// Legacy Public hashes should resolve to the canonical Public key, not any
|
||||
// arbitrary row that happens to share the display name.
|
||||
if (normalizedToken.toLowerCase() === PUBLIC_CHANNEL_NAME.toLowerCase()) {
|
||||
const publicChannel = findPublicChannel(channels);
|
||||
if (publicChannel) return publicChannel;
|
||||
}
|
||||
|
||||
// Backward compatibility for legacy name-based hashes.
|
||||
return (
|
||||
channels.find((c) => c.name === normalizedToken || c.name === `#${normalizedToken}`) || null
|
||||
@@ -133,3 +165,10 @@ export function updateUrlHash(conv: Conversation | null): void {
|
||||
window.history.replaceState(null, '', newHash || window.location.pathname);
|
||||
}
|
||||
}
|
||||
|
||||
export function updateSettingsHash(section: SettingsSection): void {
|
||||
const newHash = getSettingsHash(section);
|
||||
if (newHash !== window.location.hash) {
|
||||
window.history.replaceState(null, '', newHash);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "remoteterm-meshcore"
|
||||
version = "3.3.0"
|
||||
version = "3.4.1"
|
||||
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
[Unit]
|
||||
Description=RemoteTerm for MeshCore
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=remoteterm
|
||||
Group=remoteterm
|
||||
WorkingDirectory=/opt/remoteterm
|
||||
ExecStart=/opt/remoteterm/.venv/bin/uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
# Environment
|
||||
Environment=MESHCORE_DATABASE_PATH=/opt/remoteterm/data/meshcore.db
|
||||
# Uncomment and set if auto-detection doesn't work:
|
||||
# Environment=MESHCORE_SERIAL_PORT=/dev/ttyUSB0
|
||||
|
||||
# Give access to serial devices
|
||||
SupplementaryGroups=dialout
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -11,7 +11,7 @@ SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
echo -e "${YELLOW}=== Extended Quality Checks ===${NC}"
|
||||
echo
|
||||
|
||||
echo -e "${BLUE}[all_quality]${NC} Running full lint, typecheck, unit tests, and builds..."
|
||||
echo -e "${BLUE}[all_quality]${NC} Running full lint, typecheck, unit tests, and the standard frontend build..."
|
||||
"$SCRIPT_DIR/scripts/all_quality.sh"
|
||||
echo -e "${GREEN}[all_quality]${NC} Passed!"
|
||||
echo
|
||||
|
||||
@@ -10,6 +10,20 @@ NC='\033[0m' # No Color
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
RELEASE_WORK_DIR=""
|
||||
RELEASE_BUNDLE_DIR_NAME="Remote-Terminal-for-MeshCore"
|
||||
|
||||
cleanup_release_build_artifacts() {
|
||||
if [ -d "$SCRIPT_DIR/frontend/prebuilt" ]; then
|
||||
rm -rf "$SCRIPT_DIR/frontend/prebuilt"
|
||||
fi
|
||||
if [ -n "$RELEASE_WORK_DIR" ] && [ -d "$RELEASE_WORK_DIR" ]; then
|
||||
rm -rf "$RELEASE_WORK_DIR"
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup_release_build_artifacts EXIT
|
||||
|
||||
echo -e "${YELLOW}=== RemoteTerm for MeshCore Publish Script ===${NC}"
|
||||
echo
|
||||
|
||||
@@ -157,6 +171,32 @@ echo
|
||||
# Get git hashes (after commit so they reflect the new commit)
|
||||
GIT_HASH=$(git rev-parse --short HEAD)
|
||||
FULL_GIT_HASH=$(git rev-parse HEAD)
|
||||
RELEASE_ASSET="remoteterm-prebuilt-frontend-v${VERSION}-${GIT_HASH}.zip"
|
||||
|
||||
echo -e "${YELLOW}Building packaged frontend artifact...${NC}"
|
||||
cd "$SCRIPT_DIR/frontend"
|
||||
npm run packaged-build
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
RELEASE_WORK_DIR=$(mktemp -d)
|
||||
RELEASE_BUNDLE_DIR="$RELEASE_WORK_DIR/$RELEASE_BUNDLE_DIR_NAME"
|
||||
mkdir -p "$RELEASE_BUNDLE_DIR"
|
||||
git archive "$FULL_GIT_HASH" | tar -x -C "$RELEASE_BUNDLE_DIR"
|
||||
mkdir -p "$RELEASE_BUNDLE_DIR/frontend"
|
||||
cp -R "$SCRIPT_DIR/frontend/prebuilt" "$RELEASE_BUNDLE_DIR/frontend/prebuilt"
|
||||
cat > "$RELEASE_BUNDLE_DIR/build_info.json" <<EOF
|
||||
{
|
||||
"commit_hash": "$FULL_GIT_HASH",
|
||||
"build_source": "prebuilt-release"
|
||||
}
|
||||
EOF
|
||||
rm -f "$SCRIPT_DIR/$RELEASE_ASSET"
|
||||
(
|
||||
cd "$RELEASE_WORK_DIR"
|
||||
zip -qr "$SCRIPT_DIR/$RELEASE_ASSET" "$(basename "$RELEASE_BUNDLE_DIR")"
|
||||
)
|
||||
echo -e "${GREEN}Packaged release artifact created: $RELEASE_ASSET${NC}"
|
||||
echo
|
||||
|
||||
# Build docker image
|
||||
echo -e "${YELLOW}Building Docker image...${NC}"
|
||||
@@ -200,6 +240,7 @@ else
|
||||
fi
|
||||
|
||||
gh release create "$VERSION" \
|
||||
"$RELEASE_ASSET" \
|
||||
--title "$VERSION" \
|
||||
--notes-file "$RELEASE_NOTES_FILE" \
|
||||
--verify-tag
|
||||
@@ -217,3 +258,5 @@ echo -e " - jkingsman/remoteterm-meshcore:$VERSION"
|
||||
echo -e " - jkingsman/remoteterm-meshcore:$GIT_HASH"
|
||||
echo -e "GitHub release:"
|
||||
echo -e " - $VERSION"
|
||||
echo -e "Release artifact:"
|
||||
echo -e " - $RELEASE_ASSET"
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { createChannel, deleteChannel, getChannels } from '../helpers/api';
|
||||
|
||||
const PUBLIC_CHANNEL_KEY = '8B3387E9C5CDEA6AC9E5EDBAA115CD72';
|
||||
|
||||
test.describe('Conversation deletion flow', () => {
|
||||
test.beforeAll(async () => {
|
||||
const channels = await getChannels();
|
||||
if (!channels.some((c) => c.name === 'Public')) {
|
||||
if (!channels.some((c) => c.key === PUBLIC_CHANNEL_KEY)) {
|
||||
await createChannel('Public');
|
||||
}
|
||||
});
|
||||
|
||||
@@ -5,12 +5,14 @@ test.describe('Sidebar sort toggle', () => {
|
||||
await page.goto('/');
|
||||
await expect(page.getByRole('status', { name: 'Radio OK' })).toBeVisible();
|
||||
|
||||
// There are multiple sort toggles (Channels, Contacts, Repeaters sections).
|
||||
// Use .first() to target the Channels sort toggle.
|
||||
// When sort is 'alpha', button text is "A-Z" and title is "Sort by recent".
|
||||
// When sort is 'recent', button text is "⏱" and title is "Sort alphabetically".
|
||||
const sortByRecent = page.getByTitle('Sort by recent').first();
|
||||
const sortAlpha = page.getByTitle('Sort alphabetically').first();
|
||||
// Sidebar sort is now tracked per section, so target the Channels control
|
||||
// explicitly instead of assuming a shared global toggle.
|
||||
const sortByRecent = page.getByRole('button', {
|
||||
name: 'Sort Channels by recent',
|
||||
});
|
||||
const sortAlpha = page.getByRole('button', {
|
||||
name: 'Sort Channels alphabetically',
|
||||
});
|
||||
|
||||
// Wait for at least one sort button to appear
|
||||
await expect(sortByRecent.or(sortAlpha)).toBeVisible({ timeout: 10_000 });
|
||||
|
||||
@@ -5,6 +5,7 @@ Uses httpx.AsyncClient or direct function calls with real in-memory SQLite.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
@@ -78,6 +79,11 @@ class TestHealthEndpoint:
|
||||
with patch("app.routers.health.radio_manager") as mock_rm:
|
||||
mock_rm.is_connected = True
|
||||
mock_rm.connection_info = "Serial: /dev/ttyUSB0"
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = True
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
|
||||
from app.main import app
|
||||
|
||||
@@ -97,6 +103,11 @@ class TestHealthEndpoint:
|
||||
with patch("app.routers.health.radio_manager") as mock_rm:
|
||||
mock_rm.is_connected = False
|
||||
mock_rm.connection_info = None
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = False
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
|
||||
from app.main import app
|
||||
|
||||
@@ -119,7 +130,11 @@ class TestDebugEndpoint:
|
||||
from meshcore import EventType
|
||||
|
||||
from app.config import clear_recent_log_lines
|
||||
from app.routers.debug import DebugApplicationInfo
|
||||
from app.routers.debug import (
|
||||
LOG_COPY_BOUNDARY_LINE,
|
||||
LOG_COPY_BOUNDARY_MESSAGE,
|
||||
DebugApplicationInfo,
|
||||
)
|
||||
|
||||
clear_recent_log_lines()
|
||||
|
||||
@@ -195,6 +210,9 @@ class TestDebugEndpoint:
|
||||
assert payload["application"]["commit_hash"] == "deadbeef"
|
||||
assert payload["runtime"]["channel_slot_reuse_enabled"] is True
|
||||
assert payload["runtime"]["channels_with_incoming_messages"] == 1
|
||||
assert payload["logs"][:4] == [LOG_COPY_BOUNDARY_LINE] * 4
|
||||
assert payload["logs"][4] == LOG_COPY_BOUNDARY_MESSAGE
|
||||
assert payload["logs"][5:9] == [LOG_COPY_BOUNDARY_LINE] * 4
|
||||
assert any("support snapshot marker" in line for line in payload["logs"])
|
||||
|
||||
radio_probe = payload["radio_probe"]
|
||||
@@ -257,6 +275,53 @@ class TestRadioDisconnectedHandler:
|
||||
assert "not connected" in response.json()["detail"].lower()
|
||||
|
||||
|
||||
class TestDebugApplicationInfo:
|
||||
"""Test debug application metadata resolution."""
|
||||
|
||||
def test_build_application_info_uses_release_build_info_without_git(self, tmp_path):
|
||||
"""Release bundles should still surface commit metadata without a .git directory."""
|
||||
from app.routers import debug as debug_router
|
||||
|
||||
(tmp_path / "build_info.json").write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"commit_hash": "cf1a55e25828ee62fb077d6202b174f69f6e6340",
|
||||
"build_source": "prebuilt-release",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
with (
|
||||
patch("app.routers.debug._repo_root", return_value=tmp_path),
|
||||
patch("app.routers.debug._get_app_version", return_value="3.4.0"),
|
||||
patch("app.routers.debug._git_output", return_value=None),
|
||||
):
|
||||
info = debug_router._build_application_info()
|
||||
|
||||
assert info.version == "3.4.0"
|
||||
assert info.commit_hash == "cf1a55e25828ee62fb077d6202b174f69f6e6340"
|
||||
assert info.git_branch is None
|
||||
assert info.git_dirty is False
|
||||
|
||||
def test_build_application_info_ignores_invalid_release_build_info(self, tmp_path):
|
||||
"""Malformed release metadata should not break the debug endpoint."""
|
||||
from app.routers import debug as debug_router
|
||||
|
||||
(tmp_path / "build_info.json").write_text("{not-json")
|
||||
|
||||
with (
|
||||
patch("app.routers.debug._repo_root", return_value=tmp_path),
|
||||
patch("app.routers.debug._get_app_version", return_value="3.4.0"),
|
||||
patch("app.routers.debug._git_output", return_value=None),
|
||||
):
|
||||
info = debug_router._build_application_info()
|
||||
|
||||
assert info.version == "3.4.0"
|
||||
assert info.commit_hash is None
|
||||
assert info.git_branch is None
|
||||
assert info.git_dirty is False
|
||||
|
||||
|
||||
class TestMessagesEndpoint:
|
||||
"""Test message-related endpoints."""
|
||||
|
||||
@@ -1118,6 +1183,11 @@ class TestHealthEndpointDatabaseSize:
|
||||
):
|
||||
mock_rm.is_connected = True
|
||||
mock_rm.connection_info = "Serial: /dev/ttyUSB0"
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = True
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
mock_getsize.return_value = 10 * 1024 * 1024 # 10 MB
|
||||
|
||||
from app.main import app
|
||||
@@ -1148,6 +1218,11 @@ class TestHealthEndpointOldestUndecrypted:
|
||||
):
|
||||
mock_rm.is_connected = True
|
||||
mock_rm.connection_info = "Serial: /dev/ttyUSB0"
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = True
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
mock_getsize.return_value = 5 * 1024 * 1024 # 5 MB
|
||||
mock_repo.get_oldest_undecrypted = AsyncMock(return_value=1700000000)
|
||||
|
||||
@@ -1175,6 +1250,11 @@ class TestHealthEndpointOldestUndecrypted:
|
||||
):
|
||||
mock_rm.is_connected = True
|
||||
mock_rm.connection_info = "Serial: /dev/ttyUSB0"
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = True
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
mock_getsize.return_value = 1 * 1024 * 1024 # 1 MB
|
||||
mock_repo.get_oldest_undecrypted = AsyncMock(return_value=None)
|
||||
|
||||
@@ -1202,6 +1282,11 @@ class TestHealthEndpointOldestUndecrypted:
|
||||
):
|
||||
mock_rm.is_connected = False
|
||||
mock_rm.connection_info = None
|
||||
mock_rm.is_setup_in_progress = False
|
||||
mock_rm.is_setup_complete = False
|
||||
mock_rm.connection_desired = True
|
||||
mock_rm.is_reconnecting = False
|
||||
mock_rm.device_info_loaded = False
|
||||
mock_getsize.side_effect = OSError("File not found")
|
||||
mock_repo.get_oldest_undecrypted = AsyncMock(side_effect=RuntimeError("No DB"))
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from app.channel_constants import PUBLIC_CHANNEL_KEY, PUBLIC_CHANNEL_NAME
|
||||
from app.repository import ChannelRepository, MessageRepository
|
||||
|
||||
|
||||
@@ -77,6 +78,55 @@ class TestCreateChannel:
|
||||
assert channel.flood_scope_override is None
|
||||
|
||||
|
||||
class TestPublicChannelProtection:
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_public_uses_canonical_key(self, test_db):
|
||||
from app.routers.channels import CreateChannelRequest, create_channel
|
||||
|
||||
result = await create_channel(CreateChannelRequest(name="Public"))
|
||||
|
||||
assert result.key == PUBLIC_CHANNEL_KEY
|
||||
assert result.name == PUBLIC_CHANNEL_NAME
|
||||
assert result.is_hashtag is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_public_rejects_conflicting_key(self, test_db, client):
|
||||
response = await client.post(
|
||||
"/api/channels",
|
||||
json={"name": "Public", "key": "AA" * 16},
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert "canonical Public key" in response.json()["detail"]
|
||||
assert await ChannelRepository.get_by_key("AA" * 16) is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_non_public_rejects_public_key(self, test_db, client):
|
||||
response = await client.post(
|
||||
"/api/channels",
|
||||
json={"name": "Ops", "key": PUBLIC_CHANNEL_KEY},
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert PUBLIC_CHANNEL_NAME in response.json()["detail"]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_public_channel_is_rejected(self, test_db, client):
|
||||
await ChannelRepository.upsert(
|
||||
key=PUBLIC_CHANNEL_KEY,
|
||||
name=PUBLIC_CHANNEL_NAME,
|
||||
is_hashtag=False,
|
||||
on_radio=False,
|
||||
)
|
||||
|
||||
response = await client.delete(f"/api/channels/{PUBLIC_CHANNEL_KEY}")
|
||||
|
||||
assert response.status_code == 400
|
||||
assert "cannot be deleted" in response.json()["detail"]
|
||||
channel = await ChannelRepository.get_by_key(PUBLIC_CHANNEL_KEY)
|
||||
assert channel is not None
|
||||
|
||||
|
||||
class TestChannelDetail:
|
||||
"""Test GET /api/channels/{key}/detail."""
|
||||
|
||||
|
||||
@@ -448,6 +448,38 @@ class TestAdvertisementParsing:
|
||||
assert result.lat is None
|
||||
assert result.lon is None
|
||||
|
||||
def test_parse_advertisement_discards_out_of_range_gps(self, caplog):
|
||||
"""Out-of-range advert coordinates are treated as missing."""
|
||||
from app.decoder import parse_advertisement
|
||||
|
||||
payload = bytearray()
|
||||
payload.extend(
|
||||
bytes.fromhex("f29fdc7c560f9d813d1593a8587fa46a9e7efe2f5506d38c0af41307bf9e517a")
|
||||
)
|
||||
payload.extend((1718749967).to_bytes(4, byteorder="little"))
|
||||
payload.extend(bytes(64))
|
||||
payload.append(0x92)
|
||||
payload.extend((-593497573).to_bytes(4, byteorder="little", signed=True))
|
||||
payload.extend((-1659939204).to_bytes(4, byteorder="little", signed=True))
|
||||
payload.extend(b"Tacompton")
|
||||
raw_packet = bytes.fromhex("11") + bytes(payload)
|
||||
|
||||
with caplog.at_level("WARNING"):
|
||||
result = parse_advertisement(bytes(payload), raw_packet=raw_packet)
|
||||
|
||||
assert result is not None
|
||||
assert (
|
||||
result.public_key == "f29fdc7c560f9d813d1593a8587fa46a9e7efe2f5506d38c0af41307bf9e517a"
|
||||
)
|
||||
assert result.name == "Tacompton"
|
||||
assert result.device_role == 2
|
||||
assert result.lat is None
|
||||
assert result.lon is None
|
||||
assert "Dropping location data for nonsensical packet -- packet" in caplog.text
|
||||
assert raw_packet.hex().upper() in caplog.text
|
||||
assert "-593.497573/-1659.939204" in caplog.text
|
||||
assert "Outta this world!" in caplog.text
|
||||
|
||||
def test_parse_advertisement_extracts_public_key(self):
|
||||
"""Advertisement parsing extracts the public key correctly."""
|
||||
from app.decoder import parse_advertisement, parse_packet
|
||||
|
||||
@@ -7,7 +7,7 @@ paths (packet_processor + event_handler fallback) don't double-store messages.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -331,12 +331,9 @@ class TestDMEchoDetection:
|
||||
assert msg_id is not None
|
||||
broadcasts.clear()
|
||||
|
||||
# Duplicate arrives via different path
|
||||
pkt2, _ = await RawPacketRepository.create(b"dm_in_2", SENDER_TIMESTAMP + 1)
|
||||
|
||||
with patch("app.packet_processor.broadcast_event", mock_broadcast):
|
||||
result = await create_dm_message_from_decrypted(
|
||||
packet_id=pkt2,
|
||||
packet_id=pkt1,
|
||||
decrypted=decrypted,
|
||||
their_public_key=CONTACT_PUB,
|
||||
our_public_key=OUR_PUB,
|
||||
@@ -388,12 +385,9 @@ class TestDMEchoDetection:
|
||||
assert msg_id is not None
|
||||
broadcasts.clear()
|
||||
|
||||
# Duplicate arrives, also with no path
|
||||
pkt2, _ = await RawPacketRepository.create(b"dm_np_2", SENDER_TIMESTAMP + 1)
|
||||
|
||||
with patch("app.packet_processor.broadcast_event", mock_broadcast):
|
||||
result = await create_dm_message_from_decrypted(
|
||||
packet_id=pkt2,
|
||||
packet_id=pkt1,
|
||||
decrypted=decrypted,
|
||||
their_public_key=CONTACT_PUB,
|
||||
our_public_key=OUR_PUB,
|
||||
@@ -416,7 +410,8 @@ class TestDualPathDedup:
|
||||
1. Primary: RX_LOG_DATA → packet_processor (decrypts with private key)
|
||||
2. Fallback: CONTACT_MSG_RECV → on_contact_message (MeshCore library decoded)
|
||||
|
||||
The fallback uses INSERT OR IGNORE to avoid double-storage when both fire.
|
||||
The fallback path should reconcile against the packet path instead of creating
|
||||
a second row, and should still add new path observations when available.
|
||||
"""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -463,19 +458,7 @@ class TestDualPathDedup:
|
||||
"sender_timestamp": SENDER_TIMESTAMP,
|
||||
}
|
||||
|
||||
# Mock contact lookup to return a contact with the right key
|
||||
mock_contact = MagicMock()
|
||||
mock_contact.public_key = CONTACT_PUB
|
||||
mock_contact.type = 1 # Client, not repeater
|
||||
mock_contact.name = "TestContact"
|
||||
|
||||
with (
|
||||
patch("app.event_handlers.ContactRepository") as mock_contact_repo,
|
||||
patch("app.event_handlers.broadcast_event", mock_broadcast),
|
||||
):
|
||||
mock_contact_repo.get_by_key_or_prefix = AsyncMock(return_value=mock_contact)
|
||||
mock_contact_repo.update_last_contacted = AsyncMock()
|
||||
|
||||
with patch("app.event_handlers.broadcast_event", mock_broadcast):
|
||||
await on_contact_message(mock_event)
|
||||
|
||||
# No additional message broadcast should have been sent
|
||||
@@ -544,18 +527,7 @@ class TestDualPathDedup:
|
||||
"sender_timestamp": SENDER_TIMESTAMP,
|
||||
}
|
||||
|
||||
mock_contact = MagicMock()
|
||||
mock_contact.public_key = upper_key # Uppercase from DB
|
||||
mock_contact.type = 1
|
||||
mock_contact.name = "TestContact"
|
||||
|
||||
with (
|
||||
patch("app.event_handlers.ContactRepository") as mock_contact_repo,
|
||||
patch("app.event_handlers.broadcast_event", mock_broadcast),
|
||||
):
|
||||
mock_contact_repo.get_by_key_or_prefix = AsyncMock(return_value=mock_contact)
|
||||
mock_contact_repo.update_last_contacted = AsyncMock()
|
||||
|
||||
with patch("app.event_handlers.broadcast_event", mock_broadcast):
|
||||
await on_contact_message(mock_event)
|
||||
|
||||
# Should NOT create a second message (dedup catches it thanks to .lower())
|
||||
@@ -570,6 +542,146 @@ class TestDualPathDedup:
|
||||
)
|
||||
assert len(messages) == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_event_handler_duplicate_adds_path_to_existing_dm(
|
||||
self, test_db, captured_broadcasts
|
||||
):
|
||||
"""Fallback DM duplicates should reconcile path updates onto the stored message."""
|
||||
from app.event_handlers import on_contact_message
|
||||
from app.packet_processor import create_dm_message_from_decrypted
|
||||
|
||||
await ContactRepository.upsert(
|
||||
{
|
||||
"public_key": CONTACT_PUB.lower(),
|
||||
"name": "TestContact",
|
||||
"type": 1,
|
||||
"last_seen": SENDER_TIMESTAMP,
|
||||
"last_contacted": SENDER_TIMESTAMP,
|
||||
"first_seen": SENDER_TIMESTAMP,
|
||||
"on_radio": False,
|
||||
"out_path_hash_mode": 0,
|
||||
}
|
||||
)
|
||||
|
||||
pkt_id, _ = await RawPacketRepository.create(b"primary_with_no_path", SENDER_TIMESTAMP)
|
||||
decrypted = DecryptedDirectMessage(
|
||||
timestamp=SENDER_TIMESTAMP,
|
||||
flags=0,
|
||||
message="Dual path with route update",
|
||||
dest_hash="fa",
|
||||
src_hash="a1",
|
||||
)
|
||||
|
||||
broadcasts, mock_broadcast = captured_broadcasts
|
||||
|
||||
with patch("app.packet_processor.broadcast_event", mock_broadcast):
|
||||
msg_id = await create_dm_message_from_decrypted(
|
||||
packet_id=pkt_id,
|
||||
decrypted=decrypted,
|
||||
their_public_key=CONTACT_PUB,
|
||||
our_public_key=OUR_PUB,
|
||||
received_at=SENDER_TIMESTAMP,
|
||||
outgoing=False,
|
||||
)
|
||||
|
||||
assert msg_id is not None
|
||||
broadcasts.clear()
|
||||
|
||||
mock_event = MagicMock()
|
||||
mock_event.payload = {
|
||||
"public_key": CONTACT_PUB,
|
||||
"text": "Dual path with route update",
|
||||
"txt_type": 0,
|
||||
"sender_timestamp": SENDER_TIMESTAMP,
|
||||
"path": "bbcc",
|
||||
"path_len": 2,
|
||||
}
|
||||
|
||||
with patch("app.event_handlers.broadcast_event", mock_broadcast):
|
||||
await on_contact_message(mock_event)
|
||||
|
||||
message_broadcasts = [b for b in broadcasts if b["type"] == "message"]
|
||||
assert message_broadcasts == []
|
||||
|
||||
ack_broadcasts = [b for b in broadcasts if b["type"] == "message_acked"]
|
||||
assert len(ack_broadcasts) == 1
|
||||
assert ack_broadcasts[0]["data"]["message_id"] == msg_id
|
||||
assert ack_broadcasts[0]["data"]["ack_count"] == 0
|
||||
assert any(p["path"] == "bbcc" for p in ack_broadcasts[0]["data"]["paths"])
|
||||
|
||||
msg = await MessageRepository.get_by_id(msg_id)
|
||||
assert msg is not None
|
||||
assert msg.paths is not None
|
||||
assert any(p.path == "bbcc" for p in msg.paths)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_fallback_path_duplicate_reconciles_path_without_new_row(
|
||||
self, test_db, captured_broadcasts
|
||||
):
|
||||
"""Repeated fallback DMs should keep one row and merge path observations."""
|
||||
from app.event_handlers import on_contact_message
|
||||
|
||||
await ContactRepository.upsert(
|
||||
{
|
||||
"public_key": CONTACT_PUB.lower(),
|
||||
"name": "FallbackOnly",
|
||||
"type": 1,
|
||||
"last_seen": SENDER_TIMESTAMP,
|
||||
"last_contacted": SENDER_TIMESTAMP,
|
||||
"first_seen": SENDER_TIMESTAMP,
|
||||
"on_radio": False,
|
||||
"out_path_hash_mode": 0,
|
||||
}
|
||||
)
|
||||
|
||||
broadcasts, mock_broadcast = captured_broadcasts
|
||||
|
||||
first_event = MagicMock()
|
||||
first_event.payload = {
|
||||
"public_key": CONTACT_PUB,
|
||||
"text": "Fallback duplicate route test",
|
||||
"txt_type": 0,
|
||||
"sender_timestamp": SENDER_TIMESTAMP,
|
||||
}
|
||||
|
||||
with patch("app.event_handlers.broadcast_event", mock_broadcast):
|
||||
await on_contact_message(first_event)
|
||||
|
||||
messages = await MessageRepository.get_all(
|
||||
msg_type="PRIV", conversation_key=CONTACT_PUB.lower(), limit=10
|
||||
)
|
||||
assert len(messages) == 1
|
||||
msg_id = messages[0].id
|
||||
|
||||
broadcasts.clear()
|
||||
|
||||
second_event = MagicMock()
|
||||
second_event.payload = {
|
||||
"public_key": CONTACT_PUB,
|
||||
"text": "Fallback duplicate route test",
|
||||
"txt_type": 0,
|
||||
"sender_timestamp": SENDER_TIMESTAMP,
|
||||
"path": "ddee",
|
||||
"path_len": 2,
|
||||
}
|
||||
|
||||
with patch("app.event_handlers.broadcast_event", mock_broadcast):
|
||||
await on_contact_message(second_event)
|
||||
|
||||
messages = await MessageRepository.get_all(
|
||||
msg_type="PRIV", conversation_key=CONTACT_PUB.lower(), limit=10
|
||||
)
|
||||
assert len(messages) == 1
|
||||
|
||||
message_broadcasts = [b for b in broadcasts if b["type"] == "message"]
|
||||
assert message_broadcasts == []
|
||||
|
||||
ack_broadcasts = [b for b in broadcasts if b["type"] == "message_acked"]
|
||||
assert len(ack_broadcasts) == 1
|
||||
assert ack_broadcasts[0]["data"]["message_id"] == msg_id
|
||||
assert ack_broadcasts[0]["data"]["ack_count"] == 0
|
||||
assert any(p["path"] == "ddee" for p in ack_broadcasts[0]["data"]["paths"])
|
||||
|
||||
|
||||
class TestDirectMessageDirectionDetection:
|
||||
"""Test src_hash/dest_hash direction detection in _process_direct_message.
|
||||
@@ -832,21 +944,19 @@ class TestDirectMessageDirectionDetection:
|
||||
|
||||
|
||||
class TestConcurrentDMDedup:
|
||||
"""Test that concurrent DM processing deduplicates via atomic INSERT OR IGNORE.
|
||||
"""Test that concurrent DM processing deduplicates by raw-packet identity.
|
||||
|
||||
On a mesh network, the same DM packet can arrive via two RF paths nearly
|
||||
simultaneously, causing two concurrent calls to create_dm_message_from_decrypted.
|
||||
SQLite's INSERT OR IGNORE ensures only one message is stored.
|
||||
On a mesh network, the same DM payload can be observed twice before the first
|
||||
handler finishes. Both arrivals reuse the same raw_packets row and should end
|
||||
up attached to a single message.
|
||||
"""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_concurrent_identical_dms_only_store_once(self, test_db, captured_broadcasts):
|
||||
"""Two concurrent create_dm_message_from_decrypted calls with identical content
|
||||
should result in exactly one stored message."""
|
||||
async def test_concurrent_same_packet_dms_only_store_once(self, test_db, captured_broadcasts):
|
||||
"""Two concurrent handlers for the same raw DM packet store one message."""
|
||||
from app.packet_processor import create_dm_message_from_decrypted
|
||||
|
||||
pkt1, _ = await RawPacketRepository.create(b"concurrent_dm_1", SENDER_TIMESTAMP)
|
||||
pkt2, _ = await RawPacketRepository.create(b"concurrent_dm_2", SENDER_TIMESTAMP + 1)
|
||||
packet_id, _ = await RawPacketRepository.create(b"concurrent_dm_1", SENDER_TIMESTAMP)
|
||||
|
||||
decrypted = DecryptedDirectMessage(
|
||||
timestamp=SENDER_TIMESTAMP,
|
||||
@@ -861,7 +971,7 @@ class TestConcurrentDMDedup:
|
||||
with patch("app.packet_processor.broadcast_event", mock_broadcast):
|
||||
results = await asyncio.gather(
|
||||
create_dm_message_from_decrypted(
|
||||
packet_id=pkt1,
|
||||
packet_id=packet_id,
|
||||
decrypted=decrypted,
|
||||
their_public_key=CONTACT_PUB,
|
||||
our_public_key=OUR_PUB,
|
||||
@@ -870,7 +980,7 @@ class TestConcurrentDMDedup:
|
||||
outgoing=False,
|
||||
),
|
||||
create_dm_message_from_decrypted(
|
||||
packet_id=pkt2,
|
||||
packet_id=packet_id,
|
||||
decrypted=decrypted,
|
||||
their_public_key=CONTACT_PUB,
|
||||
our_public_key=OUR_PUB,
|
||||
|
||||
@@ -728,3 +728,54 @@ class TestCommunityMqttIataValidation:
|
||||
_validate_mqtt_community_config({"iata": "PDX", "auth_mode": "password"})
|
||||
assert exc_info.value.status_code == 400
|
||||
assert "username and password" in exc_info.value.detail
|
||||
|
||||
|
||||
class TestFanoutConfigMutationInvariant:
|
||||
"""Persisted fanout rows should always be valid and canonical."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_disabled_create_still_validates_config(self, test_db):
|
||||
from app.routers.fanout import FanoutConfigCreate, create_fanout_config
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await create_fanout_config(
|
||||
FanoutConfigCreate(
|
||||
type="mqtt_community",
|
||||
name="Invalid draft",
|
||||
config={},
|
||||
scope={},
|
||||
enabled=False,
|
||||
)
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 400
|
||||
assert "IATA" in exc_info.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_enable_only_patch_persists_normalized_config(self, test_db):
|
||||
from app.repository.fanout import FanoutConfigRepository
|
||||
from app.routers.fanout import FanoutConfigUpdate, update_fanout_config
|
||||
|
||||
cfg = await FanoutConfigRepository.create(
|
||||
config_type="mqtt_community",
|
||||
name="Community MQTT",
|
||||
config={
|
||||
"iata": "PDX",
|
||||
"broker_host": " mqtt.example.com ",
|
||||
"topic_template": "mesh2mqtt/{iata}/node/{Public_Key}",
|
||||
},
|
||||
scope={"messages": "none", "raw_packets": "all"},
|
||||
enabled=False,
|
||||
)
|
||||
|
||||
with patch("app.fanout.manager.fanout_manager.reload_config", new_callable=AsyncMock):
|
||||
updated = await update_fanout_config(
|
||||
cfg["id"],
|
||||
FanoutConfigUpdate(enabled=True),
|
||||
)
|
||||
|
||||
assert updated["enabled"] is True
|
||||
assert updated["config"]["broker_host"] == "mqtt.example.com"
|
||||
assert updated["config"]["topic_template"] == "mesh2mqtt/{IATA}/node/{PUBLIC_KEY}"
|
||||
assert updated["config"]["transport"] == "websockets"
|
||||
assert updated["config"]["auth_mode"] == "token"
|
||||
|
||||
@@ -5,8 +5,10 @@ from fastapi.testclient import TestClient
|
||||
|
||||
from app.frontend_static import (
|
||||
ASSET_CACHE_CONTROL,
|
||||
FRONTEND_BUILD_INSTRUCTIONS,
|
||||
INDEX_CACHE_CONTROL,
|
||||
STATIC_FILE_CACHE_CONTROL,
|
||||
register_first_available_frontend_static_routes,
|
||||
register_frontend_missing_fallback,
|
||||
register_frontend_static_routes,
|
||||
)
|
||||
@@ -28,8 +30,7 @@ def test_missing_dist_logs_error_and_keeps_app_running(tmp_path, caplog):
|
||||
with TestClient(app) as client:
|
||||
resp = client.get("/")
|
||||
assert resp.status_code == 404
|
||||
assert "npm install" in resp.json()["detail"]
|
||||
assert "npm run build" in resp.json()["detail"]
|
||||
assert FRONTEND_BUILD_INSTRUCTIONS in resp.json()["detail"]
|
||||
|
||||
|
||||
def test_missing_index_logs_error_and_skips_frontend_routes(tmp_path, caplog):
|
||||
@@ -120,3 +121,41 @@ def test_webmanifest_uses_forwarded_origin_headers(tmp_path):
|
||||
assert data["start_url"] == "https://mesh.example.com:8443/"
|
||||
assert data["scope"] == "https://mesh.example.com:8443/"
|
||||
assert data["id"] == "https://mesh.example.com:8443/"
|
||||
|
||||
|
||||
def test_first_available_prefers_dist_over_prebuilt(tmp_path):
|
||||
app = FastAPI()
|
||||
frontend_dir = tmp_path / "frontend"
|
||||
dist_dir = frontend_dir / "dist"
|
||||
prebuilt_dir = frontend_dir / "prebuilt"
|
||||
dist_dir.mkdir(parents=True)
|
||||
prebuilt_dir.mkdir(parents=True)
|
||||
(dist_dir / "index.html").write_text("<html><body>dist</body></html>")
|
||||
(prebuilt_dir / "index.html").write_text("<html><body>prebuilt</body></html>")
|
||||
|
||||
selected = register_first_available_frontend_static_routes(app, [dist_dir, prebuilt_dir])
|
||||
|
||||
assert selected == dist_dir.resolve()
|
||||
|
||||
with TestClient(app) as client:
|
||||
response = client.get("/")
|
||||
assert response.status_code == 200
|
||||
assert "dist" in response.text
|
||||
|
||||
|
||||
def test_first_available_uses_prebuilt_when_dist_missing(tmp_path):
|
||||
app = FastAPI()
|
||||
frontend_dir = tmp_path / "frontend"
|
||||
dist_dir = frontend_dir / "dist"
|
||||
prebuilt_dir = frontend_dir / "prebuilt"
|
||||
prebuilt_dir.mkdir(parents=True)
|
||||
(prebuilt_dir / "index.html").write_text("<html><body>prebuilt</body></html>")
|
||||
|
||||
selected = register_first_available_frontend_static_routes(app, [dist_dir, prebuilt_dir])
|
||||
|
||||
assert selected == prebuilt_dir.resolve()
|
||||
|
||||
with TestClient(app) as client:
|
||||
response = client.get("/")
|
||||
assert response.status_code == 200
|
||||
assert "prebuilt" in response.text
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user