mirror of
https://github.com/ipnet-mesh/meshcore-hub.git
synced 2026-03-28 17:42:56 +01:00
Compare commits
1 Commits
v0.8.1
...
patch/test
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
31d591723d |
103
.agentmap.yaml
103
.agentmap.yaml
@@ -1,103 +0,0 @@
|
||||
# MeshCore Hub — codebase orientation map
|
||||
# See: https://github.com/anthropics/agentmap
|
||||
|
||||
meta:
|
||||
project: meshcore-hub
|
||||
version: 1
|
||||
updated: "2026-02-27"
|
||||
stack:
|
||||
- python 3.13
|
||||
- fastapi
|
||||
- sqlalchemy (async)
|
||||
- paho-mqtt
|
||||
- click
|
||||
- lit-html SPA
|
||||
- tailwind + daisyui
|
||||
- sqlite
|
||||
|
||||
tasks:
|
||||
install: "pip install -e '.[dev]'"
|
||||
test: "pytest"
|
||||
run: "meshcore-hub api --reload"
|
||||
lint: "pre-commit run --all-files"
|
||||
|
||||
tree:
|
||||
src/meshcore_hub/:
|
||||
__main__.py: "Click CLI entry point, registers subcommands"
|
||||
common/:
|
||||
config.py: "pydantic-settings, all env vars [config]"
|
||||
database.py: "async SQLAlchemy session management"
|
||||
mqtt.py: "MQTT client helpers"
|
||||
i18n.py: "translation loader, t() function"
|
||||
models/:
|
||||
base.py: "Base, UUIDMixin, TimestampMixin"
|
||||
node.py: null
|
||||
member.py: null
|
||||
advertisement.py: null
|
||||
message.py: null
|
||||
telemetry.py: null
|
||||
node_tag.py: null
|
||||
schemas/:
|
||||
events.py: "inbound MQTT event schemas"
|
||||
commands.py: "outbound command schemas"
|
||||
nodes.py: "API request/response schemas"
|
||||
members.py: null
|
||||
messages.py: null
|
||||
interface/:
|
||||
receiver.py: "reads device events, publishes to MQTT"
|
||||
sender.py: "subscribes MQTT commands, writes to device"
|
||||
device.py: "meshcore library wrapper"
|
||||
mock_device.py: "fake device for testing"
|
||||
collector/:
|
||||
subscriber.py: "MQTT subscriber, routes events to handlers"
|
||||
handlers/: "per-event-type DB persistence"
|
||||
cleanup.py: "data retention and node cleanup"
|
||||
webhook.py: "forward events to HTTP endpoints"
|
||||
tag_import.py: "seed node tags from YAML"
|
||||
member_import.py: "seed members from YAML"
|
||||
api/:
|
||||
app.py: "FastAPI app factory"
|
||||
auth.py: "API key authentication"
|
||||
dependencies.py: "DI for db session and auth"
|
||||
metrics.py: "Prometheus /metrics endpoint"
|
||||
routes/: "REST endpoints per resource"
|
||||
web/:
|
||||
app.py: "FastAPI app factory, SPA shell"
|
||||
pages.py: "custom markdown page loader"
|
||||
middleware.py: null
|
||||
templates/:
|
||||
spa.html: "single Jinja2 shell template"
|
||||
static/js/spa/:
|
||||
app.js: "SPA entry, route registration"
|
||||
router.js: "History API client-side router"
|
||||
api.js: "fetch wrapper for API calls"
|
||||
components.js: "shared lit-html helpers, t() re-export"
|
||||
icons.js: "SVG icon functions"
|
||||
pages/: "lazy-loaded page modules"
|
||||
alembic/: "DB migrations"
|
||||
etc/:
|
||||
prometheus/: "Prometheus scrape + alert rules"
|
||||
alertmanager/: null
|
||||
seed/: "YAML seed data (node_tags, members)"
|
||||
tests/:
|
||||
|
||||
key_symbols:
|
||||
- src/meshcore_hub/__main__.py::cli — Click root group [entry-point]
|
||||
- src/meshcore_hub/common/config.py::CommonSettings — shared env config base
|
||||
- src/meshcore_hub/common/database.py::DatabaseManager — async session factory
|
||||
- src/meshcore_hub/common/models/base.py::Base — declarative base for all models
|
||||
- src/meshcore_hub/api/app.py::create_app — API FastAPI factory
|
||||
- src/meshcore_hub/web/app.py::create_app — Web FastAPI factory
|
||||
- src/meshcore_hub/api/auth.py::require_read — read-key auth dependency
|
||||
- src/meshcore_hub/api/auth.py::require_admin — admin-key auth dependency
|
||||
- src/meshcore_hub/collector/subscriber.py::MQTTSubscriber — event ingestion loop
|
||||
- src/meshcore_hub/interface/receiver.py::Receiver — device→MQTT bridge
|
||||
- src/meshcore_hub/interface/sender.py::Sender — MQTT→device bridge
|
||||
|
||||
conventions:
|
||||
- four Click subcommands: interface, collector, api, web
|
||||
- "MQTT topic pattern: {prefix}/{pubkey}/event/{name} and .../command/{name}"
|
||||
- env config via pydantic-settings, no manual os.environ
|
||||
- web SPA: ES modules + lit-html, pages export async render()
|
||||
- i18n via t() with JSON locale files in static/locales/
|
||||
- node tags are freeform key-value pairs, standard keys in AGENTS.md
|
||||
38
.env.example
38
.env.example
@@ -80,14 +80,6 @@ MQTT_PREFIX=meshcore
|
||||
# When enabled, uses TLS with system CA certificates (e.g., for Let's Encrypt)
|
||||
MQTT_TLS=false
|
||||
|
||||
# MQTT transport protocol
|
||||
# Options: tcp, websockets
|
||||
MQTT_TRANSPORT=tcp
|
||||
|
||||
# MQTT WebSocket path (used only when MQTT_TRANSPORT=websockets)
|
||||
# Common values: /mqtt, /
|
||||
MQTT_WS_PATH=/mqtt
|
||||
|
||||
# External port mappings for local MQTT broker (--profile mqtt only)
|
||||
MQTT_EXTERNAL_PORT=1883
|
||||
MQTT_WS_PORT=9001
|
||||
@@ -131,30 +123,6 @@ CONTACT_CLEANUP_DAYS=7
|
||||
# =============================================================================
|
||||
# The collector subscribes to MQTT events and stores them in the database
|
||||
|
||||
# Collector MQTT ingest mode
|
||||
# - native: expects <prefix>/<pubkey>/event/<event_name> topics
|
||||
# - letsmesh_upload: expects LetsMesh observer uploads on
|
||||
# <prefix>/<pubkey>/(packets|status|internal)
|
||||
COLLECTOR_INGEST_MODE=native
|
||||
|
||||
# LetsMesh decoder support (used only when COLLECTOR_INGEST_MODE=letsmesh_upload)
|
||||
# Set to false to disable external packet decoding
|
||||
COLLECTOR_LETSMESH_DECODER_ENABLED=true
|
||||
|
||||
# Decoder command (must be available in container PATH)
|
||||
# Examples: meshcore-decoder, /usr/local/bin/meshcore-decoder, npx meshcore-decoder
|
||||
COLLECTOR_LETSMESH_DECODER_COMMAND=meshcore-decoder
|
||||
|
||||
# Optional: channel secret keys (comma or space separated) used to decrypt GroupText
|
||||
# packets. This supports unlimited keys.
|
||||
# Note: Public + #test keys are built into the collector code by default.
|
||||
# To show friendly channel names in the web feed, use label=hex (example: bot=ABCDEF...).
|
||||
# Without keys, encrypted packets cannot be shown as plaintext.
|
||||
# COLLECTOR_LETSMESH_DECODER_KEYS=
|
||||
|
||||
# Timeout in seconds per decode invocation
|
||||
COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=2.0
|
||||
|
||||
# -------------------
|
||||
# Webhook Settings
|
||||
# -------------------
|
||||
@@ -267,12 +235,6 @@ WEB_PORT=8080
|
||||
# Supported: en (see src/meshcore_hub/web/static/locales/ for available translations)
|
||||
# WEB_LOCALE=en
|
||||
|
||||
# Locale used for date/time formatting in the web dashboard
|
||||
# Controls date ordering only; 24-hour clock is still used by default
|
||||
# Examples: en-US (MM/DD/YYYY), en-GB (DD/MM/YYYY)
|
||||
# Default: en-US
|
||||
# WEB_DATETIME_LOCALE=en-US
|
||||
|
||||
# Auto-refresh interval in seconds for list pages (nodes, advertisements, messages)
|
||||
# Set to 0 to disable auto-refresh
|
||||
# Default: 30
|
||||
|
||||
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -10,7 +10,7 @@ on:
|
||||
- ".python-version"
|
||||
- "pyproject.toml"
|
||||
- ".pre-commit-config.yaml"
|
||||
- ".github/workflows/**"
|
||||
- ".github/workflows/ci.yml"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
|
||||
43
.github/workflows/claude.yml
vendored
Normal file
43
.github/workflows/claude.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
|
||||
# prompt: 'Update the pull request description to include a summary of changes.'
|
||||
# claude_args: '--allowed-tools Bash(gh pr:*)'
|
||||
12
.github/workflows/docker.yml
vendored
12
.github/workflows/docker.yml
vendored
@@ -11,7 +11,7 @@ on:
|
||||
- "pyproject.toml"
|
||||
- "Dockerfile"
|
||||
- "docker-compose.yml"
|
||||
- ".github/workflows/**"
|
||||
- ".github/workflows/docker.yml"
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
@@ -31,14 +31,14 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v4
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Container Registry
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v4
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v6
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
type=sha
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v7
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
|
||||
27
.github/workflows/issue-triage.yml
vendored
Normal file
27
.github/workflows/issue-triage.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
name: Claude Issue Triage
|
||||
description: Run Claude Code for issue triage in GitHub Actions
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
triage-issue:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Run Claude Code for Issue Triage
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
prompt: "/label-issue REPO: ${{ github.repository }} ISSUE_NUMBER: ${{ github.event.issue.number }}"
|
||||
allowed_non_write_users: "*"
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,7 +3,6 @@
|
||||
!example/data/
|
||||
/seed/
|
||||
!example/seed/
|
||||
/content/
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
## TASK-001: Remove legacy HTML dashboard endpoint
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/api/routes/dashboard.py`
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
### Notes
|
||||
Removed the `dashboard()` route handler and its `@router.get("")` decorator. Removed `HTMLResponse` and `Request` imports no longer used. Updated existing tests to verify the HTML endpoint returns 404/405. All JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) remain intact.
|
||||
---
|
||||
|
||||
## TASK-002: Replace API key comparisons with constant-time comparison
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/api/auth.py`
|
||||
- `src/meshcore_hub/api/metrics.py`
|
||||
### Notes
|
||||
Added `import hmac` to both files. Replaced `==` comparisons with `hmac.compare_digest()` in `require_read`, `require_admin`, and `verify_basic_auth`. Added truthiness guards for `read_key`/`admin_key` in `require_read` since either can be `None` and `hmac.compare_digest()` raises `TypeError` on `None`.
|
||||
---
|
||||
|
||||
## TASK-003: Add WEB_TRUSTED_PROXY_HOSTS configuration setting
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/common/config.py`
|
||||
### Notes
|
||||
Added `web_trusted_proxy_hosts: str = Field(default="*", ...)` to `WebSettings` class. Automatically configurable via `WEB_TRUSTED_PROXY_HOSTS` env var through Pydantic Settings.
|
||||
---
|
||||
|
||||
## TASK-004: Integrate trusted proxy hosts into web app middleware and add startup warning
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
### Notes
|
||||
Replaced hardcoded `trusted_hosts="*"` in `ProxyHeadersMiddleware` with configured value. If value is `"*"`, passes string directly; otherwise splits on commas. Added startup warning when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS="*"`. `_is_authenticated_proxy_request` unchanged.
|
||||
---
|
||||
|
||||
## TASK-005: Escape config JSON in template script block to prevent XSS breakout
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
### Notes
|
||||
Added `.replace("</", "<\\/")` to `_build_config_json` return value. Prevents `</script>` breakout in the Jinja2 template's `<script>` block. `<\/` is valid JSON per spec and parsed correctly by `JSON.parse()`.
|
||||
---
|
||||
|
||||
## TASK-006: Fix stored XSS in admin node-tags page
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
|
||||
### Notes
|
||||
Added `escapeHtml` to imports. Escaped `nodeName` with `escapeHtml()` in copy-all and delete-all confirmation dialogs (2 `unsafeHTML()` calls). Escaped `activeTagKey` with `escapeHtml()` in single tag delete confirmation (`innerHTML` assignment). Translation template `<strong>` tags preserved.
|
||||
---
|
||||
|
||||
## TASK-007: Fix stored XSS in admin members page
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
|
||||
### Notes
|
||||
Added `escapeHtml` to imports. Escaped `memberName` with `escapeHtml()` before passing to `t()` in delete confirmation dialog. `innerHTML` retained for `<strong>` tag rendering from translation template.
|
||||
---
|
||||
|
||||
## TASK-008: Write tests for legacy dashboard endpoint removal
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
### Notes
|
||||
Added 5 new tests: 1 for trailing-slash 404/405 verification, 4 for authenticated JSON sub-route responses. Total 20 dashboard tests passing.
|
||||
---
|
||||
|
||||
## TASK-009: Write tests for constant-time API key comparison
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_api/test_auth.py`
|
||||
### Notes
|
||||
Restructured from 10 tests (2 classes) to 22 tests (4 classes): `TestReadAuthentication` (9), `TestAdminAuthentication` (4), `TestMetricsAuthentication` (7), `TestHealthEndpoint` (2). Added coverage for multi-endpoint read/admin key acceptance, missing auth header rejection, and metrics credential validation.
|
||||
---
|
||||
|
||||
## TASK-010: Write tests for trusted proxy hosts configuration and startup warning
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_common/test_config.py`
|
||||
- `tests/test_web/test_app.py`
|
||||
### Notes
|
||||
Added 3 config tests (default value, specific IP, comma-separated list) and 5 web app tests (warning logged with wildcard+admin, no warning with specific hosts, no warning with admin disabled, comma list parsing, wildcard passed as string).
|
||||
---
|
||||
|
||||
## TASK-011: Write tests for config JSON script block escaping
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_web/test_app.py`
|
||||
### Notes
|
||||
Added 5 tests in `TestConfigJsonXssEscaping` class: rendered HTML escaping, normal values unaffected, escaped JSON parseable, direct `_build_config_json` escaping, direct no-escaping-needed.
|
||||
---
|
||||
|
||||
## TASK-012: Update documentation for WEB_TRUSTED_PROXY_HOSTS setting
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `README.md`
|
||||
- `AGENTS.md`
|
||||
- `PLAN.md`
|
||||
### Notes
|
||||
Added `WEB_TRUSTED_PROXY_HOSTS` to environment variables sections in all three docs. Documented default value (`*`), production recommendation, and startup warning behavior.
|
||||
---
|
||||
@@ -1,162 +0,0 @@
|
||||
# Product Requirements Document
|
||||
|
||||
> Source: `.plans/2026/03/09/01-security-fixes/prompt.md`
|
||||
|
||||
## Project Overview
|
||||
|
||||
This project addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes must be backward-compatible and preserve existing API contracts.
|
||||
|
||||
## Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
|
||||
- Harden API key comparison against timing side-channel attacks
|
||||
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
|
||||
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
|
||||
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
|
||||
|
||||
## Functional Requirements
|
||||
|
||||
### REQ-001: Remove legacy HTML dashboard endpoint
|
||||
|
||||
**Description:** Remove the `GET /api/v1/dashboard/` route handler that renders a standalone HTML page with unescaped database content (stored XSS) and no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) must remain intact and unchanged.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] The `dashboard()` route handler in `api/routes/dashboard.py` is removed
|
||||
- [ ] The `HTMLResponse` import is removed (if no longer used)
|
||||
- [ ] `GET /api/v1/dashboard/` returns 404 or Method Not Allowed
|
||||
- [ ] `GET /api/v1/dashboard/stats` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/activity` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/message-activity` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/node-count` continues to return valid JSON with authentication
|
||||
- [ ] Existing API tests for JSON sub-routes still pass
|
||||
|
||||
### REQ-002: Use constant-time comparison for API key validation
|
||||
|
||||
**Description:** Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent timing side-channel attacks that could leak key material.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] All API key comparisons in `api/auth.py` use `hmac.compare_digest()` instead of `==`
|
||||
- [ ] All credential comparisons in `api/metrics.py` use `hmac.compare_digest()` instead of `==`
|
||||
- [ ] `hmac` is imported in all files where secret comparison occurs
|
||||
- [ ] The authentication behavior is unchanged — valid keys are accepted, invalid keys are rejected
|
||||
- [ ] Tests confirm authentication still works correctly with valid and invalid keys
|
||||
|
||||
### REQ-003: Add configurable trusted proxy hosts for admin authentication
|
||||
|
||||
**Description:** Add a `WEB_TRUSTED_PROXY_HOSTS` configuration setting that controls which hosts are trusted for proxy authentication headers (`X-Forwarded-User`, `X-Auth-Request-User`, `Authorization: Basic`). The setting defaults to `*` for backward compatibility. A startup warning is emitted when admin is enabled with the wildcard default. The `Authorization: Basic` header check must be preserved for Nginx Proxy Manager compatibility.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] A `WEB_TRUSTED_PROXY_HOSTS` setting is added to the configuration (Pydantic Settings)
|
||||
- [ ] The setting defaults to `*` (backward compatible)
|
||||
- [ ] `ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`
|
||||
- [ ] A warning is logged at startup when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`
|
||||
- [ ] The warning message recommends restricting trusted hosts to the operator's proxy IP
|
||||
- [ ] The `_is_authenticated_proxy_request` function continues to accept `X-Forwarded-User`, `X-Auth-Request-User`, and `Authorization: Basic` headers
|
||||
- [ ] OAuth2 proxy setups continue to function correctly
|
||||
- [ ] Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust to that IP
|
||||
|
||||
### REQ-004: Escape config JSON in template script block
|
||||
|
||||
**Description:** Prevent XSS via `</script>` breakout in the `config_json|safe` template injection by escaping `</` sequences in the serialized JSON string before passing it to the Jinja2 template.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] `config_json` is escaped by replacing `</` with `<\\/` before template rendering (in `web/app.py`)
|
||||
- [ ] The `|safe` filter continues to be used (the escaping happens in Python, not Jinja2)
|
||||
- [ ] A config value containing `</script><script>alert(1)</script>` does not execute JavaScript
|
||||
- [ ] The SPA application correctly parses the escaped config JSON on the client side
|
||||
- [ ] Normal config values (without special characters) render unchanged
|
||||
|
||||
### REQ-005: Fix stored XSS in admin page JavaScript
|
||||
|
||||
**Description:** Sanitize API-sourced data (node names, tag keys, member names) before rendering in admin pages. Replace `unsafeHTML()` and direct `innerHTML` assignment with safe alternatives — either `escapeHtml()` (already available in `components.js`) or lit-html safe templating (`${value}` interpolation without `unsafeHTML`).
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Node names in `admin/node-tags.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] Tag keys in `admin/node-tags.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] Member names in `admin/members.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] All `unsafeHTML()` calls on API-sourced data in the identified files are replaced with safe alternatives
|
||||
- [ ] All direct `innerHTML` assignments of API-sourced data in the identified files are replaced with safe alternatives
|
||||
- [ ] A node name containing `<img src=x onerror=alert(1)>` renders as text, not as an HTML element
|
||||
- [ ] A member name containing `<script>alert(1)</script>` renders as text, not as executable script
|
||||
- [ ] Normal names (without special characters) continue to display correctly
|
||||
|
||||
## Non-Functional Requirements
|
||||
|
||||
### REQ-006: Backward compatibility
|
||||
|
||||
**Category:** Reliability
|
||||
|
||||
**Description:** All security fixes must maintain backward compatibility with existing deployments. No breaking changes to API contracts, configuration defaults, or deployment workflows.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] All existing API endpoints (except the removed HTML dashboard) return the same response format
|
||||
- [ ] Default configuration values preserve existing behavior without requiring operator action
|
||||
- [ ] Docker Compose deployments continue to function without configuration changes
|
||||
- [ ] All existing tests pass after the security fixes are applied
|
||||
|
||||
### REQ-007: No regression in authentication flows
|
||||
|
||||
**Category:** Security
|
||||
|
||||
**Description:** The security hardening must not introduce authentication regressions. Valid credentials must continue to be accepted, and invalid credentials must continue to be rejected, across all authentication methods.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] API read key authentication accepts valid keys and rejects invalid keys
|
||||
- [ ] API admin key authentication accepts valid keys and rejects invalid keys
|
||||
- [ ] Metrics endpoint authentication (if configured) accepts valid credentials and rejects invalid ones
|
||||
- [ ] Proxy header authentication continues to work with OAuth2 proxy setups
|
||||
- [ ] Basic auth header forwarding from Nginx Proxy Manager continues to work
|
||||
|
||||
## Technical Constraints and Assumptions
|
||||
|
||||
### Constraints
|
||||
|
||||
- Python 3.13+ (specified by project `.python-version`)
|
||||
- Must use `hmac.compare_digest()` from the Python standard library for constant-time comparison
|
||||
- The `Authorization: Basic` header check in `_is_authenticated_proxy_request` must not be removed or modified to validate credentials server-side — credential validation is the proxy's responsibility
|
||||
- Changes must not alter existing API response schemas or status codes (except removing the HTML dashboard endpoint)
|
||||
|
||||
### Assumptions
|
||||
|
||||
- The `escapeHtml()` utility in `components.js` correctly escapes `<`, `>`, `&`, `"`, and `'` characters
|
||||
- The SPA client-side JavaScript can parse JSON containing escaped `<\/` sequences (standard behavior per JSON spec)
|
||||
- Operators using proxy authentication have a reverse proxy (e.g., Nginx, Traefik, NPM) in front of MeshCore Hub
|
||||
|
||||
## Scope
|
||||
|
||||
### In Scope
|
||||
|
||||
- Removing the legacy HTML dashboard route handler (C1 + H2)
|
||||
- Replacing `==` with `hmac.compare_digest()` for all secret comparisons (H1)
|
||||
- Adding `WEB_TRUSTED_PROXY_HOSTS` configuration and startup warning (H3)
|
||||
- Escaping `</` in config JSON template injection (H4)
|
||||
- Fixing `unsafeHTML()`/`innerHTML` XSS in admin JavaScript pages (H5)
|
||||
- Updating tests to cover the security fixes
|
||||
- Updating documentation for the new `WEB_TRUSTED_PROXY_HOSTS` setting
|
||||
|
||||
### Out of Scope
|
||||
|
||||
- MEDIUM severity findings (CORS, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
|
||||
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
|
||||
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
|
||||
- Adding rate limiting infrastructure
|
||||
- Adding Content-Security-Policy or other security headers
|
||||
- Dependency version pinning or lockfile generation
|
||||
- Server-side credential validation for Basic auth (proxy responsibility)
|
||||
|
||||
## Suggested Tech Stack
|
||||
|
||||
| Layer | Technology | Rationale |
|
||||
|-------|-----------|-----------|
|
||||
| Secret comparison | `hmac.compare_digest()` (stdlib) | Specified by prompt; constant-time comparison prevents timing attacks |
|
||||
| Template escaping | Python `str.replace()` | Minimal approach to escape `</` in JSON before Jinja2 rendering |
|
||||
| Client-side escaping | `escapeHtml()` from `components.js` | Already available in the codebase; standard HTML entity escaping |
|
||||
| Configuration | Pydantic Settings | Specified by project stack; used for `WEB_TRUSTED_PROXY_HOSTS` |
|
||||
| Testing | pytest, pytest-asyncio | Specified by project stack |
|
||||
@@ -1,65 +0,0 @@
|
||||
# Phase: 01-security-fixes
|
||||
|
||||
## Overview
|
||||
|
||||
Address CRITICAL and HIGH severity vulnerabilities identified in the MeshCore Hub security audit across API and Web components. These findings represent exploitable vulnerabilities including XSS, timing attacks, authentication bypasses, and insecure defaults.
|
||||
|
||||
## Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities
|
||||
- Harden authentication mechanisms against timing attacks and header forgery
|
||||
- Prevent XSS vectors in both server-rendered HTML and client-side JavaScript
|
||||
- Secure default MQTT configuration against unauthenticated access
|
||||
|
||||
## Requirements
|
||||
|
||||
### C1 + H2 — Remove legacy HTML dashboard endpoint
|
||||
- **File:** `src/meshcore_hub/api/routes/dashboard.py:367-536`
|
||||
- The `GET /api/v1/dashboard/` endpoint is a standalone HTML page with two CRITICAL/HIGH issues: stored XSS (unescaped DB content in f-string HTML) and missing authentication
|
||||
- The SPA web dashboard provides a full-featured replacement, making this endpoint redundant
|
||||
- **Fix:** Remove the `dashboard()` route handler and its `HTMLResponse` import. Keep all JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) intact.
|
||||
|
||||
### H1 — Fix timing attack on API key comparison
|
||||
- **Files:** `api/auth.py:82,127` | `api/metrics.py:57`
|
||||
- All secret comparisons use Python `==`, which is not constant-time
|
||||
- **Fix:** Replace with `hmac.compare_digest()` for all key/credential comparisons
|
||||
|
||||
### H3 — Harden admin auth against proxy header forgery
|
||||
- **File:** `web/app.py:73-86,239`
|
||||
- Admin access trusts `X-Forwarded-User`, `X-Auth-Request-User`, or `Authorization: Basic` header
|
||||
- `ProxyHeadersMiddleware(trusted_hosts="*")` accepts forged headers from any client
|
||||
- The `Authorization: Basic` check must be preserved — it is required by the Nginx Proxy Manager (NPM) Access List setup documented in README.md (NPM validates credentials and forwards the header)
|
||||
- **Fix:** Add a `WEB_TRUSTED_PROXY_HOSTS` config setting (default `*` for backward compatibility). Pass it to `ProxyHeadersMiddleware(trusted_hosts=...)`. Add a startup warning when `WEB_ADMIN_ENABLED=true` and `trusted_hosts` is still `*`, recommending operators restrict it to their proxy IP. Do NOT remove the Basic auth header check or validate credentials server-side — that is the proxy's responsibility.
|
||||
|
||||
### H4 — Fix XSS via config_json|safe script block breakout
|
||||
- **File:** `web/templates/spa.html:188` | `web/app.py:157-183`
|
||||
- Operator config values injected into `<script>` block with `|safe` — a value containing `</script>` breaks out and executes arbitrary JS
|
||||
- **Fix:** Escape `</` sequences in the JSON string: `config_json = json.dumps(config).replace("</", "<\\/")`
|
||||
|
||||
### H5 — Fix stored XSS via unsafeHTML/innerHTML with API-sourced data
|
||||
- **Files:** `web/static/js/spa/pages/admin/node-tags.js:243,272,454` | `admin/members.js:309`
|
||||
- Node names, tag keys, and member names from the API are interpolated into HTML via `unsafeHTML()` and direct `innerHTML` assignment
|
||||
- **Fix:** Use `escapeHtml()` (already in `components.js`) on API data before HTML interpolation, or replace with lit-html safe templating
|
||||
|
||||
|
||||
## Constraints
|
||||
|
||||
- Must not break existing functionality or API contracts
|
||||
- Changes to docker-compose.yml and mosquitto.conf must remain backward-compatible (use env var defaults)
|
||||
- The `_is_authenticated_proxy_request` function must continue to work with OAuth2 proxy setups — only add defense-in-depth, don't remove proxy header support entirely
|
||||
|
||||
## Out of Scope
|
||||
|
||||
- MEDIUM severity findings (CORS config, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
|
||||
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
|
||||
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
|
||||
- Adding rate limiting infrastructure
|
||||
- Adding Content-Security-Policy or other security headers
|
||||
- Dependency version pinning or lockfile generation
|
||||
|
||||
## References
|
||||
|
||||
- Security audit performed in this conversation (2026-03-09)
|
||||
- OWASP Top 10: XSS (A7:2017), Broken Authentication (A2:2017)
|
||||
- Python `hmac.compare_digest` documentation
|
||||
- FastAPI security best practices
|
||||
@@ -1,54 +0,0 @@
|
||||
# Code review round 001
|
||||
# Phase: .plans/2026/03/09/01-security-fixes
|
||||
# Scope: full
|
||||
# Generated by: /jp-codereview
|
||||
|
||||
issues:
|
||||
- id: "ISSUE-001"
|
||||
severity: "MINOR"
|
||||
category: "integration"
|
||||
file: "src/meshcore_hub/web/app.py"
|
||||
line: 251
|
||||
description: |
|
||||
The startup warning for insecure trusted proxy hosts checks `settings.web_admin_enabled`
|
||||
instead of the effective admin_enabled value that gets stored in `app.state.admin_enabled`.
|
||||
The `create_app()` function accepts an `admin_enabled` parameter (line 193) that can override
|
||||
the setting. If a caller passes `admin_enabled=True` but `settings.web_admin_enabled` is False,
|
||||
the warning will not fire despite admin being enabled. In practice this does not affect production
|
||||
deployments (CLI always uses the settings value), only programmatic/test usage.
|
||||
suggestion: |
|
||||
Consider computing the effective admin_enabled value before the warning check and using
|
||||
that for both the warning and `app.state.admin_enabled`, e.g.:
|
||||
`effective_admin = admin_enabled if admin_enabled is not None else settings.web_admin_enabled`
|
||||
related_tasks:
|
||||
- "TASK-004"
|
||||
|
||||
- id: "ISSUE-002"
|
||||
severity: "MINOR"
|
||||
category: "style"
|
||||
file: "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
|
||||
line: 3
|
||||
description: |
|
||||
The `unsafeHTML` import is retained and still used on lines 243 and 272. Although the
|
||||
API-sourced data (`nodeName`) is now safely escaped via `escapeHtml()` before interpolation,
|
||||
the continued use of `unsafeHTML()` may confuse future reviewers into thinking the XSS
|
||||
fix is incomplete. The `unsafeHTML()` is needed to render the translation template's HTML
|
||||
tags (e.g., `<strong>`), so this is functionally correct.
|
||||
suggestion: |
|
||||
Add a brief inline comment above each `unsafeHTML()` call explaining that the dynamic
|
||||
values are pre-escaped and `unsafeHTML()` is only needed for the template's HTML formatting.
|
||||
related_tasks:
|
||||
- "TASK-006"
|
||||
|
||||
summary:
|
||||
total_issues: 2
|
||||
critical: 0
|
||||
major: 0
|
||||
minor: 2
|
||||
by_category:
|
||||
integration: 1
|
||||
architecture: 0
|
||||
security: 0
|
||||
duplication: 0
|
||||
error-handling: 0
|
||||
style: 1
|
||||
@@ -1,70 +0,0 @@
|
||||
# PRD Review
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
|
||||
> Prompt: `.plans/2026/03/09/01-security-fixes/prompt.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The PRD fully covers all five security requirements from the prompt with clear, implementable, and testable acceptance criteria. No contradictions, blocking ambiguities, or feasibility concerns were found. One prompt goal ("Secure default MQTT configuration") has no corresponding requirement in either the prompt or the PRD, but since no prompt requirement addresses it, the PRD correctly does not fabricate one.
|
||||
|
||||
## Coverage Assessment
|
||||
|
||||
| Prompt Item | PRD Section | Covered? | Notes |
|
||||
|---|---|---|---|
|
||||
| C1+H2: Remove legacy HTML dashboard endpoint | REQ-001 | Yes | Route removal, import cleanup, sub-route preservation all specified |
|
||||
| H1: Fix timing attack on API key comparison | REQ-002 | Yes | Files and `hmac.compare_digest()` approach match |
|
||||
| H3: Harden admin auth / proxy header forgery | REQ-003 | Yes | Config setting, default, warning, Basic auth preservation all covered |
|
||||
| H4: Fix XSS via config_json\|safe breakout | REQ-004 | Yes | Escape approach and XSS test payload specified |
|
||||
| H5: Fix stored XSS via unsafeHTML/innerHTML | REQ-005 | Yes | Files, fix approach, and XSS test payloads specified |
|
||||
| Constraint: No breaking changes to API contracts | REQ-006 | Yes | |
|
||||
| Constraint: docker-compose.yml/mosquitto.conf backward-compatible | REQ-006 | Partial | REQ-006 covers Docker Compose but not mosquitto.conf; moot since no requirement changes mosquitto.conf |
|
||||
| Constraint: _is_authenticated_proxy_request works with OAuth2 | REQ-003, REQ-007 | Yes | |
|
||||
| Goal: Secure default MQTT configuration | -- | No | Goal stated in prompt but no prompt requirement addresses it; PRD correctly does not fabricate one |
|
||||
| Out of scope items | Scope section | Yes | All exclusions match prompt |
|
||||
|
||||
**Coverage summary:** 5 of 5 prompt requirements fully covered, 1 constraint partially covered (moot), 1 prompt goal has no corresponding requirement in the prompt itself.
|
||||
|
||||
## Requirement Evaluation
|
||||
|
||||
All requirements passed evaluation. Minor observations noted below.
|
||||
|
||||
### REQ-003: Add configurable trusted proxy hosts
|
||||
|
||||
- **Implementability:** Pass -- A developer familiar with Pydantic Settings and `ProxyHeadersMiddleware` can implement this without ambiguity. The env var format (comma-separated list vs. single value) is not explicitly stated but follows standard Pydantic patterns.
|
||||
- **Testability:** Pass
|
||||
- **Completeness:** Pass
|
||||
- **Consistency:** Pass
|
||||
|
||||
### REQ-006: Backward compatibility
|
||||
|
||||
- **Implementability:** Pass
|
||||
- **Testability:** Pass
|
||||
- **Completeness:** Pass -- The prompt constraint about mosquitto.conf backward compatibility is not explicitly mentioned, but no requirement modifies mosquitto.conf, making this moot.
|
||||
- **Consistency:** Pass
|
||||
|
||||
## Structural Issues
|
||||
|
||||
### Contradictions
|
||||
|
||||
None found.
|
||||
|
||||
### Ambiguities
|
||||
|
||||
None that would block implementation. The `WEB_TRUSTED_PROXY_HOSTS` env var format is a minor detail resolvable by the developer from the `ProxyHeadersMiddleware` API and standard Pydantic Settings patterns.
|
||||
|
||||
### Missing Edge Cases
|
||||
|
||||
None significant. The `hmac.compare_digest()` change (REQ-002) assumes the existing code handles the "no key configured" case before reaching the comparison, which is standard practice and verifiable during implementation.
|
||||
|
||||
### Feasibility Concerns
|
||||
|
||||
None.
|
||||
|
||||
### Scope Inconsistencies
|
||||
|
||||
The prompt states a goal of "Secure default MQTT configuration against unauthenticated access" but provides no requirement for it. The PRD drops this goal without explanation. This is a prompt-level gap, not a PRD-level gap -- the PRD should not invent requirements that the prompt does not specify.
|
||||
|
||||
## Action Items
|
||||
|
||||
No action items. The PRD is ready for task breakdown.
|
||||
@@ -1,90 +0,0 @@
|
||||
# Task Review
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> Tasks: `.plans/2026/03/09/01-security-fixes/tasks.yaml`
|
||||
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The task list is structurally sound, correctly ordered, and fully covers all 7 PRD requirements. The dependency graph is a valid DAG with no cycles or invalid references. No ordering issues, coverage gaps, vague tasks, or invalid fields were found. Two non-blocking warnings are noted: TASK-006 and TASK-007 (frontend XSS fixes) lack corresponding test tasks, and two pairs of independent tasks share output files but modify independent sections.
|
||||
|
||||
## Dependency Validation
|
||||
|
||||
### Reference Validity
|
||||
|
||||
All dependency references are valid. Every task ID referenced in a `dependencies` list corresponds to an existing task in the inventory.
|
||||
|
||||
### DAG Validation
|
||||
|
||||
The dependency graph is a valid directed acyclic graph. No cycles detected.
|
||||
|
||||
Topological layers:
|
||||
- **Layer 0 (roots):** TASK-001, TASK-002, TASK-003, TASK-005, TASK-006, TASK-007
|
||||
- **Layer 1:** TASK-004 (depends on TASK-003), TASK-008 (depends on TASK-001), TASK-009 (depends on TASK-002), TASK-011 (depends on TASK-005)
|
||||
- **Layer 2:** TASK-010 (depends on TASK-003, TASK-004), TASK-012 (depends on TASK-003, TASK-004)
|
||||
|
||||
### Orphan Tasks
|
||||
|
||||
No orphan tasks detected. All non-root tasks with dependencies are either terminal test/docs tasks (TASK-008, TASK-009, TASK-010, TASK-011, TASK-012) or integration tasks (TASK-004). Root tasks without dependents (TASK-006, TASK-007) are excluded from orphan detection per the review protocol.
|
||||
|
||||
## Ordering Check
|
||||
|
||||
No blocking ordering issues detected.
|
||||
|
||||
**Observation (non-blocking):** Two pairs of independent tasks share output files:
|
||||
|
||||
1. **TASK-004 and TASK-005** both modify `src/meshcore_hub/web/app.py` without a dependency between them. TASK-004 modifies `ProxyHeadersMiddleware` (line ~239) and adds a startup warning, while TASK-005 modifies `_build_config_json` (line ~183). These are independent functions in the same file; no actual conflict exists.
|
||||
|
||||
2. **TASK-010 and TASK-011** both modify `tests/test_web/test_app.py` without a dependency between them. Both add new test functions to the same test file. No actual conflict exists.
|
||||
|
||||
These are not blocking because neither task creates the shared file — both modify existing files in independent sections. Adding artificial dependencies would unnecessarily serialize parallelizable work.
|
||||
|
||||
## Coverage Check
|
||||
|
||||
### Uncovered Requirements
|
||||
|
||||
All PRD requirements are covered.
|
||||
|
||||
### Phantom References
|
||||
|
||||
No phantom references detected.
|
||||
|
||||
**Coverage summary:** 7 of 7 PRD requirements covered by tasks.
|
||||
|
||||
| Requirement | Tasks |
|
||||
|---|---|
|
||||
| REQ-001 | TASK-001, TASK-008 |
|
||||
| REQ-002 | TASK-002, TASK-009 |
|
||||
| REQ-003 | TASK-003, TASK-004, TASK-010, TASK-012 |
|
||||
| REQ-004 | TASK-005, TASK-011 |
|
||||
| REQ-005 | TASK-006, TASK-007 |
|
||||
| REQ-006 | TASK-001, TASK-003, TASK-004, TASK-005, TASK-006, TASK-007, TASK-008, TASK-010, TASK-011, TASK-012 |
|
||||
| REQ-007 | TASK-002, TASK-004, TASK-009 |
|
||||
|
||||
## Scope Check
|
||||
|
||||
### Tasks Too Large
|
||||
|
||||
No tasks flagged as too large. No task has `estimated_complexity: large`.
|
||||
|
||||
### Tasks Too Vague
|
||||
|
||||
No tasks flagged as too vague. All tasks have detailed descriptions (>50 chars), multiple testable acceptance criteria, and specific file paths in `files_affected`.
|
||||
|
||||
### Missing Test Tasks
|
||||
|
||||
Two implementation tasks lack corresponding test tasks:
|
||||
|
||||
- **TASK-006** (Fix stored XSS in admin node-tags page) — modifies `admin/node-tags.js` but no test task verifies the XSS fix in this JavaScript file. The acceptance criteria include XSS payload testing, but no automated test is specified. This is a frontend JavaScript change where manual verification or browser-based testing may be appropriate.
|
||||
|
||||
- **TASK-007** (Fix stored XSS in admin members page) — modifies `admin/members.js` but no test task verifies the XSS fix in this JavaScript file. Same reasoning as TASK-006.
|
||||
|
||||
**Note:** These are warnings, not blocking issues. The project's test infrastructure (`tests/test_web/`) focuses on server-side rendering and API responses. Client-side JavaScript XSS fixes are typically verified through acceptance criteria rather than automated unit tests.
|
||||
|
||||
### Field Validation
|
||||
|
||||
All tasks have valid fields:
|
||||
|
||||
- **Roles:** All `suggested_role` values are valid (`python`, `frontend`, `docs`).
|
||||
- **Complexity:** All `estimated_complexity` values are valid (`small`, `medium`).
|
||||
- **Completeness:** All 12 tasks have all required fields (`id`, `title`, `description`, `requirements`, `dependencies`, `suggested_role`, `acceptance_criteria`, `estimated_complexity`, `files_affected`). All list fields have at least one entry.
|
||||
@@ -1,22 +0,0 @@
|
||||
status: running
|
||||
phase_path: .plans/2026/03/09/01-security-fixes
|
||||
branch: fix/security-fixes
|
||||
current_phase: summary
|
||||
current_task: null
|
||||
fix_round: 0
|
||||
last_review_round: 1
|
||||
review_loop_exit_reason: success
|
||||
quality_gate: pass
|
||||
tasks:
|
||||
TASK-001: completed
|
||||
TASK-002: completed
|
||||
TASK-003: completed
|
||||
TASK-004: completed
|
||||
TASK-005: completed
|
||||
TASK-006: completed
|
||||
TASK-007: completed
|
||||
TASK-008: completed
|
||||
TASK-009: completed
|
||||
TASK-010: completed
|
||||
TASK-011: completed
|
||||
TASK-012: completed
|
||||
@@ -1,117 +0,0 @@
|
||||
# Phase Summary
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> Generated by: `/jp-summary`
|
||||
|
||||
## Project Overview
|
||||
|
||||
This phase addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes are backward-compatible and preserve existing API contracts.
|
||||
|
||||
### Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
|
||||
- Harden API key comparison against timing side-channel attacks
|
||||
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
|
||||
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
|
||||
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
|
||||
|
||||
## Task Execution
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total tasks | 12 |
|
||||
| Completed | 12 |
|
||||
| Failed | 0 |
|
||||
| Blocked | 0 |
|
||||
| Skipped | 0 |
|
||||
|
||||
### Task Details
|
||||
|
||||
| ID | Title | Role | Complexity | Status |
|
||||
|---|---|---|---|---|
|
||||
| TASK-001 | Remove legacy HTML dashboard endpoint | python | small | completed |
|
||||
| TASK-002 | Replace API key comparisons with constant-time comparison | python | small | completed |
|
||||
| TASK-003 | Add WEB_TRUSTED_PROXY_HOSTS configuration setting | python | small | completed |
|
||||
| TASK-004 | Integrate trusted proxy hosts into web app middleware and add startup warning | python | medium | completed |
|
||||
| TASK-005 | Escape config JSON in template script block to prevent XSS breakout | python | small | completed |
|
||||
| TASK-006 | Fix stored XSS in admin node-tags page | frontend | medium | completed |
|
||||
| TASK-007 | Fix stored XSS in admin members page | frontend | small | completed |
|
||||
| TASK-008 | Write tests for legacy dashboard endpoint removal | python | small | completed |
|
||||
| TASK-009 | Write tests for constant-time API key comparison | python | small | completed |
|
||||
| TASK-010 | Write tests for trusted proxy hosts configuration and startup warning | python | medium | completed |
|
||||
| TASK-011 | Write tests for config JSON script block escaping | python | small | completed |
|
||||
| TASK-012 | Update documentation for WEB_TRUSTED_PROXY_HOSTS setting | docs | small | completed |
|
||||
|
||||
### Requirement Coverage
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total PRD requirements | 7 |
|
||||
| Requirements covered by completed tasks | 7 |
|
||||
| Requirements with incomplete coverage | 0 |
|
||||
|
||||
All functional requirements (REQ-001 through REQ-005) and non-functional requirements (REQ-006, REQ-007) are fully covered by completed tasks.
|
||||
|
||||
## Files Created and Modified
|
||||
|
||||
### Created
|
||||
|
||||
- `tests/test_web/test_app.py`
|
||||
|
||||
### Modified
|
||||
|
||||
- `src/meshcore_hub/api/routes/dashboard.py`
|
||||
- `src/meshcore_hub/api/auth.py`
|
||||
- `src/meshcore_hub/api/metrics.py`
|
||||
- `src/meshcore_hub/common/config.py`
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
- `tests/test_api/test_auth.py`
|
||||
- `tests/test_common/test_config.py`
|
||||
- `README.md`
|
||||
- `AGENTS.md`
|
||||
- `PLAN.md`
|
||||
|
||||
## Review Rounds
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total review rounds | 1 |
|
||||
| Total issues found | 2 |
|
||||
| Issues fixed | 2 |
|
||||
| Issues deferred | 0 |
|
||||
| Issues remaining | 0 |
|
||||
| Regressions introduced | 0 |
|
||||
|
||||
### Round Details
|
||||
|
||||
#### Round 1 (scope: full)
|
||||
|
||||
- **Issues found:** 2 (0 CRITICAL, 0 MAJOR, 2 MINOR)
|
||||
- **Issues fixed:** 2 (both MINOR issues were addressed post-review)
|
||||
- **Exit reason:** success (no CRITICAL or MAJOR issues)
|
||||
|
||||
## Known Issues and Deferred Items
|
||||
|
||||
No known issues. Both MINOR issues identified in the code review were addressed:
|
||||
|
||||
- **ISSUE-001** (MINOR, integration) -- Startup warning for proxy hosts used `settings.web_admin_enabled` instead of the effective admin_enabled value. Fixed by computing `effective_admin` before the warning check.
|
||||
- **ISSUE-002** (MINOR, style) -- `unsafeHTML()` calls on pre-escaped data lacked explanatory comments. Fixed by adding inline HTML comments explaining that dynamic values are pre-escaped.
|
||||
|
||||
## Decisions
|
||||
|
||||
- **Truthiness guards for `hmac.compare_digest()`** -- Added `read_key and ...` / `admin_key and ...` guards in `require_read` because either key can be `None` when only one is configured, and `hmac.compare_digest()` raises `TypeError` on `None` arguments. This ensures the existing behavior of accepting either key type when configured.
|
||||
- **`unsafeHTML()` retained with `escapeHtml()` pre-processing** -- The `unsafeHTML()` calls in admin JS pages were retained because translation strings contain intentional HTML formatting tags (e.g., `<strong>`). API-sourced data is escaped before interpolation, making this pattern safe.
|
||||
- **`innerHTML` retained for tag delete confirmation** -- The delete confirmation in `node-tags.js` uses `innerHTML` because the translation template includes `<span>` formatting. The dynamic tag key is escaped with `escapeHtml()` before interpolation.
|
||||
|
||||
## Suggested Next Steps
|
||||
|
||||
1. Run full manual testing of admin pages (node-tags, members) with XSS payloads to verify fixes in a browser environment.
|
||||
2. Test `WEB_TRUSTED_PROXY_HOSTS` with a real reverse proxy (Traefik/Nginx) to verify proxy header trust restriction works as expected.
|
||||
3. Push commits and create a pull request for merge into `main`.
|
||||
@@ -1,401 +0,0 @@
|
||||
# Task list generated from PRD: .plans/2026/03/09/01-security-fixes/prd.md
|
||||
# Generated by: /jp-task-list
|
||||
|
||||
tasks:
|
||||
- id: "TASK-001"
|
||||
title: "Remove legacy HTML dashboard endpoint"
|
||||
description: |
|
||||
Remove the `dashboard()` route handler from `src/meshcore_hub/api/routes/dashboard.py` (lines ~367-536).
|
||||
This handler renders a standalone HTML page using f-string HTML with unescaped database content (stored XSS)
|
||||
and has no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`)
|
||||
must remain intact and unchanged.
|
||||
|
||||
Specifically:
|
||||
1. Delete the `dashboard()` async function and its `@router.get("")` decorator (the handler that returns HTMLResponse).
|
||||
2. Remove the `HTMLResponse` import from `fastapi.responses` if it is no longer used by any remaining route.
|
||||
3. Verify that `GET /api/v1/dashboard/stats`, `/activity`, `/message-activity`, and `/node-count` still function.
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "The `dashboard()` route handler is removed from `api/routes/dashboard.py`"
|
||||
- "`HTMLResponse` import is removed if no longer used"
|
||||
- "`GET /api/v1/dashboard/` returns 404 or 405"
|
||||
- "`GET /api/v1/dashboard/stats` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/activity` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/message-activity` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/node-count` returns valid JSON with authentication"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/api/routes/dashboard.py"
|
||||
|
||||
- id: "TASK-002"
|
||||
title: "Replace API key comparisons with constant-time comparison"
|
||||
description: |
|
||||
Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent
|
||||
timing side-channel attacks.
|
||||
|
||||
In `src/meshcore_hub/api/auth.py`:
|
||||
1. Add `import hmac` at the top of the file.
|
||||
2. Line ~82 in `require_read`: replace `if token == read_key or token == admin_key:` with
|
||||
`if hmac.compare_digest(token, read_key) or hmac.compare_digest(token, admin_key):`.
|
||||
3. Line ~127 in `require_admin`: replace `if token == admin_key:` with
|
||||
`if hmac.compare_digest(token, admin_key):`.
|
||||
|
||||
In `src/meshcore_hub/api/metrics.py`:
|
||||
1. Add `import hmac` at the top of the file.
|
||||
2. Line ~57: replace `return username == "metrics" and password == read_key` with
|
||||
`return hmac.compare_digest(username, "metrics") and hmac.compare_digest(password, read_key)`.
|
||||
|
||||
Note: `hmac.compare_digest()` requires both arguments to be strings (or both bytes). The existing code
|
||||
already works with strings, so no type conversion is needed.
|
||||
requirements:
|
||||
- "REQ-002"
|
||||
- "REQ-007"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "All API key comparisons in `api/auth.py` use `hmac.compare_digest()`"
|
||||
- "All credential comparisons in `api/metrics.py` use `hmac.compare_digest()`"
|
||||
- "`hmac` is imported in both files"
|
||||
- "Valid API keys are accepted and invalid keys are rejected (no behavior change)"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/api/auth.py"
|
||||
- "src/meshcore_hub/api/metrics.py"
|
||||
|
||||
- id: "TASK-003"
|
||||
title: "Add WEB_TRUSTED_PROXY_HOSTS configuration setting"
|
||||
description: |
|
||||
Add a `web_trusted_proxy_hosts` field to the web settings in `src/meshcore_hub/common/config.py`.
|
||||
|
||||
1. In the `WebSettings` class (or the relevant settings class containing web config), add:
|
||||
```python
|
||||
web_trusted_proxy_hosts: str = Field(default="*", description="Comma-separated list of trusted proxy hosts or '*' for all")
|
||||
```
|
||||
2. The field should accept a string value. The `ProxyHeadersMiddleware` in uvicorn accepts either `"*"` or a list of strings.
|
||||
If the value is `"*"`, pass it directly. Otherwise, split on commas and strip whitespace to produce a list.
|
||||
|
||||
This task only adds the configuration field. The middleware integration and startup warning are in TASK-004.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "A `web_trusted_proxy_hosts` setting exists in the configuration with default value `*`"
|
||||
- "The setting can be configured via the `WEB_TRUSTED_PROXY_HOSTS` environment variable"
|
||||
- "The setting accepts `*` or a comma-separated list of hostnames/IPs"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/common/config.py"
|
||||
|
||||
- id: "TASK-004"
|
||||
title: "Integrate trusted proxy hosts into web app middleware and add startup warning"
|
||||
description: |
|
||||
Update `src/meshcore_hub/web/app.py` to use the new `WEB_TRUSTED_PROXY_HOSTS` setting and emit a
|
||||
startup warning when using the insecure default.
|
||||
|
||||
1. Find the `ProxyHeadersMiddleware` addition (line ~239):
|
||||
```python
|
||||
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*")
|
||||
```
|
||||
Replace the hardcoded `"*"` with the configured value. If the config value is `"*"`, pass `"*"`.
|
||||
Otherwise, split the comma-separated string into a list of strings.
|
||||
|
||||
2. Add a startup warning (in the app factory or lifespan) when `WEB_ADMIN_ENABLED=true` and
|
||||
`WEB_TRUSTED_PROXY_HOSTS` is `"*"`:
|
||||
```python
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
if settings.web_admin_enabled and settings.web_trusted_proxy_hosts == "*":
|
||||
logger.warning(
|
||||
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
|
||||
"Consider restricting to your reverse proxy IP for production deployments."
|
||||
)
|
||||
```
|
||||
|
||||
3. Verify that the `_is_authenticated_proxy_request` function still accepts `X-Forwarded-User`,
|
||||
`X-Auth-Request-User`, and `Authorization: Basic` headers — do not modify that function.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "`ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`"
|
||||
- "A warning is logged at startup when admin is enabled and trusted hosts is `*`"
|
||||
- "The warning recommends restricting trusted hosts to the proxy IP"
|
||||
- "`_is_authenticated_proxy_request` still accepts all three header types"
|
||||
- "Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/app.py"
|
||||
|
||||
- id: "TASK-005"
|
||||
title: "Escape config JSON in template script block to prevent XSS breakout"
|
||||
description: |
|
||||
Prevent XSS via `</script>` breakout in the config JSON template injection in `src/meshcore_hub/web/app.py`.
|
||||
|
||||
In the `_build_config_json` function (or wherever `config_json` is prepared for the template, around
|
||||
line 183), after calling `json.dumps(config)`, escape `</` sequences:
|
||||
```python
|
||||
config_json = json.dumps(config).replace("</", "<\\/")
|
||||
```
|
||||
|
||||
This prevents a config value containing `</script><script>alert(1)</script>` from breaking out of the
|
||||
`<script>` block in `spa.html` (line ~188: `window.__APP_CONFIG__ = {{ config_json|safe }};`).
|
||||
|
||||
The `|safe` filter in the template remains unchanged — the escaping happens in Python before the value
|
||||
reaches Jinja2. The SPA client-side JavaScript can parse JSON containing `<\/` sequences because this
|
||||
is valid JSON per the spec.
|
||||
requirements:
|
||||
- "REQ-004"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "`config_json` is escaped by replacing `</` with `<\\/` before template rendering"
|
||||
- "The `|safe` filter continues to be used in the template"
|
||||
- "A config value containing `</script><script>alert(1)</script>` does not execute JavaScript"
|
||||
- "The SPA application correctly parses the escaped config JSON"
|
||||
- "Normal config values without special characters render unchanged"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/app.py"
|
||||
|
||||
- id: "TASK-006"
|
||||
title: "Fix stored XSS in admin node-tags page"
|
||||
description: |
|
||||
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js` to prevent
|
||||
stored XSS.
|
||||
|
||||
Three locations need fixing:
|
||||
|
||||
1. **Line ~243** — `unsafeHTML()` with nodeName in copy-all confirmation:
|
||||
```javascript
|
||||
<p class="mb-4">${unsafeHTML(t('common.copy_all_entity_description', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
```
|
||||
Replace `unsafeHTML()` with safe rendering. Either escape `nodeName` with `escapeHtml()` before
|
||||
passing to `t()`, or use `textContent`-based rendering.
|
||||
|
||||
2. **Line ~272** — `unsafeHTML()` with nodeName in delete-all confirmation:
|
||||
```javascript
|
||||
<p class="mb-4">${unsafeHTML(t('common.delete_all_entity_confirm', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
```
|
||||
Same fix as above.
|
||||
|
||||
3. **Line ~454** — `innerHTML` with tag key in delete confirmation:
|
||||
```javascript
|
||||
container.querySelector('#delete_tag_confirm_message').innerHTML = confirmMsg;
|
||||
```
|
||||
where `confirmMsg` is built with `activeTagKey` interpolated into an HTML span. Replace `innerHTML`
|
||||
with `textContent`, or escape `activeTagKey` with `escapeHtml()` before interpolation.
|
||||
|
||||
Import `escapeHtml` from `../components.js` if not already imported. The function escapes `<`, `>`,
|
||||
`&`, `"`, and `'` characters using DOM textContent.
|
||||
requirements:
|
||||
- "REQ-005"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "frontend"
|
||||
acceptance_criteria:
|
||||
- "Node names in node-tags.js are escaped before HTML rendering"
|
||||
- "Tag keys in node-tags.js are escaped before HTML rendering"
|
||||
- "All `unsafeHTML()` calls on API-sourced data are replaced with safe alternatives"
|
||||
- "All `innerHTML` assignments of API-sourced data are replaced with safe alternatives"
|
||||
- "A node name containing `<img src=x onerror=alert(1)>` renders as text"
|
||||
- "Normal names without special characters display correctly"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
|
||||
|
||||
- id: "TASK-007"
|
||||
title: "Fix stored XSS in admin members page"
|
||||
description: |
|
||||
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/members.js` to prevent
|
||||
stored XSS.
|
||||
|
||||
**Line ~309** — `innerHTML` with memberName in delete confirmation:
|
||||
```javascript
|
||||
container.querySelector('#delete_confirm_message').innerHTML = confirmMsg;
|
||||
```
|
||||
where `confirmMsg` is built from `t('common.delete_entity_confirm', { entity: ..., name: memberName })`.
|
||||
`memberName` comes from `row.dataset.memberName` which is API-sourced data.
|
||||
|
||||
Fix by escaping `memberName` with `escapeHtml()` before passing to `t()`, or replace `innerHTML` with
|
||||
`textContent`.
|
||||
|
||||
Import `escapeHtml` from `../components.js` if not already imported.
|
||||
requirements:
|
||||
- "REQ-005"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "frontend"
|
||||
acceptance_criteria:
|
||||
- "Member names in members.js are escaped before HTML rendering"
|
||||
- "The `innerHTML` assignment of API-sourced data is replaced with a safe alternative"
|
||||
- "A member name containing `<script>alert(1)</script>` renders as text"
|
||||
- "Normal member names display correctly"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/static/js/spa/pages/admin/members.js"
|
||||
|
||||
- id: "TASK-008"
|
||||
title: "Write tests for legacy dashboard endpoint removal"
|
||||
description: |
|
||||
Add or update tests in `tests/test_api/` to verify that the legacy HTML dashboard endpoint is removed
|
||||
while JSON sub-routes remain functional.
|
||||
|
||||
Tests to add/update:
|
||||
1. `GET /api/v1/dashboard/` returns 404 or 405 (no longer serves HTML).
|
||||
2. `GET /api/v1/dashboard/stats` returns 200 with valid JSON when authenticated.
|
||||
3. `GET /api/v1/dashboard/activity` returns 200 with valid JSON when authenticated.
|
||||
4. `GET /api/v1/dashboard/message-activity` returns 200 with valid JSON when authenticated.
|
||||
5. `GET /api/v1/dashboard/node-count` returns 200 with valid JSON when authenticated.
|
||||
|
||||
Use the existing test fixtures and patterns from `tests/test_api/`. Check `tests/conftest.py` for
|
||||
available fixtures (test client, db session, auth headers).
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-001"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test confirms `GET /api/v1/dashboard/` returns 404 or 405"
|
||||
- "Tests confirm all four JSON sub-routes return valid JSON with authentication"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_api/test_dashboard.py"
|
||||
|
||||
- id: "TASK-009"
|
||||
title: "Write tests for constant-time API key comparison"
|
||||
description: |
|
||||
Add or update tests in `tests/test_api/` to verify that authentication still works correctly after
|
||||
switching to `hmac.compare_digest()`.
|
||||
|
||||
Tests to add/update:
|
||||
1. Valid read key is accepted by read-protected endpoints.
|
||||
2. Valid admin key is accepted by admin-protected endpoints.
|
||||
3. Invalid keys are rejected with 401/403.
|
||||
4. Valid admin key also grants read access.
|
||||
5. Metrics endpoint accepts valid credentials and rejects invalid ones (if metrics auth is testable).
|
||||
|
||||
These tests verify no behavioral regression from the `==` to `hmac.compare_digest()` change.
|
||||
Use existing test patterns and fixtures from `tests/test_api/`.
|
||||
requirements:
|
||||
- "REQ-002"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-002"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Tests confirm valid read key is accepted"
|
||||
- "Tests confirm valid admin key is accepted"
|
||||
- "Tests confirm invalid keys are rejected"
|
||||
- "Tests confirm metrics auth works correctly"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_api/test_auth.py"
|
||||
|
||||
- id: "TASK-010"
|
||||
title: "Write tests for trusted proxy hosts configuration and startup warning"
|
||||
description: |
|
||||
Add tests to verify the `WEB_TRUSTED_PROXY_HOSTS` configuration setting and the startup warning.
|
||||
|
||||
Tests to add:
|
||||
1. Default value of `WEB_TRUSTED_PROXY_HOSTS` is `*`.
|
||||
2. Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP is correctly parsed.
|
||||
3. Setting `WEB_TRUSTED_PROXY_HOSTS` to a comma-separated list is correctly parsed into a list.
|
||||
4. A warning is logged when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`.
|
||||
5. No warning is logged when `WEB_TRUSTED_PROXY_HOSTS` is set to a specific value.
|
||||
|
||||
Place config tests in `tests/test_common/` and web app tests in `tests/test_web/`.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
- "TASK-004"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Tests confirm default value is `*`"
|
||||
- "Tests confirm specific IP/list parsing works"
|
||||
- "Tests confirm startup warning is emitted with wildcard default"
|
||||
- "Tests confirm no warning when specific hosts are configured"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "tests/test_common/test_config.py"
|
||||
- "tests/test_web/test_app.py"
|
||||
|
||||
- id: "TASK-011"
|
||||
title: "Write tests for config JSON script block escaping"
|
||||
description: |
|
||||
Add tests in `tests/test_web/` to verify that the config JSON escaping prevents XSS breakout.
|
||||
|
||||
Tests to add:
|
||||
1. A config value containing `</script><script>alert(1)</script>` is escaped to `<\/script>...` in
|
||||
the rendered HTML.
|
||||
2. A config value without special characters renders unchanged.
|
||||
3. The escaped JSON is still valid and parseable by `json.loads()` (after un-escaping `<\/` back to `</`
|
||||
if needed, though `json.loads` handles `<\/` fine).
|
||||
|
||||
Test by calling the config JSON builder function directly or by checking the rendered template output.
|
||||
requirements:
|
||||
- "REQ-004"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-005"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test confirms `</script>` in config values is escaped to `<\\/script>`"
|
||||
- "Test confirms normal config values are unaffected"
|
||||
- "Test confirms escaped JSON is still valid and parseable"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_web/test_app.py"
|
||||
|
||||
- id: "TASK-012"
|
||||
title: "Update documentation for WEB_TRUSTED_PROXY_HOSTS setting"
|
||||
description: |
|
||||
Update project documentation to document the new `WEB_TRUSTED_PROXY_HOSTS` environment variable.
|
||||
|
||||
Files to update:
|
||||
|
||||
1. **README.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the environment variables table with description:
|
||||
"Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts).
|
||||
Recommended: set to your reverse proxy IP in production."
|
||||
|
||||
2. **AGENTS.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the Environment Variables section with the same description.
|
||||
|
||||
3. **PLAN.md** — If there is a configuration section, add the new variable there as well.
|
||||
|
||||
Ensure the documentation notes:
|
||||
- Default is `*` for backward compatibility
|
||||
- A startup warning is emitted when using the default with admin enabled
|
||||
- Operators should set this to their reverse proxy IP in production
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
- "TASK-004"
|
||||
suggested_role: "docs"
|
||||
acceptance_criteria:
|
||||
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in README.md"
|
||||
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in AGENTS.md"
|
||||
- "Documentation notes the default value, startup warning, and production recommendation"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "README.md"
|
||||
- "AGENTS.md"
|
||||
- "PLAN.md"
|
||||
@@ -12,7 +12,6 @@ This document provides context and guidelines for AI coding assistants working o
|
||||
- `source .venv/bin/activate`
|
||||
* You MUST install all project dependencies using `pip install -e ".[dev]"` command`
|
||||
* You MUST install `pre-commit` for quality checks
|
||||
* You MUST keep project documentation in sync with behavior/config/schema changes made in code (at minimum update relevant sections in `README.md`, `SCHEMAS.md`, `PLAN.md`, and/or `TASKS.md` when applicable)
|
||||
* Before commiting:
|
||||
- Run **targeted tests** for the components you changed, not the full suite:
|
||||
- `pytest tests/test_web/` for web-only changes (templates, static JS, web routes)
|
||||
@@ -612,7 +611,6 @@ Key variables:
|
||||
- `MQTT_TLS` - Enable TLS/SSL for MQTT (default: `false`)
|
||||
- `API_READ_KEY`, `API_ADMIN_KEY` - API authentication keys
|
||||
- `WEB_ADMIN_ENABLED` - Enable admin interface at /a/ (default: `false`, requires auth proxy)
|
||||
- `WEB_TRUSTED_PROXY_HOSTS` - Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled.
|
||||
- `WEB_THEME` - Default theme for the web dashboard (default: `dark`, options: `dark`, `light`). Users can override via the theme toggle in the navbar, which persists their preference in browser localStorage.
|
||||
- `WEB_AUTO_REFRESH_SECONDS` - Auto-refresh interval in seconds for list pages (default: `30`, `0` to disable)
|
||||
- `TZ` - Timezone for web dashboard date/time display (default: `UTC`, e.g., `America/New_York`, `Europe/London`)
|
||||
@@ -641,8 +639,7 @@ ${CONTENT_HOME}/
|
||||
│ └── getting-started.md # Example: Getting Started (/pages/getting-started)
|
||||
└── media/ # Custom media files
|
||||
└── images/
|
||||
├── logo.svg # Full-color custom logo (default)
|
||||
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
|
||||
└── logo.svg # Custom logo (replaces default favicon and navbar/home logo)
|
||||
```
|
||||
|
||||
Pages use YAML frontmatter for metadata:
|
||||
|
||||
17
Dockerfile
17
Dockerfile
@@ -65,26 +65,9 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
# For serial port access
|
||||
udev \
|
||||
# LetsMesh decoder runtime
|
||||
nodejs \
|
||||
npm \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mkdir -p /data
|
||||
|
||||
# Install meshcore-decoder CLI.
|
||||
RUN mkdir -p /opt/letsmesh-decoder \
|
||||
&& cd /opt/letsmesh-decoder \
|
||||
&& npm init -y >/dev/null 2>&1 \
|
||||
&& npm install --omit=dev @michaelhart/meshcore-decoder@0.2.7 patch-package
|
||||
|
||||
# Apply maintained meshcore-decoder compatibility patch.
|
||||
COPY patches/@michaelhart+meshcore-decoder+0.2.7.patch /opt/letsmesh-decoder/patches/@michaelhart+meshcore-decoder+0.2.7.patch
|
||||
RUN cd /opt/letsmesh-decoder \
|
||||
&& npx patch-package --error-on-fail \
|
||||
&& npm uninstall patch-package \
|
||||
&& npm prune --omit=dev
|
||||
RUN ln -s /opt/letsmesh-decoder/node_modules/.bin/meshcore-decoder /usr/local/bin/meshcore-decoder
|
||||
|
||||
# Copy virtual environment from builder
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
14
PLAN.md
14
PLAN.md
@@ -489,16 +489,6 @@ ${DATA_HOME}/
|
||||
|----------|---------|-------------|
|
||||
| DATABASE_URL | sqlite:///{DATA_HOME}/collector/meshcore.db | SQLAlchemy URL |
|
||||
| TAGS_FILE | {DATA_HOME}/collector/tags.json | Path to tags JSON file |
|
||||
| COLLECTOR_INGEST_MODE | native | Ingest mode (`native` or `letsmesh_upload`) |
|
||||
| COLLECTOR_LETSMESH_DECODER_ENABLED | true | Enable external packet decoding in LetsMesh mode |
|
||||
|
||||
LetsMesh compatibility parity note:
|
||||
- `status` feed packets are stored as informational `letsmesh_status` events and do not create advertisement rows.
|
||||
- Advertisement rows in LetsMesh mode are created from decoded payload type `4` only.
|
||||
- Decoded payload type `11` is normalized to native `contact` updates.
|
||||
- Decoded payload type `9` is normalized to native `trace_data`.
|
||||
- Decoded payload type `8` is normalized to informational `path_updated`.
|
||||
- Decoded payload type `1` can map to native response-style events when decrypted structured content is available.
|
||||
|
||||
### API
|
||||
| Variable | Default | Description |
|
||||
@@ -516,10 +506,6 @@ LetsMesh compatibility parity note:
|
||||
| WEB_PORT | 8080 | Web bind port |
|
||||
| API_BASE_URL | http://localhost:8000 | API endpoint |
|
||||
| API_KEY | | API key for queries |
|
||||
| WEB_TRUSTED_PROXY_HOSTS | * | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. |
|
||||
| WEB_LOCALE | en | UI translation locale |
|
||||
| WEB_DATETIME_LOCALE | en-US | Date formatting locale for UI timestamps |
|
||||
| TZ | UTC | Timezone used for UI timestamp rendering |
|
||||
| NETWORK_DOMAIN | | Network domain |
|
||||
| NETWORK_NAME | MeshCore Network | Network name |
|
||||
| NETWORK_CITY | | City location |
|
||||
|
||||
107
README.md
107
README.md
@@ -278,8 +278,6 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
| `MQTT_PASSWORD` | *(none)* | MQTT password (optional) |
|
||||
| `MQTT_PREFIX` | `meshcore` | Topic prefix for all MQTT messages |
|
||||
| `MQTT_TLS` | `false` | Enable TLS/SSL for MQTT connection |
|
||||
| `MQTT_TRANSPORT` | `tcp` | MQTT transport (`tcp` or `websockets`) |
|
||||
| `MQTT_WS_PATH` | `/mqtt` | MQTT WebSocket path (used when `MQTT_TRANSPORT=websockets`) |
|
||||
|
||||
### Interface Settings
|
||||
|
||||
@@ -293,44 +291,6 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
| `CONTACT_CLEANUP_ENABLED` | `true` | Enable automatic removal of stale contacts from companion node |
|
||||
| `CONTACT_CLEANUP_DAYS` | `7` | Remove contacts not advertised for this many days |
|
||||
|
||||
### Collector Settings
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `COLLECTOR_INGEST_MODE` | `native` | Ingest mode (`native` or `letsmesh_upload`) |
|
||||
| `COLLECTOR_LETSMESH_DECODER_ENABLED` | `true` | Enable external LetsMesh packet decoding |
|
||||
| `COLLECTOR_LETSMESH_DECODER_COMMAND` | `meshcore-decoder` | Decoder CLI command |
|
||||
| `COLLECTOR_LETSMESH_DECODER_KEYS` | *(none)* | Additional decoder channel keys (`label=hex`, `label:hex`, or `hex`) |
|
||||
| `COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS` | `2.0` | Timeout per decoder invocation |
|
||||
|
||||
#### LetsMesh Upload Compatibility Mode
|
||||
|
||||
When `COLLECTOR_INGEST_MODE=letsmesh_upload`, the collector subscribes to:
|
||||
|
||||
- `<prefix>/+/packets`
|
||||
- `<prefix>/+/status`
|
||||
- `<prefix>/+/internal`
|
||||
|
||||
Normalization behavior:
|
||||
|
||||
- `status` packets are stored as informational `letsmesh_status` events and are not mapped to `advertisement` rows.
|
||||
- Decoder payload type `4` is mapped to `advertisement` when node identity metadata is present.
|
||||
- Decoder payload type `11` (control discover response) is mapped to `contact`.
|
||||
- Decoder payload type `9` is mapped to `trace_data`.
|
||||
- Decoder payload type `8` is mapped to informational `path_updated` events.
|
||||
- Decoder payload type `1` can map to native response events (`telemetry_response`, `battery`, `path_updated`, `status_response`) when decrypted structured content is available.
|
||||
- `packet_type=5` packets are mapped to `channel_msg_recv`.
|
||||
- `packet_type=1`, `2`, and `7` packets are mapped to `contact_msg_recv` when decryptable text is available.
|
||||
- For channel packets, if a channel key is available, a channel label is attached (for example `Public` or `#test`) for UI display.
|
||||
- In the messages feed and dashboard channel sections, known channel indexes are preferred for labels (`17 -> Public`, `217 -> #test`) to avoid stale channel-name mismatches.
|
||||
- Additional channel names are loaded from `COLLECTOR_LETSMESH_DECODER_KEYS` when entries are provided as `label=hex` (for example `bot=<key>`).
|
||||
- Decoder-advertisement packets with location metadata update node GPS (`lat/lon`) for map display.
|
||||
- This keeps advertisement listings closer to native mode behavior (node advert traffic only, not observer status telemetry).
|
||||
- Packets without decryptable message text are kept as informational `letsmesh_packet` events and are not shown in the messages feed; when decode succeeds the decoded JSON is attached to those packet log events.
|
||||
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message` before storage; receiver/observer names are never used as sender fallback.
|
||||
- The collector keeps built-in keys for `Public` and `#test`, and merges any additional keys from `COLLECTOR_LETSMESH_DECODER_KEYS`.
|
||||
- Docker runtime installs `@michaelhart/meshcore-decoder@0.2.7` and applies `patches/@michaelhart+meshcore-decoder+0.2.7.patch` via `patch-package` for Node compatibility.
|
||||
|
||||
### Webhooks
|
||||
|
||||
The collector can forward certain events to external HTTP endpoints:
|
||||
@@ -391,10 +351,8 @@ The collector automatically cleans up old event data and inactive nodes:
|
||||
| `API_KEY` | *(none)* | API key for web dashboard queries (optional) |
|
||||
| `WEB_THEME` | `dark` | Default theme (`dark` or `light`). Users can override via theme toggle in navbar. |
|
||||
| `WEB_LOCALE` | `en` | Locale/language for the web dashboard (e.g., `en`, `es`, `fr`) |
|
||||
| `WEB_DATETIME_LOCALE` | `en-US` | Locale used for date formatting in the web dashboard (e.g., `en-US` for MM/DD/YYYY, `en-GB` for DD/MM/YYYY). |
|
||||
| `WEB_AUTO_REFRESH_SECONDS` | `30` | Auto-refresh interval in seconds for list pages (0 to disable) |
|
||||
| `WEB_ADMIN_ENABLED` | `false` | Enable admin interface at /a/ (requires auth proxy: `X-Forwarded-User`/`X-Auth-Request-User` or forwarded `Authorization: Basic ...`) |
|
||||
| `WEB_TRUSTED_PROXY_HOSTS` | `*` | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled. |
|
||||
| `WEB_ADMIN_ENABLED` | `false` | Enable admin interface at /a/ (requires auth proxy) |
|
||||
| `TZ` | `UTC` | Timezone for displaying dates/times (e.g., `America/New_York`, `Europe/London`) |
|
||||
| `NETWORK_DOMAIN` | *(none)* | Network domain name (optional) |
|
||||
| `NETWORK_NAME` | `MeshCore Network` | Display name for the network |
|
||||
@@ -408,59 +366,6 @@ The collector automatically cleans up old event data and inactive nodes:
|
||||
| `NETWORK_CONTACT_YOUTUBE` | *(none)* | YouTube channel URL |
|
||||
| `CONTENT_HOME` | `./content` | Directory containing custom content (pages/, media/) |
|
||||
|
||||
Timezone handling note:
|
||||
- API timestamps that omit an explicit timezone suffix are treated as UTC before rendering in the configured `TZ`.
|
||||
|
||||
#### Nginx Proxy Manager (NPM) Admin Setup
|
||||
|
||||
Use two hostnames so the public map/site stays open while admin stays protected:
|
||||
|
||||
1. Public host: no Access List (normal users).
|
||||
2. Admin host: Access List enabled (operators only).
|
||||
|
||||
Both proxy hosts should forward to the same web container:
|
||||
- Scheme: `http`
|
||||
- Forward Hostname/IP: your MeshCore Hub host
|
||||
- Forward Port: `18080` (or your mapped web port)
|
||||
- Websockets Support: `ON`
|
||||
- Block Common Exploits: `ON`
|
||||
|
||||
Important:
|
||||
- Do not host this app under a subpath (for example `/meshcore`); proxy it at `/`.
|
||||
- `WEB_ADMIN_ENABLED` must be `true`.
|
||||
|
||||
In NPM, for the **admin host**, paste this in the `Advanced` field:
|
||||
|
||||
```nginx
|
||||
# Forward authenticated identity for MeshCore Hub admin checks
|
||||
proxy_set_header Authorization $http_authorization;
|
||||
proxy_set_header X-Forwarded-User $remote_user;
|
||||
proxy_set_header X-Auth-Request-User $remote_user;
|
||||
proxy_set_header X-Forwarded-Email "";
|
||||
proxy_set_header X-Forwarded-Groups "";
|
||||
```
|
||||
|
||||
Then attach your NPM Access List (Basic auth users) to that admin host.
|
||||
|
||||
Verify auth forwarding:
|
||||
|
||||
```bash
|
||||
curl -s -u 'admin:password' "https://admin.example.com/config.js?t=$(date +%s)" \
|
||||
| grep -o '"is_authenticated":[^,]*'
|
||||
```
|
||||
|
||||
Expected:
|
||||
|
||||
```text
|
||||
"is_authenticated": true
|
||||
```
|
||||
|
||||
If it still shows `false`, check:
|
||||
1. You are using the admin hostname, not the public hostname.
|
||||
2. The Access List is attached to that admin host.
|
||||
3. The `Advanced` block above is present exactly.
|
||||
4. `WEB_ADMIN_ENABLED=true` is loaded in the running web container.
|
||||
|
||||
#### Feature Flags
|
||||
|
||||
Control which pages are visible in the web dashboard. Disabled features are fully hidden: removed from navigation, return 404 on their routes, and excluded from sitemap/robots.txt.
|
||||
@@ -481,17 +386,13 @@ Control which pages are visible in the web dashboard. Disabled features are full
|
||||
|
||||
The web dashboard supports custom content including markdown pages and media files. Content is organized in subdirectories:
|
||||
|
||||
Custom logo options:
|
||||
- `logo.svg` — full-color logo, displayed as-is in both themes (no automatic darkening)
|
||||
- `logo-invert.svg` — monochrome/two-tone logo, automatically darkened in light mode for visibility
|
||||
```
|
||||
content/
|
||||
├── pages/ # Custom markdown pages
|
||||
│ └── about.md
|
||||
└── media/ # Custom media files
|
||||
└── images/
|
||||
├── logo.svg # Full-color custom logo (default)
|
||||
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
|
||||
└── logo.svg # Custom logo (replaces favicon and navbar/home logo)
|
||||
```
|
||||
|
||||
**Setup:**
|
||||
@@ -765,7 +666,7 @@ meshcore-hub/
|
||||
├── content/ # Custom content directory (CONTENT_HOME, optional)
|
||||
│ ├── pages/ # Custom markdown pages
|
||||
│ └── media/ # Custom media files
|
||||
│ └── images/ # Custom images (logo.svg/png/jpg/jpeg/webp replace default logo)
|
||||
│ └── images/ # Custom images (logo.svg replaces default logo)
|
||||
├── data/ # Runtime data directory (DATA_HOME, created at runtime)
|
||||
├── Dockerfile # Docker build configuration
|
||||
├── docker-compose.yml # Docker Compose services
|
||||
@@ -798,6 +699,8 @@ meshcore-hub/
|
||||
|
||||
This project is licensed under the GNU General Public License v3.0 or later (GPL-3.0-or-later). See [LICENSE](LICENSE) for details.
|
||||
|
||||
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
- [MeshCore](https://meshcore.dev/) - The mesh networking protocol
|
||||
|
||||
41
SCHEMAS.md
41
SCHEMAS.md
@@ -45,19 +45,15 @@ Node advertisements announcing presence and metadata.
|
||||
"public_key": "string (64 hex chars)",
|
||||
"name": "string (optional)",
|
||||
"adv_type": "string (optional)",
|
||||
"flags": "integer (optional)",
|
||||
"lat": "number (optional)",
|
||||
"lon": "number (optional)"
|
||||
"flags": "integer (optional)"
|
||||
}
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `public_key`: Node's full 64-character hexadecimal public key (required)
|
||||
- `name`: Node name/alias (e.g., "Gateway-01", "Alice")
|
||||
- `adv_type`: Node type - common values: `"chat"`, `"repeater"`, `"room"`, `"companion"` (other values may appear from upstream feeds and are normalized by the collector when possible)
|
||||
- `adv_type`: Node type - one of: `"chat"`, `"repeater"`, `"room"`, `"none"`
|
||||
- `flags`: Node capability/status flags (bitmask)
|
||||
- `lat`: GPS latitude when provided by decoder metadata
|
||||
- `lon`: GPS longitude when provided by decoder metadata
|
||||
|
||||
**Example**:
|
||||
```json
|
||||
@@ -65,9 +61,7 @@ Node advertisements announcing presence and metadata.
|
||||
"public_key": "4767c2897c256df8d85a5fa090574284bfd15b92d47359741b0abd5098ed30c4",
|
||||
"name": "Gateway-01",
|
||||
"adv_type": "repeater",
|
||||
"flags": 218,
|
||||
"lat": 42.470001,
|
||||
"lon": -71.330001
|
||||
"flags": 218
|
||||
}
|
||||
```
|
||||
|
||||
@@ -96,7 +90,7 @@ Direct/private messages between two nodes.
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key (or source hash prefix in compatibility ingest modes)
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key
|
||||
- `path_len`: Number of hops message traveled
|
||||
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
|
||||
- `signature`: Message signature (8 hex chars) when `txt_type=2`
|
||||
@@ -134,9 +128,7 @@ Group/broadcast messages on specific channels.
|
||||
**Payload Schema**:
|
||||
```json
|
||||
{
|
||||
"channel_idx": "integer (optional)",
|
||||
"channel_name": "string (optional)",
|
||||
"pubkey_prefix": "string (12 chars, optional)",
|
||||
"channel_idx": "integer",
|
||||
"path_len": "integer (optional)",
|
||||
"txt_type": "integer (optional)",
|
||||
"signature": "string (optional)",
|
||||
@@ -147,9 +139,7 @@ Group/broadcast messages on specific channels.
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `channel_idx`: Channel number (0-255) when available
|
||||
- `channel_name`: Channel display label (e.g., `"Public"`, `"#test"`) when available
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key when available
|
||||
- `channel_idx`: Channel number (0-255)
|
||||
- `path_len`: Number of hops message traveled
|
||||
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
|
||||
- `signature`: Message signature (8 hex chars) when `txt_type=2`
|
||||
@@ -176,25 +166,6 @@ Group/broadcast messages on specific channels.
|
||||
- Send only text: `$.data.text`
|
||||
- Send channel + text: `$.data.[channel_idx,text]`
|
||||
|
||||
**Compatibility ingest note**:
|
||||
- In LetsMesh upload compatibility mode, packet type `5` is normalized to `CHANNEL_MSG_RECV` and packet types `1`, `2`, and `7` are normalized to `CONTACT_MSG_RECV` when decryptable text is available.
|
||||
- LetsMesh packets without decryptable message text are treated as informational `letsmesh_packet` events instead of message events.
|
||||
- For UI labels, known channel indexes are mapped (`17 -> Public`, `217 -> #test`) and preferred over ambiguous/stale channel-name hints.
|
||||
- Additional channel labels can be provided through `COLLECTOR_LETSMESH_DECODER_KEYS` using `label=hex` entries.
|
||||
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message`; sender identity remains unknown when only hash/prefix metadata is available.
|
||||
|
||||
**Compatibility ingest note (advertisements)**:
|
||||
- In LetsMesh upload compatibility mode, `status` feed payloads are persisted as informational `letsmesh_status` events and are not normalized to `ADVERTISEMENT`.
|
||||
- In LetsMesh upload compatibility mode, decoded payload type `4` is normalized to `ADVERTISEMENT` when node identity metadata is present.
|
||||
- Payload type `4` location metadata (`appData.location.latitude/longitude`) is mapped to node `lat/lon` for map rendering.
|
||||
- This keeps advertisement persistence aligned with native mode expectations (advertisement traffic only).
|
||||
|
||||
**Compatibility ingest note (non-message structured events)**:
|
||||
- Decoded payload type `9` is normalized to `TRACE_DATA` (`traceTag`, flags, auth, path hashes, and SNR values).
|
||||
- Decoded payload type `11` (`Control/NodeDiscoverResp`) is normalized to `contact` events for node upsert parity.
|
||||
- Decoded payload type `8` is normalized to informational `PATH_UPDATED` events (`hop_count` + path hashes).
|
||||
- Decoded payload type `1` can be normalized to `TELEMETRY_RESPONSE`, `BATTERY`, `PATH_UPDATED`, or `STATUS_RESPONSE` when decrypted response content is structured and parseable.
|
||||
|
||||
---
|
||||
|
||||
## Persisted Events (Non-Webhook)
|
||||
|
||||
3
TASKS.md
3
TASKS.md
@@ -753,9 +753,6 @@ This document tracks implementation progress for the MeshCore Hub project. Each
|
||||
### Decisions Made
|
||||
*(Record architectural decisions and answers to clarifying questions here)*
|
||||
|
||||
- [x] LetsMesh/native advertisement parity: in `letsmesh_upload` mode, observer `status` feed stays informational (`letsmesh_status`) and does not populate `advertisements`.
|
||||
- [x] LetsMesh advertisement persistence source: decoded packet payload type `4` maps to `advertisement`; payload type `11` maps to `contact` parity updates.
|
||||
- [x] LetsMesh native-event parity extensions: payload type `9` maps to `trace_data`, payload type `8` maps to informational `path_updated`, and payload type `1` can map to response-style native events when decryptable structured content exists.
|
||||
- [ ] Q1 (MQTT Broker):
|
||||
- [ ] Q2 (Database):
|
||||
- [ ] Q3 (Web Dashboard Separation):
|
||||
|
||||
@@ -48,8 +48,6 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- SERIAL_PORT=${SERIAL_PORT:-/dev/ttyUSB0}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-}
|
||||
@@ -85,8 +83,6 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- SERIAL_PORT=${SERIAL_PORT_SENDER:-/dev/ttyUSB1}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS_SENDER:-}
|
||||
@@ -119,8 +115,6 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- MOCK_DEVICE=true
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef}
|
||||
command: ["interface", "receiver", "--mock"]
|
||||
@@ -158,13 +152,6 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- COLLECTOR_INGEST_MODE=${COLLECTOR_INGEST_MODE:-native}
|
||||
- COLLECTOR_LETSMESH_DECODER_ENABLED=${COLLECTOR_LETSMESH_DECODER_ENABLED:-true}
|
||||
- COLLECTOR_LETSMESH_DECODER_COMMAND=${COLLECTOR_LETSMESH_DECODER_COMMAND:-meshcore-decoder}
|
||||
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
|
||||
- COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=${COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS:-2.0}
|
||||
- DATA_HOME=/data
|
||||
- SEED_HOME=/seed
|
||||
# Webhook configuration
|
||||
@@ -223,8 +210,6 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- DATA_HOME=/data
|
||||
- API_HOST=0.0.0.0
|
||||
- API_PORT=8000
|
||||
@@ -270,7 +255,6 @@ services:
|
||||
- WEB_PORT=8080
|
||||
- WEB_THEME=${WEB_THEME:-dark}
|
||||
- WEB_LOCALE=${WEB_LOCALE:-en}
|
||||
- WEB_DATETIME_LOCALE=${WEB_DATETIME_LOCALE:-en-US}
|
||||
- WEB_ADMIN_ENABLED=${WEB_ADMIN_ENABLED:-false}
|
||||
- NETWORK_NAME=${NETWORK_NAME:-MeshCore Network}
|
||||
- NETWORK_CITY=${NETWORK_CITY:-}
|
||||
@@ -283,7 +267,6 @@ services:
|
||||
- NETWORK_WELCOME_TEXT=${NETWORK_WELCOME_TEXT:-}
|
||||
- CONTENT_HOME=/content
|
||||
- TZ=${TZ:-UTC}
|
||||
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
|
||||
# Feature flags (set to false to disable specific pages)
|
||||
- FEATURE_DASHBOARD=${FEATURE_DASHBOARD:-true}
|
||||
- FEATURE_NODES=${FEATURE_NODES:-true}
|
||||
|
||||
|
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
@@ -1,58 +0,0 @@
|
||||
diff --git a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
index d33ffd6..8d040d0 100644
|
||||
--- a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
+++ b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
@@ -36,7 +36,27 @@ var __importStar = (this && this.__importStar) || (function () {
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Ed25519SignatureVerifier = void 0;
|
||||
-const ed25519 = __importStar(require("@noble/ed25519"));
|
||||
+let _ed25519 = null;
|
||||
+async function getEd25519() {
|
||||
+ if (_ed25519) {
|
||||
+ return _ed25519;
|
||||
+ }
|
||||
+ const mod = await import("@noble/ed25519");
|
||||
+ _ed25519 = mod.default ? mod.default : mod;
|
||||
+ try {
|
||||
+ _ed25519.etc.sha512Async = sha512Hash;
|
||||
+ }
|
||||
+ catch (error) {
|
||||
+ console.debug("Could not set async SHA-512:", error);
|
||||
+ }
|
||||
+ try {
|
||||
+ _ed25519.etc.sha512Sync = sha512HashSync;
|
||||
+ }
|
||||
+ catch (error) {
|
||||
+ console.debug("Could not set up synchronous SHA-512:", error);
|
||||
+ }
|
||||
+ return _ed25519;
|
||||
+}
|
||||
const hex_1 = require("../utils/hex");
|
||||
const orlp_ed25519_wasm_1 = require("./orlp-ed25519-wasm");
|
||||
// Cross-platform SHA-512 implementation
|
||||
@@ -90,16 +110,6 @@ function sha512HashSync(data) {
|
||||
throw new Error('No SHA-512 implementation available for synchronous operation');
|
||||
}
|
||||
}
|
||||
-// Set up SHA-512 for @noble/ed25519
|
||||
-ed25519.etc.sha512Async = sha512Hash;
|
||||
-// Always set up sync version - @noble/ed25519 requires it
|
||||
-// It will throw in browser environments, which @noble/ed25519 can handle
|
||||
-try {
|
||||
- ed25519.etc.sha512Sync = sha512HashSync;
|
||||
-}
|
||||
-catch (error) {
|
||||
- console.debug('Could not set up synchronous SHA-512:', error);
|
||||
-}
|
||||
class Ed25519SignatureVerifier {
|
||||
/**
|
||||
* Verify an Ed25519 signature for MeshCore advertisement packets
|
||||
@@ -116,6 +126,7 @@ class Ed25519SignatureVerifier {
|
||||
// Construct the signed message according to MeshCore format
|
||||
const message = this.constructAdvertSignedMessage(publicKeyHex, timestamp, appData);
|
||||
// Verify the signature using noble-ed25519
|
||||
+ const ed25519 = await getEd25519();
|
||||
return await ed25519.verify(signature, message, publicKey);
|
||||
}
|
||||
catch (error) {
|
||||
@@ -51,12 +51,8 @@ def create_app(
|
||||
admin_key: str | None = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: str | None = None,
|
||||
mqtt_password: str | None = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
cors_origins: list[str] | None = None,
|
||||
metrics_enabled: bool = True,
|
||||
metrics_cache_ttl: int = 60,
|
||||
@@ -69,12 +65,8 @@ def create_app(
|
||||
admin_key: Admin API key
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
cors_origins: Allowed CORS origins
|
||||
metrics_enabled: Enable Prometheus metrics endpoint at /metrics
|
||||
metrics_cache_ttl: Seconds to cache metrics output
|
||||
@@ -98,12 +90,8 @@ def create_app(
|
||||
app.state.admin_key = admin_key
|
||||
app.state.mqtt_host = mqtt_host
|
||||
app.state.mqtt_port = mqtt_port
|
||||
app.state.mqtt_username = mqtt_username
|
||||
app.state.mqtt_password = mqtt_password
|
||||
app.state.mqtt_prefix = mqtt_prefix
|
||||
app.state.mqtt_tls = mqtt_tls
|
||||
app.state.mqtt_transport = mqtt_transport
|
||||
app.state.mqtt_ws_path = mqtt_ws_path
|
||||
app.state.metrics_cache_ttl = metrics_cache_ttl
|
||||
|
||||
# Configure CORS
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Authentication middleware for the API."""
|
||||
|
||||
import hmac
|
||||
import logging
|
||||
from typing import Annotated
|
||||
|
||||
@@ -80,9 +79,7 @@ async def require_read(
|
||||
)
|
||||
|
||||
# Check if token matches any key
|
||||
if (read_key and hmac.compare_digest(token, read_key)) or (
|
||||
admin_key and hmac.compare_digest(token, admin_key)
|
||||
):
|
||||
if token == read_key or token == admin_key:
|
||||
return token
|
||||
|
||||
raise HTTPException(
|
||||
@@ -127,7 +124,7 @@ async def require_admin(
|
||||
)
|
||||
|
||||
# Check if token matches admin key
|
||||
if hmac.compare_digest(token, admin_key):
|
||||
if token == admin_key:
|
||||
return token
|
||||
|
||||
raise HTTPException(
|
||||
|
||||
@@ -60,25 +60,11 @@ import click
|
||||
envvar="MQTT_PORT",
|
||||
help="MQTT broker port",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-username",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MQTT_USERNAME",
|
||||
help="MQTT username",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-password",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MQTT_PASSWORD",
|
||||
help="MQTT password",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-prefix",
|
||||
type=str,
|
||||
default="meshcore",
|
||||
envvar=["MQTT_PREFIX", "MQTT_TOPIC_PREFIX"],
|
||||
envvar="MQTT_TOPIC_PREFIX",
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
@@ -88,20 +74,6 @@ import click
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-transport",
|
||||
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
|
||||
default="tcp",
|
||||
envvar="MQTT_TRANSPORT",
|
||||
help="MQTT transport protocol",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-ws-path",
|
||||
type=str,
|
||||
default="/mqtt",
|
||||
envvar="MQTT_WS_PATH",
|
||||
help="MQTT WebSocket path (used when transport=websockets)",
|
||||
)
|
||||
@click.option(
|
||||
"--cors-origins",
|
||||
type=str,
|
||||
@@ -139,12 +111,8 @@ def api(
|
||||
admin_key: str | None,
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
mqtt_prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
cors_origins: str | None,
|
||||
metrics_enabled: bool,
|
||||
metrics_cache_ttl: int,
|
||||
@@ -193,7 +161,6 @@ def api(
|
||||
click.echo(f"Data home: {effective_data_home}")
|
||||
click.echo(f"Database: {effective_db_url}")
|
||||
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {mqtt_prefix})")
|
||||
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
|
||||
click.echo(f"Read key configured: {read_key is not None}")
|
||||
click.echo(f"Admin key configured: {admin_key is not None}")
|
||||
click.echo(f"CORS origins: {cors_origins or 'none'}")
|
||||
@@ -228,12 +195,8 @@ def api(
|
||||
admin_key=admin_key,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
cors_origins=origins_list,
|
||||
metrics_enabled=metrics_enabled,
|
||||
metrics_cache_ttl=metrics_cache_ttl,
|
||||
|
||||
@@ -56,25 +56,17 @@ def get_mqtt_client(request: Request) -> MQTTClient:
|
||||
"""
|
||||
mqtt_host = getattr(request.app.state, "mqtt_host", "localhost")
|
||||
mqtt_port = getattr(request.app.state, "mqtt_port", 1883)
|
||||
mqtt_username = getattr(request.app.state, "mqtt_username", None)
|
||||
mqtt_password = getattr(request.app.state, "mqtt_password", None)
|
||||
mqtt_prefix = getattr(request.app.state, "mqtt_prefix", "meshcore")
|
||||
mqtt_tls = getattr(request.app.state, "mqtt_tls", False)
|
||||
mqtt_transport = getattr(request.app.state, "mqtt_transport", "tcp")
|
||||
mqtt_ws_path = getattr(request.app.state, "mqtt_ws_path", "/mqtt")
|
||||
|
||||
# Use unique client ID to allow multiple API instances
|
||||
unique_id = uuid.uuid4().hex[:8]
|
||||
config = MQTTConfig(
|
||||
host=mqtt_host,
|
||||
port=mqtt_port,
|
||||
username=mqtt_username,
|
||||
password=mqtt_password,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-api-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
transport=mqtt_transport,
|
||||
ws_path=mqtt_ws_path,
|
||||
)
|
||||
|
||||
client = MQTTClient(config)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Prometheus metrics endpoint for MeshCore Hub API."""
|
||||
|
||||
import base64
|
||||
import hmac
|
||||
import logging
|
||||
import time
|
||||
from typing import Any
|
||||
@@ -55,9 +54,7 @@ def verify_basic_auth(request: Request) -> bool:
|
||||
try:
|
||||
decoded = base64.b64decode(auth_header[6:]).decode("utf-8")
|
||||
username, password = decoded.split(":", 1)
|
||||
return hmac.compare_digest(username, "metrics") and hmac.compare_digest(
|
||||
password, read_key
|
||||
)
|
||||
return username == "metrics" and password == read_key
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from fastapi import APIRouter
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from meshcore_hub.api.auth import RequireRead
|
||||
@@ -361,3 +362,175 @@ async def get_node_count_history(
|
||||
data.append(DailyActivityPoint(date=date_str, count=count))
|
||||
|
||||
return NodeCountHistory(days=days, data=data)
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
|
||||
async def dashboard(
|
||||
request: Request,
|
||||
session: DbSession,
|
||||
) -> HTMLResponse:
|
||||
"""Simple HTML dashboard page."""
|
||||
now = datetime.now(timezone.utc)
|
||||
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
yesterday = now - timedelta(days=1)
|
||||
|
||||
# Get stats
|
||||
total_nodes = session.execute(select(func.count()).select_from(Node)).scalar() or 0
|
||||
|
||||
active_nodes = (
|
||||
session.execute(
|
||||
select(func.count()).select_from(Node).where(Node.last_seen >= yesterday)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
|
||||
total_messages = (
|
||||
session.execute(select(func.count()).select_from(Message)).scalar() or 0
|
||||
)
|
||||
|
||||
messages_today = (
|
||||
session.execute(
|
||||
select(func.count())
|
||||
.select_from(Message)
|
||||
.where(Message.received_at >= today_start)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
|
||||
# Get recent nodes
|
||||
recent_nodes = (
|
||||
session.execute(select(Node).order_by(Node.last_seen.desc()).limit(10))
|
||||
.scalars()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Get recent messages
|
||||
recent_messages = (
|
||||
session.execute(select(Message).order_by(Message.received_at.desc()).limit(10))
|
||||
.scalars()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Build HTML
|
||||
html = f"""
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>MeshCore Hub Dashboard</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta http-equiv="refresh" content="30">
|
||||
<style>
|
||||
body {{
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
margin: 0;
|
||||
padding: 20px;
|
||||
background: #f5f5f5;
|
||||
color: #333;
|
||||
}}
|
||||
h1 {{ color: #2c3e50; }}
|
||||
.container {{ max-width: 1200px; margin: 0 auto; }}
|
||||
.stats {{
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||
gap: 20px;
|
||||
margin-bottom: 30px;
|
||||
}}
|
||||
.stat-card {{
|
||||
background: white;
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
}}
|
||||
.stat-card h3 {{ margin: 0 0 10px 0; color: #666; font-size: 14px; }}
|
||||
.stat-card .value {{ font-size: 32px; font-weight: bold; color: #2c3e50; }}
|
||||
.section {{
|
||||
background: white;
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
margin-bottom: 20px;
|
||||
}}
|
||||
table {{ width: 100%; border-collapse: collapse; }}
|
||||
th, td {{ padding: 10px; text-align: left; border-bottom: 1px solid #eee; }}
|
||||
th {{ background: #f8f9fa; font-weight: 600; }}
|
||||
.text-muted {{ color: #666; }}
|
||||
.truncate {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>MeshCore Hub Dashboard</h1>
|
||||
<p class="text-muted">Last updated: {now.strftime('%Y-%m-%d %H:%M:%S UTC')}</p>
|
||||
|
||||
<div class="stats">
|
||||
<div class="stat-card">
|
||||
<h3>Total Nodes</h3>
|
||||
<div class="value">{total_nodes}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Active Nodes (24h)</h3>
|
||||
<div class="value">{active_nodes}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Total Messages</h3>
|
||||
<div class="value">{total_messages}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Messages Today</h3>
|
||||
<div class="value">{messages_today}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="section">
|
||||
<h2>Recent Nodes</h2>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Public Key</th>
|
||||
<th>Type</th>
|
||||
<th>Last Seen</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{"".join(f'''
|
||||
<tr>
|
||||
<td>{n.name or '-'}</td>
|
||||
<td class="truncate">{n.public_key[:16]}...</td>
|
||||
<td>{n.adv_type or '-'}</td>
|
||||
<td>{n.last_seen.strftime('%Y-%m-%d %H:%M') if n.last_seen else '-'}</td>
|
||||
</tr>
|
||||
''' for n in recent_nodes)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="section">
|
||||
<h2>Recent Messages</h2>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Type</th>
|
||||
<th>From/Channel</th>
|
||||
<th>Text</th>
|
||||
<th>Received</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{"".join(f'''
|
||||
<tr>
|
||||
<td>{m.message_type}</td>
|
||||
<td>{m.pubkey_prefix or f'Ch {m.channel_idx}' or '-'}</td>
|
||||
<td class="truncate">{m.text[:50]}{'...' if len(m.text) > 50 else ''}</td>
|
||||
<td>{m.received_at.strftime('%Y-%m-%d %H:%M') if m.received_at else '-'}</td>
|
||||
</tr>
|
||||
''' for m in recent_messages)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
return HTMLResponse(content=html)
|
||||
|
||||
@@ -48,39 +48,7 @@ async def list_nodes(
|
||||
)
|
||||
|
||||
if adv_type:
|
||||
normalized_adv_type = adv_type.strip().lower()
|
||||
if normalized_adv_type == "repeater":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "repeater",
|
||||
Node.adv_type.ilike("%repeater%"),
|
||||
Node.adv_type.ilike("%relay%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "companion":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "companion",
|
||||
Node.adv_type.ilike("%companion%"),
|
||||
Node.adv_type.ilike("%observer%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "room":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "room",
|
||||
Node.adv_type.ilike("%room%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "chat":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "chat",
|
||||
Node.adv_type.ilike("%chat%"),
|
||||
)
|
||||
)
|
||||
else:
|
||||
query = query.where(Node.adv_type == adv_type)
|
||||
query = query.where(Node.adv_type == adv_type)
|
||||
|
||||
if member_id:
|
||||
# Filter nodes that have a member_id tag with the specified value
|
||||
|
||||
@@ -54,31 +54,6 @@ if TYPE_CHECKING:
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-transport",
|
||||
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
|
||||
default="tcp",
|
||||
envvar="MQTT_TRANSPORT",
|
||||
help="MQTT transport protocol",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-ws-path",
|
||||
type=str,
|
||||
default="/mqtt",
|
||||
envvar="MQTT_WS_PATH",
|
||||
help="MQTT WebSocket path (used when transport=websockets)",
|
||||
)
|
||||
@click.option(
|
||||
"--ingest-mode",
|
||||
"collector_ingest_mode",
|
||||
type=click.Choice(["native", "letsmesh_upload"], case_sensitive=False),
|
||||
default="native",
|
||||
envvar="COLLECTOR_INGEST_MODE",
|
||||
help=(
|
||||
"Collector ingest mode: native MeshCore events or LetsMesh upload "
|
||||
"(packets/status/internal)"
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--data-home",
|
||||
type=str,
|
||||
@@ -115,9 +90,6 @@ def collector(
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
collector_ingest_mode: str,
|
||||
data_home: str | None,
|
||||
seed_home: str | None,
|
||||
database_url: str | None,
|
||||
@@ -162,9 +134,6 @@ def collector(
|
||||
ctx.obj["mqtt_password"] = mqtt_password
|
||||
ctx.obj["prefix"] = prefix
|
||||
ctx.obj["mqtt_tls"] = mqtt_tls
|
||||
ctx.obj["mqtt_transport"] = mqtt_transport
|
||||
ctx.obj["mqtt_ws_path"] = mqtt_ws_path
|
||||
ctx.obj["collector_ingest_mode"] = collector_ingest_mode
|
||||
ctx.obj["data_home"] = data_home or settings.data_home
|
||||
ctx.obj["seed_home"] = settings.effective_seed_home
|
||||
ctx.obj["database_url"] = effective_db_url
|
||||
@@ -180,9 +149,6 @@ def collector(
|
||||
mqtt_password=mqtt_password,
|
||||
prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=collector_ingest_mode,
|
||||
database_url=effective_db_url,
|
||||
log_level=log_level,
|
||||
data_home=data_home or settings.data_home,
|
||||
@@ -197,9 +163,6 @@ def _run_collector_service(
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
ingest_mode: str,
|
||||
database_url: str,
|
||||
log_level: str,
|
||||
data_home: str,
|
||||
@@ -228,8 +191,6 @@ def _run_collector_service(
|
||||
click.echo(f"Data home: {data_home}")
|
||||
click.echo(f"Seed home: {seed_home}")
|
||||
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {prefix})")
|
||||
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
|
||||
click.echo(f"Ingest mode: {ingest_mode}")
|
||||
click.echo(f"Database: {database_url}")
|
||||
|
||||
# Load webhook configuration from settings
|
||||
@@ -237,7 +198,6 @@ def _run_collector_service(
|
||||
WebhookDispatcher,
|
||||
create_webhooks_from_settings,
|
||||
)
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.common.config import get_collector_settings
|
||||
|
||||
settings = get_collector_settings()
|
||||
@@ -274,24 +234,6 @@ def _run_collector_service(
|
||||
if settings.data_retention_enabled or settings.node_cleanup_enabled:
|
||||
click.echo(f" Interval: {settings.data_retention_interval_hours} hours")
|
||||
|
||||
if ingest_mode.lower() == "letsmesh_upload":
|
||||
click.echo("")
|
||||
click.echo("LetsMesh decode configuration:")
|
||||
if settings.collector_letsmesh_decoder_enabled:
|
||||
builtin_keys = len(LetsMeshPacketDecoder.BUILTIN_CHANNEL_KEYS)
|
||||
env_keys = len(settings.collector_letsmesh_decoder_keys_list)
|
||||
click.echo(
|
||||
" Decoder: Enabled " f"({settings.collector_letsmesh_decoder_command})"
|
||||
)
|
||||
click.echo(f" Built-in keys: {builtin_keys}")
|
||||
click.echo(" Additional keys from .env: " f"{env_keys} configured")
|
||||
click.echo(
|
||||
" Timeout: "
|
||||
f"{settings.collector_letsmesh_decoder_timeout_seconds:.2f}s"
|
||||
)
|
||||
else:
|
||||
click.echo(" Decoder: Disabled")
|
||||
|
||||
click.echo("")
|
||||
click.echo("Starting MQTT subscriber...")
|
||||
run_collector(
|
||||
@@ -301,9 +243,6 @@ def _run_collector_service(
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=ingest_mode,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=settings.data_retention_enabled,
|
||||
@@ -311,12 +250,6 @@ def _run_collector_service(
|
||||
cleanup_interval_hours=settings.data_retention_interval_hours,
|
||||
node_cleanup_enabled=settings.node_cleanup_enabled,
|
||||
node_cleanup_days=settings.node_cleanup_days,
|
||||
letsmesh_decoder_enabled=settings.collector_letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=settings.collector_letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=settings.collector_letsmesh_decoder_keys_list,
|
||||
letsmesh_decoder_timeout_seconds=(
|
||||
settings.collector_letsmesh_decoder_timeout_seconds
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -334,9 +267,6 @@ def run_cmd(ctx: click.Context) -> None:
|
||||
mqtt_password=ctx.obj["mqtt_password"],
|
||||
prefix=ctx.obj["prefix"],
|
||||
mqtt_tls=ctx.obj["mqtt_tls"],
|
||||
mqtt_transport=ctx.obj["mqtt_transport"],
|
||||
mqtt_ws_path=ctx.obj["mqtt_ws_path"],
|
||||
ingest_mode=ctx.obj["collector_ingest_mode"],
|
||||
database_url=ctx.obj["database_url"],
|
||||
log_level=ctx.obj["log_level"],
|
||||
data_home=ctx.obj["data_home"],
|
||||
|
||||
@@ -14,20 +14,6 @@ from meshcore_hub.common.models import Advertisement, Node, add_event_receiver
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _coerce_float(value: Any) -> float | None:
|
||||
"""Convert int/float/string values to float when possible."""
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, (int, float)):
|
||||
return float(value)
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
return float(value.strip())
|
||||
except ValueError:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def handle_advertisement(
|
||||
public_key: str,
|
||||
event_type: str,
|
||||
@@ -54,22 +40,6 @@ def handle_advertisement(
|
||||
name = payload.get("name")
|
||||
adv_type = payload.get("adv_type")
|
||||
flags = payload.get("flags")
|
||||
lat = payload.get("lat")
|
||||
lon = payload.get("lon")
|
||||
|
||||
if lat is None:
|
||||
lat = payload.get("adv_lat")
|
||||
if lon is None:
|
||||
lon = payload.get("adv_lon")
|
||||
|
||||
location = payload.get("location")
|
||||
if isinstance(location, dict):
|
||||
if lat is None:
|
||||
lat = location.get("latitude")
|
||||
if lon is None:
|
||||
lon = location.get("longitude")
|
||||
lat = _coerce_float(lat)
|
||||
lon = _coerce_float(lon)
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Compute event hash for deduplication (30-second time bucket)
|
||||
@@ -109,10 +79,6 @@ def handle_advertisement(
|
||||
node_query = select(Node).where(Node.public_key == adv_public_key)
|
||||
node = session.execute(node_query).scalar_one_or_none()
|
||||
if node:
|
||||
if lat is not None:
|
||||
node.lat = lat
|
||||
if lon is not None:
|
||||
node.lon = lon
|
||||
node.last_seen = now
|
||||
|
||||
# Add this receiver to the junction table
|
||||
@@ -144,10 +110,6 @@ def handle_advertisement(
|
||||
node.adv_type = adv_type
|
||||
if flags is not None:
|
||||
node.flags = flags
|
||||
if lat is not None:
|
||||
node.lat = lat
|
||||
if lon is not None:
|
||||
node.lon = lon
|
||||
node.last_seen = now
|
||||
else:
|
||||
# Create new node
|
||||
@@ -158,8 +120,6 @@ def handle_advertisement(
|
||||
flags=flags,
|
||||
first_seen=now,
|
||||
last_seen=now,
|
||||
lat=lat,
|
||||
lon=lon,
|
||||
)
|
||||
session.add(node)
|
||||
session.flush()
|
||||
|
||||
@@ -70,7 +70,7 @@ def _handle_message(
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Extract fields based on message type
|
||||
pubkey_prefix = payload.get("pubkey_prefix")
|
||||
pubkey_prefix = payload.get("pubkey_prefix") if message_type == "contact" else None
|
||||
channel_idx = payload.get("channel_idx") if message_type == "channel" else None
|
||||
path_len = payload.get("path_len")
|
||||
txt_type = payload.get("txt_type")
|
||||
|
||||
@@ -1,275 +0,0 @@
|
||||
"""LetsMesh packet decoder integration.
|
||||
|
||||
Provides an optional bridge to the external `meshcore-decoder` CLI so the
|
||||
collector can turn LetsMesh upload `raw` packet hex into decoded message data.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import shlex
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
from typing import Any, NamedTuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LetsMeshPacketDecoder:
|
||||
"""Decode LetsMesh packet payloads with `meshcore-decoder` CLI."""
|
||||
|
||||
class ChannelKey(NamedTuple):
|
||||
"""Channel key metadata for decryption and channel labeling."""
|
||||
|
||||
label: str | None
|
||||
key_hex: str
|
||||
channel_hash: str
|
||||
|
||||
# Built-in keys required by your deployment.
|
||||
# - Public channel
|
||||
# - #test channel
|
||||
BUILTIN_CHANNEL_KEYS: tuple[tuple[str, str], ...] = (
|
||||
("Public", "8B3387E9C5CDEA6AC9E5EDBAA115CD72"),
|
||||
("test", "9CD8FCF22A47333B591D96A2B848B73F"),
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
enabled: bool = True,
|
||||
command: str = "meshcore-decoder",
|
||||
channel_keys: list[str] | None = None,
|
||||
timeout_seconds: float = 2.0,
|
||||
) -> None:
|
||||
self._enabled = enabled
|
||||
self._command_tokens = shlex.split(command.strip()) if command.strip() else []
|
||||
self._channel_key_infos = self._normalize_channel_keys(channel_keys or [])
|
||||
self._channel_keys = [info.key_hex for info in self._channel_key_infos]
|
||||
self._channel_names_by_hash = {
|
||||
info.channel_hash: info.label
|
||||
for info in self._channel_key_infos
|
||||
if info.label
|
||||
}
|
||||
self._decode_cache: dict[str, dict[str, Any] | None] = {}
|
||||
self._decode_cache_maxsize = 2048
|
||||
self._timeout_seconds = timeout_seconds
|
||||
self._checked_command = False
|
||||
self._command_available = False
|
||||
self._warned_unavailable = False
|
||||
|
||||
@classmethod
|
||||
def _normalize_channel_keys(cls, values: list[str]) -> list[ChannelKey]:
|
||||
"""Normalize key list (labels + key + channel hash, deduplicated)."""
|
||||
normalized: list[LetsMeshPacketDecoder.ChannelKey] = []
|
||||
seen_keys: set[str] = set()
|
||||
|
||||
for label, key in cls.BUILTIN_CHANNEL_KEYS:
|
||||
entry = cls._normalize_channel_entry(f"{label}={key}")
|
||||
if not entry:
|
||||
continue
|
||||
if entry.key_hex in seen_keys:
|
||||
continue
|
||||
normalized.append(entry)
|
||||
seen_keys.add(entry.key_hex)
|
||||
|
||||
for value in values:
|
||||
entry = cls._normalize_channel_entry(value)
|
||||
if not entry:
|
||||
continue
|
||||
if entry.key_hex in seen_keys:
|
||||
continue
|
||||
normalized.append(entry)
|
||||
seen_keys.add(entry.key_hex)
|
||||
|
||||
return normalized
|
||||
|
||||
@classmethod
|
||||
def _normalize_channel_entry(cls, value: str | None) -> ChannelKey | None:
|
||||
"""Normalize one key entry (`label=hex`, `label:hex`, or `hex`)."""
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
candidate = value.strip()
|
||||
if not candidate:
|
||||
return None
|
||||
|
||||
label: str | None = None
|
||||
key_candidate = candidate
|
||||
for separator in ("=", ":"):
|
||||
if separator not in candidate:
|
||||
continue
|
||||
left, right = candidate.split(separator, 1)
|
||||
right = right.strip()
|
||||
right = right.removeprefix("0x").removeprefix("0X").strip()
|
||||
if right and cls._is_hex(right):
|
||||
label = left.strip().lstrip("#")
|
||||
key_candidate = right
|
||||
break
|
||||
|
||||
key_candidate = key_candidate.strip()
|
||||
key_candidate = key_candidate.removeprefix("0x").removeprefix("0X").strip()
|
||||
if not key_candidate or not cls._is_hex(key_candidate):
|
||||
return None
|
||||
|
||||
key_hex = key_candidate.upper()
|
||||
channel_hash = cls._compute_channel_hash(key_hex)
|
||||
normalized_label = label.strip() if label and label.strip() else None
|
||||
return cls.ChannelKey(
|
||||
label=normalized_label,
|
||||
key_hex=key_hex,
|
||||
channel_hash=channel_hash,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _is_hex(value: str) -> bool:
|
||||
"""Return True if string contains only hex digits."""
|
||||
return bool(value) and all(char in string.hexdigits for char in value)
|
||||
|
||||
@staticmethod
|
||||
def _compute_channel_hash(key_hex: str) -> str:
|
||||
"""Compute channel hash (first byte of SHA-256 of channel key)."""
|
||||
return hashlib.sha256(bytes.fromhex(key_hex)).digest()[:1].hex().upper()
|
||||
|
||||
def channel_name_from_decoded(
|
||||
self,
|
||||
decoded_packet: dict[str, Any] | None,
|
||||
) -> str | None:
|
||||
"""Resolve channel label from decoded payload channel hash."""
|
||||
if not isinstance(decoded_packet, dict):
|
||||
return None
|
||||
|
||||
payload = decoded_packet.get("payload")
|
||||
if not isinstance(payload, dict):
|
||||
return None
|
||||
|
||||
decoded = payload.get("decoded")
|
||||
if not isinstance(decoded, dict):
|
||||
return None
|
||||
|
||||
channel_hash = decoded.get("channelHash")
|
||||
if not isinstance(channel_hash, str):
|
||||
return None
|
||||
|
||||
return self._channel_names_by_hash.get(channel_hash.upper())
|
||||
|
||||
def channel_labels_by_index(self) -> dict[int, str]:
|
||||
"""Return channel labels keyed by numeric channel index (0-255)."""
|
||||
labels: dict[int, str] = {}
|
||||
for info in self._channel_key_infos:
|
||||
if not info.label:
|
||||
continue
|
||||
|
||||
label = info.label.strip()
|
||||
if not label:
|
||||
continue
|
||||
|
||||
if label.lower() == "public":
|
||||
normalized_label = "Public"
|
||||
else:
|
||||
normalized_label = label if label.startswith("#") else f"#{label}"
|
||||
|
||||
channel_idx = int(info.channel_hash, 16)
|
||||
labels.setdefault(channel_idx, normalized_label)
|
||||
|
||||
return labels
|
||||
|
||||
def decode_payload(self, payload: dict[str, Any]) -> dict[str, Any] | None:
|
||||
"""Decode packet payload `raw` hex and return decoded JSON if available."""
|
||||
raw_hex = payload.get("raw")
|
||||
if not isinstance(raw_hex, str):
|
||||
return None
|
||||
clean_hex = raw_hex.strip()
|
||||
if not clean_hex:
|
||||
return None
|
||||
if not self._is_hex(clean_hex):
|
||||
logger.debug("LetsMesh decoder skipped non-hex raw payload")
|
||||
return None
|
||||
cached = self._decode_cache.get(clean_hex)
|
||||
if clean_hex in self._decode_cache:
|
||||
return cached
|
||||
|
||||
decoded = self._decode_raw(clean_hex)
|
||||
self._decode_cache[clean_hex] = decoded
|
||||
if len(self._decode_cache) > self._decode_cache_maxsize:
|
||||
# Drop oldest cached payload (insertion-order dict).
|
||||
self._decode_cache.pop(next(iter(self._decode_cache)))
|
||||
return decoded
|
||||
|
||||
def _decode_raw(self, raw_hex: str) -> dict[str, Any] | None:
|
||||
"""Decode raw packet hex with decoder CLI (cached per packet hex)."""
|
||||
if not self._enabled:
|
||||
return None
|
||||
if not self._is_command_available():
|
||||
return None
|
||||
|
||||
command = [*self._command_tokens, "decode", raw_hex, "--json"]
|
||||
if self._channel_keys:
|
||||
command.append("--key")
|
||||
command.extend(self._channel_keys)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
command,
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=self._timeout_seconds,
|
||||
)
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.debug(
|
||||
"LetsMesh decoder timed out after %.2fs",
|
||||
self._timeout_seconds,
|
||||
)
|
||||
return None
|
||||
except OSError as exc:
|
||||
logger.debug("LetsMesh decoder failed to execute: %s", exc)
|
||||
return None
|
||||
|
||||
if result.returncode != 0:
|
||||
stderr = result.stderr.strip() if result.stderr else ""
|
||||
logger.debug(
|
||||
"LetsMesh decoder exited with code %s%s",
|
||||
result.returncode,
|
||||
f": {stderr}" if stderr else "",
|
||||
)
|
||||
return None
|
||||
|
||||
output = result.stdout.strip()
|
||||
if not output:
|
||||
return None
|
||||
|
||||
try:
|
||||
decoded = json.loads(output)
|
||||
except json.JSONDecodeError:
|
||||
logger.debug("LetsMesh decoder returned non-JSON output")
|
||||
return None
|
||||
|
||||
return decoded if isinstance(decoded, dict) else None
|
||||
|
||||
def _is_command_available(self) -> bool:
|
||||
"""Check decoder command availability once."""
|
||||
if self._checked_command:
|
||||
return self._command_available
|
||||
|
||||
self._checked_command = True
|
||||
if not self._command_tokens:
|
||||
self._command_available = False
|
||||
else:
|
||||
command = self._command_tokens[0]
|
||||
if "/" in command:
|
||||
self._command_available = shutil.which(command) is not None
|
||||
else:
|
||||
self._command_available = shutil.which(command) is not None
|
||||
|
||||
if not self._command_available and not self._warned_unavailable:
|
||||
self._warned_unavailable = True
|
||||
command_text = " ".join(self._command_tokens) or "<empty>"
|
||||
logger.warning(
|
||||
"LetsMesh decoder command not found (%s). "
|
||||
"Messages will remain encrypted placeholders until decoder is installed.",
|
||||
command_text,
|
||||
)
|
||||
|
||||
return self._command_available
|
||||
File diff suppressed because it is too large
Load Diff
@@ -21,8 +21,6 @@ from typing import Any, Callable, Optional, TYPE_CHECKING
|
||||
from meshcore_hub.common.database import DatabaseManager
|
||||
from meshcore_hub.common.health import HealthReporter
|
||||
from meshcore_hub.common.mqtt import MQTTClient, MQTTConfig
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.collector.letsmesh_normalizer import LetsMeshNormalizer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from meshcore_hub.collector.webhook import WebhookDispatcher
|
||||
@@ -34,12 +32,9 @@ logger = logging.getLogger(__name__)
|
||||
EventHandler = Callable[[str, str, dict[str, Any], DatabaseManager], None]
|
||||
|
||||
|
||||
class Subscriber(LetsMeshNormalizer):
|
||||
class Subscriber:
|
||||
"""MQTT Subscriber for collecting and storing MeshCore events."""
|
||||
|
||||
INGEST_MODE_NATIVE = "native"
|
||||
INGEST_MODE_LETSMESH_UPLOAD = "letsmesh_upload"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
mqtt_client: MQTTClient,
|
||||
@@ -50,11 +45,6 @@ class Subscriber(LetsMeshNormalizer):
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
ingest_mode: str = INGEST_MODE_NATIVE,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
):
|
||||
"""Initialize subscriber.
|
||||
|
||||
@@ -67,11 +57,6 @@ class Subscriber(LetsMeshNormalizer):
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
"""
|
||||
self.mqtt = mqtt_client
|
||||
self.db = db_manager
|
||||
@@ -94,18 +79,6 @@ class Subscriber(LetsMeshNormalizer):
|
||||
self._node_cleanup_days = node_cleanup_days
|
||||
self._cleanup_thread: Optional[threading.Thread] = None
|
||||
self._last_cleanup: Optional[datetime] = None
|
||||
self._ingest_mode = ingest_mode.lower()
|
||||
if self._ingest_mode not in {
|
||||
self.INGEST_MODE_NATIVE,
|
||||
self.INGEST_MODE_LETSMESH_UPLOAD,
|
||||
}:
|
||||
raise ValueError(f"Unsupported collector ingest mode: {ingest_mode}")
|
||||
self._letsmesh_decoder = LetsMeshPacketDecoder(
|
||||
enabled=letsmesh_decoder_enabled,
|
||||
command=letsmesh_decoder_command,
|
||||
channel_keys=letsmesh_decoder_channel_keys,
|
||||
timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
@property
|
||||
def is_healthy(self) -> bool:
|
||||
@@ -152,34 +125,14 @@ class Subscriber(LetsMeshNormalizer):
|
||||
pattern: Subscription pattern
|
||||
payload: Message payload
|
||||
"""
|
||||
parsed: tuple[str, str, dict[str, Any]] | None
|
||||
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
|
||||
parsed = self._normalize_letsmesh_event(topic, payload)
|
||||
else:
|
||||
parsed_event = self.mqtt.topic_builder.parse_event_topic(topic)
|
||||
parsed = (
|
||||
(parsed_event[0], parsed_event[1], payload) if parsed_event else None
|
||||
)
|
||||
|
||||
# Parse event from topic
|
||||
parsed = self.mqtt.topic_builder.parse_event_topic(topic)
|
||||
if not parsed:
|
||||
logger.warning(
|
||||
"Could not parse topic for ingest mode %s: %s",
|
||||
self._ingest_mode,
|
||||
topic,
|
||||
)
|
||||
logger.warning(f"Could not parse event topic: {topic}")
|
||||
return
|
||||
|
||||
public_key, event_type, normalized_payload = parsed
|
||||
logger.debug("Received event: %s from %s...", event_type, public_key[:12])
|
||||
self._dispatch_event(public_key, event_type, normalized_payload)
|
||||
|
||||
def _dispatch_event(
|
||||
self,
|
||||
public_key: str,
|
||||
event_type: str,
|
||||
payload: dict[str, Any],
|
||||
) -> None:
|
||||
"""Route a normalized event to the appropriate handler."""
|
||||
public_key, event_type = parsed
|
||||
logger.debug(f"Received event: {event_type} from {public_key[:12]}...")
|
||||
|
||||
# Find and call handler
|
||||
handler = self._handlers.get(event_type)
|
||||
@@ -405,20 +358,10 @@ class Subscriber(LetsMeshNormalizer):
|
||||
logger.error(f"Failed to connect to MQTT broker: {e}")
|
||||
raise
|
||||
|
||||
# Subscribe to topics based on ingest mode
|
||||
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
|
||||
letsmesh_topics = [
|
||||
f"{self.mqtt.topic_builder.prefix}/+/packets",
|
||||
f"{self.mqtt.topic_builder.prefix}/+/status",
|
||||
f"{self.mqtt.topic_builder.prefix}/+/internal",
|
||||
]
|
||||
for letsmesh_topic in letsmesh_topics:
|
||||
self.mqtt.subscribe(letsmesh_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to LetsMesh upload topic: {letsmesh_topic}")
|
||||
else:
|
||||
event_topic = self.mqtt.topic_builder.all_events_topic()
|
||||
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to event topic: {event_topic}")
|
||||
# Subscribe to all event topics
|
||||
event_topic = self.mqtt.topic_builder.all_events_topic()
|
||||
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to event topic: {event_topic}")
|
||||
|
||||
self._running = True
|
||||
|
||||
@@ -486,9 +429,6 @@ def create_subscriber(
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
ingest_mode: str = "native",
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
@@ -496,10 +436,6 @@ def create_subscriber(
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
) -> Subscriber:
|
||||
"""Create a configured subscriber instance.
|
||||
|
||||
@@ -510,9 +446,6 @@ def create_subscriber(
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
@@ -520,10 +453,6 @@ def create_subscriber(
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
|
||||
Returns:
|
||||
Configured Subscriber instance
|
||||
@@ -538,8 +467,6 @@ def create_subscriber(
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-collector-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
transport=mqtt_transport,
|
||||
ws_path=mqtt_ws_path,
|
||||
)
|
||||
mqtt_client = MQTTClient(mqtt_config)
|
||||
|
||||
@@ -556,11 +483,6 @@ def create_subscriber(
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
ingest_mode=ingest_mode,
|
||||
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
|
||||
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
# Register handlers
|
||||
@@ -578,9 +500,6 @@ def run_collector(
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
ingest_mode: str = "native",
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
@@ -588,10 +507,6 @@ def run_collector(
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
) -> None:
|
||||
"""Run the collector (blocking).
|
||||
|
||||
@@ -602,9 +517,6 @@ def run_collector(
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
@@ -612,10 +524,6 @@ def run_collector(
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
"""
|
||||
subscriber = create_subscriber(
|
||||
mqtt_host=mqtt_host,
|
||||
@@ -624,9 +532,6 @@ def run_collector(
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=ingest_mode,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=cleanup_enabled,
|
||||
@@ -634,10 +539,6 @@ def run_collector(
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
|
||||
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
# Set up signal handlers
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Pydantic Settings for MeshCore Hub configuration."""
|
||||
|
||||
from enum import Enum
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field, field_validator
|
||||
@@ -25,20 +24,6 @@ class InterfaceMode(str, Enum):
|
||||
SENDER = "SENDER"
|
||||
|
||||
|
||||
class MQTTTransport(str, Enum):
|
||||
"""MQTT transport type."""
|
||||
|
||||
TCP = "tcp"
|
||||
WEBSOCKETS = "websockets"
|
||||
|
||||
|
||||
class CollectorIngestMode(str, Enum):
|
||||
"""Collector MQTT ingest mode."""
|
||||
|
||||
NATIVE = "native"
|
||||
LETSMESH_UPLOAD = "letsmesh_upload"
|
||||
|
||||
|
||||
class CommonSettings(BaseSettings):
|
||||
"""Common settings shared by all components."""
|
||||
|
||||
@@ -70,14 +55,6 @@ class CommonSettings(BaseSettings):
|
||||
mqtt_tls: bool = Field(
|
||||
default=False, description="Enable TLS/SSL for MQTT connection"
|
||||
)
|
||||
mqtt_transport: MQTTTransport = Field(
|
||||
default=MQTTTransport.TCP,
|
||||
description="MQTT transport protocol (tcp or websockets)",
|
||||
)
|
||||
mqtt_ws_path: str = Field(
|
||||
default="/mqtt",
|
||||
description="WebSocket path for MQTT transport (used when MQTT_TRANSPORT=websockets)",
|
||||
)
|
||||
|
||||
|
||||
class InterfaceSettings(CommonSettings):
|
||||
@@ -185,42 +162,6 @@ class CollectorSettings(CommonSettings):
|
||||
description="Remove nodes not seen for this many days (last_seen)",
|
||||
ge=1,
|
||||
)
|
||||
collector_ingest_mode: CollectorIngestMode = Field(
|
||||
default=CollectorIngestMode.NATIVE,
|
||||
description=(
|
||||
"Collector MQTT ingest mode. "
|
||||
"'native' expects <prefix>/<pubkey>/event/<event_name>. "
|
||||
"'letsmesh_upload' expects LetsMesh observer uploads on "
|
||||
"<prefix>/<pubkey>/(packets|status|internal)."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_enabled: bool = Field(
|
||||
default=True,
|
||||
description=(
|
||||
"Enable external LetsMesh packet decoding via meshcore-decoder. "
|
||||
"Only applies when COLLECTOR_INGEST_MODE=letsmesh_upload."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_command: str = Field(
|
||||
default="meshcore-decoder",
|
||||
description=(
|
||||
"Command used to run LetsMesh packet decoder CLI "
|
||||
"(for example: meshcore-decoder, /usr/local/bin/meshcore-decoder, "
|
||||
"or 'npx meshcore-decoder')."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_keys: Optional[str] = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional channel secret keys for LetsMesh message decryption. "
|
||||
"Provide as comma/space separated hex values."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_timeout_seconds: float = Field(
|
||||
default=2.0,
|
||||
description="Timeout in seconds for each decoder invocation.",
|
||||
ge=0.1,
|
||||
)
|
||||
|
||||
@property
|
||||
def collector_data_dir(self) -> str:
|
||||
@@ -260,17 +201,6 @@ class CollectorSettings(CommonSettings):
|
||||
|
||||
return str(Path(self.effective_seed_home) / "members.yaml")
|
||||
|
||||
@property
|
||||
def collector_letsmesh_decoder_keys_list(self) -> list[str]:
|
||||
"""Parse configured LetsMesh decoder keys into a normalized list."""
|
||||
if not self.collector_letsmesh_decoder_keys:
|
||||
return []
|
||||
return [
|
||||
part.strip()
|
||||
for part in re.split(r"[,\s]+", self.collector_letsmesh_decoder_keys)
|
||||
if part.strip()
|
||||
]
|
||||
|
||||
@field_validator("database_url")
|
||||
@classmethod
|
||||
def validate_database_url(cls, v: Optional[str]) -> Optional[str]:
|
||||
@@ -337,13 +267,6 @@ class WebSettings(CommonSettings):
|
||||
default="en",
|
||||
description="Locale/language for the web dashboard (e.g. 'en')",
|
||||
)
|
||||
web_datetime_locale: str = Field(
|
||||
default="en-US",
|
||||
description=(
|
||||
"Locale used for date/time formatting in the web dashboard "
|
||||
"(e.g. 'en-US', 'en-GB')."
|
||||
),
|
||||
)
|
||||
|
||||
# Auto-refresh interval for list pages
|
||||
web_auto_refresh_seconds: int = Field(
|
||||
@@ -352,12 +275,6 @@ class WebSettings(CommonSettings):
|
||||
ge=0,
|
||||
)
|
||||
|
||||
# Trusted proxy hosts for X-Forwarded-For header processing
|
||||
web_trusted_proxy_hosts: str = Field(
|
||||
default="*",
|
||||
description="Comma-separated list of trusted proxy hosts or '*' for all",
|
||||
)
|
||||
|
||||
# Admin interface (disabled by default for security)
|
||||
web_admin_enabled: bool = Field(
|
||||
default=False,
|
||||
|
||||
@@ -24,8 +24,6 @@ class MQTTConfig:
|
||||
keepalive: int = 60
|
||||
clean_session: bool = True
|
||||
tls: bool = False
|
||||
transport: str = "tcp"
|
||||
ws_path: str = "/mqtt"
|
||||
|
||||
|
||||
class TopicBuilder:
|
||||
@@ -39,10 +37,6 @@ class TopicBuilder:
|
||||
"""
|
||||
self.prefix = prefix
|
||||
|
||||
def _prefix_parts(self) -> list[str]:
|
||||
"""Split configured prefix into path segments."""
|
||||
return [part for part in self.prefix.strip("/").split("/") if part]
|
||||
|
||||
def event_topic(self, public_key: str, event_name: str) -> str:
|
||||
"""Build an event topic.
|
||||
|
||||
@@ -92,16 +86,10 @@ class TopicBuilder:
|
||||
Returns:
|
||||
Tuple of (public_key, event_name) or None if invalid
|
||||
"""
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
if (
|
||||
len(parts) >= prefix_len + 3
|
||||
and parts[:prefix_len] == prefix_parts
|
||||
and parts[prefix_len + 1] == "event"
|
||||
):
|
||||
public_key = parts[prefix_len]
|
||||
event_name = "/".join(parts[prefix_len + 2 :])
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "event":
|
||||
public_key = parts[1]
|
||||
event_name = "/".join(parts[3:])
|
||||
return (public_key, event_name)
|
||||
return None
|
||||
|
||||
@@ -114,39 +102,13 @@ class TopicBuilder:
|
||||
Returns:
|
||||
Tuple of (public_key, command_name) or None if invalid
|
||||
"""
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
if (
|
||||
len(parts) >= prefix_len + 3
|
||||
and parts[:prefix_len] == prefix_parts
|
||||
and parts[prefix_len + 1] == "command"
|
||||
):
|
||||
public_key = parts[prefix_len]
|
||||
command_name = "/".join(parts[prefix_len + 2 :])
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "command":
|
||||
public_key = parts[1]
|
||||
command_name = "/".join(parts[3:])
|
||||
return (public_key, command_name)
|
||||
return None
|
||||
|
||||
def parse_letsmesh_upload_topic(self, topic: str) -> tuple[str, str] | None:
|
||||
"""Parse a LetsMesh upload topic to extract public key and feed type.
|
||||
|
||||
LetsMesh upload topics are expected in this form:
|
||||
<prefix>/<public_key>/(packets|status|internal)
|
||||
"""
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
|
||||
if len(parts) != prefix_len + 2 or parts[:prefix_len] != prefix_parts:
|
||||
return None
|
||||
|
||||
public_key = parts[prefix_len]
|
||||
feed_type = parts[prefix_len + 1]
|
||||
if feed_type not in {"packets", "status", "internal"}:
|
||||
return None
|
||||
|
||||
return (public_key, feed_type)
|
||||
|
||||
|
||||
MessageHandler = Callable[[str, str, dict[str, Any]], None]
|
||||
|
||||
@@ -162,24 +124,14 @@ class MQTTClient:
|
||||
"""
|
||||
self.config = config
|
||||
self.topic_builder = TopicBuilder(config.prefix)
|
||||
transport = config.transport.lower()
|
||||
if transport not in {"tcp", "websockets"}:
|
||||
raise ValueError(f"Unsupported MQTT transport: {config.transport}")
|
||||
|
||||
self._client = mqtt.Client(
|
||||
callback_api_version=CallbackAPIVersion.VERSION2, # type: ignore[call-arg]
|
||||
client_id=config.client_id,
|
||||
clean_session=config.clean_session,
|
||||
transport=transport,
|
||||
)
|
||||
self._connected = False
|
||||
self._message_handlers: dict[str, list[MessageHandler]] = {}
|
||||
|
||||
# Set WebSocket path when using MQTT over WebSockets.
|
||||
if transport == "websockets":
|
||||
self._client.ws_set_options(path=config.ws_path)
|
||||
logger.debug("MQTT WebSocket transport enabled (path=%s)", config.ws_path)
|
||||
|
||||
# Set up TLS if enabled
|
||||
if config.tls:
|
||||
self._client.tls_set()
|
||||
|
||||
@@ -28,14 +28,6 @@ class AdvertisementEvent(BaseModel):
|
||||
default=None,
|
||||
description="Capability/status flags bitmask",
|
||||
)
|
||||
lat: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Node latitude when location metadata is available",
|
||||
)
|
||||
lon: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Node longitude when location metadata is available",
|
||||
)
|
||||
|
||||
|
||||
class ContactMessageEvent(BaseModel):
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
@@ -18,7 +16,6 @@ from fastapi.templating import Jinja2Templates
|
||||
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
|
||||
|
||||
from meshcore_hub import __version__
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.common.i18n import load_locale, t
|
||||
from meshcore_hub.common.schemas import RadioConfig
|
||||
from meshcore_hub.web.middleware import CacheControlMiddleware
|
||||
@@ -32,60 +29,6 @@ TEMPLATES_DIR = PACKAGE_DIR / "templates"
|
||||
STATIC_DIR = PACKAGE_DIR / "static"
|
||||
|
||||
|
||||
def _parse_decoder_key_entries(raw: str | None) -> list[str]:
|
||||
"""Parse COLLECTOR_LETSMESH_DECODER_KEYS into key entries."""
|
||||
if not raw:
|
||||
return []
|
||||
return [part.strip() for part in re.split(r"[,\s]+", raw) if part.strip()]
|
||||
|
||||
|
||||
def _build_channel_labels() -> dict[str, str]:
|
||||
"""Build UI channel labels from built-in + configured decoder keys."""
|
||||
raw_keys = os.getenv("COLLECTOR_LETSMESH_DECODER_KEYS")
|
||||
decoder = LetsMeshPacketDecoder(
|
||||
enabled=False,
|
||||
channel_keys=_parse_decoder_key_entries(raw_keys),
|
||||
)
|
||||
labels = decoder.channel_labels_by_index()
|
||||
return {str(idx): label for idx, label in sorted(labels.items())}
|
||||
|
||||
|
||||
def _resolve_logo(media_home: Path) -> tuple[str, bool, Path | None]:
|
||||
"""Resolve logo URL and whether light-mode inversion should be applied.
|
||||
|
||||
Returns:
|
||||
tuple of (logo_url, invert_in_light_mode, resolved_path)
|
||||
"""
|
||||
custom_logo_candidates = (
|
||||
("logo-invert.svg", "/media/images/logo-invert.svg", True),
|
||||
("logo.svg", "/media/images/logo.svg", False),
|
||||
)
|
||||
for filename, url, invert_in_light_mode in custom_logo_candidates:
|
||||
path = media_home / "images" / filename
|
||||
if path.exists():
|
||||
cache_buster = int(path.stat().st_mtime)
|
||||
return f"{url}?v={cache_buster}", invert_in_light_mode, path
|
||||
|
||||
# Default packaged logo is monochrome and needs darkening in light mode.
|
||||
return "/static/img/logo.svg", True, None
|
||||
|
||||
|
||||
def _is_authenticated_proxy_request(request: Request) -> bool:
|
||||
"""Check whether request is authenticated by an upstream auth proxy.
|
||||
|
||||
Supported patterns:
|
||||
- OAuth2/OIDC proxy headers: X-Forwarded-User, X-Auth-Request-User
|
||||
- Forwarded Basic auth header: Authorization: Basic ...
|
||||
"""
|
||||
if request.headers.get("x-forwarded-user"):
|
||||
return True
|
||||
if request.headers.get("x-auth-request-user"):
|
||||
return True
|
||||
|
||||
auth_header = request.headers.get("authorization", "")
|
||||
return auth_header.lower().startswith("basic ")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||
"""Application lifespan handler."""
|
||||
@@ -171,20 +114,13 @@ def _build_config_json(app: FastAPI, request: Request) -> str:
|
||||
"version": __version__,
|
||||
"timezone": app.state.timezone_abbr,
|
||||
"timezone_iana": app.state.timezone,
|
||||
"is_authenticated": _is_authenticated_proxy_request(request),
|
||||
"is_authenticated": bool(request.headers.get("X-Forwarded-User")),
|
||||
"default_theme": app.state.web_theme,
|
||||
"locale": app.state.web_locale,
|
||||
"datetime_locale": app.state.web_datetime_locale,
|
||||
"auto_refresh_seconds": app.state.auto_refresh_seconds,
|
||||
"channel_labels": app.state.channel_labels,
|
||||
"logo_invert_light": app.state.logo_invert_light,
|
||||
}
|
||||
|
||||
# Escape "</script>" sequences to prevent XSS breakout from the
|
||||
# <script> block where this JSON is embedded via |safe in the
|
||||
# Jinja2 template. "<\/" is valid JSON per the spec and parsed
|
||||
# correctly by JavaScript's JSON.parse().
|
||||
return json.dumps(config).replace("</", "<\\/")
|
||||
return json.dumps(config)
|
||||
|
||||
|
||||
def create_app(
|
||||
@@ -240,36 +176,17 @@ def create_app(
|
||||
)
|
||||
|
||||
# Trust proxy headers (X-Forwarded-Proto, X-Forwarded-For) for HTTPS detection
|
||||
trusted_hosts_raw = settings.web_trusted_proxy_hosts
|
||||
if trusted_hosts_raw == "*":
|
||||
trusted_hosts: str | list[str] = "*"
|
||||
else:
|
||||
trusted_hosts = [h.strip() for h in trusted_hosts_raw.split(",") if h.strip()]
|
||||
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts=trusted_hosts)
|
||||
|
||||
# Compute effective admin flag (parameter overrides setting)
|
||||
effective_admin = (
|
||||
admin_enabled if admin_enabled is not None else settings.web_admin_enabled
|
||||
)
|
||||
|
||||
# Warn when admin is enabled but proxy trust is wide open
|
||||
if effective_admin and settings.web_trusted_proxy_hosts == "*":
|
||||
logger.warning(
|
||||
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
|
||||
"Consider restricting to your reverse proxy IP for production deployments."
|
||||
)
|
||||
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*")
|
||||
|
||||
# Add cache control headers based on resource type
|
||||
app.add_middleware(CacheControlMiddleware)
|
||||
|
||||
# Load i18n translations
|
||||
app.state.web_locale = settings.web_locale or "en"
|
||||
app.state.web_datetime_locale = settings.web_datetime_locale or "en-US"
|
||||
load_locale(app.state.web_locale)
|
||||
|
||||
# Auto-refresh interval
|
||||
app.state.auto_refresh_seconds = settings.web_auto_refresh_seconds
|
||||
app.state.channel_labels = _build_channel_labels()
|
||||
|
||||
# Store configuration in app state (use args if provided, else settings)
|
||||
app.state.web_theme = (
|
||||
@@ -277,7 +194,9 @@ def create_app(
|
||||
)
|
||||
app.state.api_url = api_url or settings.api_base_url
|
||||
app.state.api_key = api_key or settings.api_key
|
||||
app.state.admin_enabled = effective_admin
|
||||
app.state.admin_enabled = (
|
||||
admin_enabled if admin_enabled is not None else settings.web_admin_enabled
|
||||
)
|
||||
app.state.network_name = network_name or settings.network_name
|
||||
app.state.network_city = network_city or settings.network_city
|
||||
app.state.network_country = network_country or settings.network_country
|
||||
@@ -340,11 +259,12 @@ def create_app(
|
||||
|
||||
# Check for custom logo and store media path
|
||||
media_home = Path(settings.effective_media_home)
|
||||
logo_url, logo_invert_light, logo_path = _resolve_logo(media_home)
|
||||
app.state.logo_url = logo_url
|
||||
app.state.logo_invert_light = logo_invert_light
|
||||
if logo_path is not None:
|
||||
logger.info("Using custom logo from %s", logo_path)
|
||||
custom_logo_path = media_home / "images" / "logo.svg"
|
||||
if custom_logo_path.exists():
|
||||
app.state.logo_url = "/media/images/logo.svg"
|
||||
logger.info(f"Using custom logo from {custom_logo_path}")
|
||||
else:
|
||||
app.state.logo_url = "/static/img/logo.svg"
|
||||
|
||||
# Mount static files
|
||||
if STATIC_DIR.exists():
|
||||
@@ -390,7 +310,7 @@ def create_app(
|
||||
if (
|
||||
request.method in ("POST", "PUT", "DELETE", "PATCH")
|
||||
and request.app.state.admin_enabled
|
||||
and not _is_authenticated_proxy_request(request)
|
||||
and not request.headers.get("x-forwarded-user")
|
||||
):
|
||||
return JSONResponse(
|
||||
{"detail": "Authentication required"},
|
||||
@@ -736,7 +656,6 @@ def create_app(
|
||||
"features": features,
|
||||
"custom_pages": custom_pages,
|
||||
"logo_url": request.app.state.logo_url,
|
||||
"logo_invert_light": request.app.state.logo_invert_light,
|
||||
"version": __version__,
|
||||
"default_theme": request.app.state.web_theme,
|
||||
"config_json": config_json,
|
||||
|
||||
@@ -46,8 +46,8 @@
|
||||
/* Spacing between horizontal nav items */
|
||||
.menu-horizontal { gap: 0.125rem; }
|
||||
|
||||
/* Invert monochrome logos to dark for light mode */
|
||||
[data-theme="light"] .theme-logo--invert-light {
|
||||
/* Invert white logos/images to dark for light mode */
|
||||
[data-theme="light"] .theme-logo {
|
||||
filter: brightness(0.15);
|
||||
}
|
||||
|
||||
|
||||
@@ -22,65 +22,6 @@ export function getConfig() {
|
||||
return window.__APP_CONFIG__ || {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build channel label map from app config.
|
||||
* Keys are numeric channel indexes and values are non-empty labels.
|
||||
*
|
||||
* @param {Object} [config]
|
||||
* @returns {Map<number, string>}
|
||||
*/
|
||||
export function getChannelLabelsMap(config = getConfig()) {
|
||||
return new Map(
|
||||
Object.entries(config.channel_labels || {})
|
||||
.map(([idx, label]) => [parseInt(idx, 10), typeof label === 'string' ? label.trim() : ''])
|
||||
.filter(([idx, label]) => Number.isInteger(idx) && label.length > 0),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a channel label from a numeric index.
|
||||
*
|
||||
* @param {number|string} channelIdx
|
||||
* @param {Map<number, string>} [channelLabels]
|
||||
* @returns {string|null}
|
||||
*/
|
||||
export function resolveChannelLabel(channelIdx, channelLabels = getChannelLabelsMap()) {
|
||||
const parsed = parseInt(String(channelIdx), 10);
|
||||
if (!Number.isInteger(parsed)) return null;
|
||||
return channelLabels.get(parsed) || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse API datetime strings reliably.
|
||||
* MeshCore API often returns UTC timestamps without an explicit timezone suffix.
|
||||
* In that case, treat them as UTC by appending 'Z' before Date parsing.
|
||||
*
|
||||
* @param {string|null} isoString
|
||||
* @returns {Date|null}
|
||||
*/
|
||||
export function parseAppDate(isoString) {
|
||||
if (!isoString || typeof isoString !== 'string') return null;
|
||||
|
||||
let value = isoString.trim();
|
||||
if (!value) return null;
|
||||
|
||||
// Normalize "YYYY-MM-DD HH:MM:SS" to ISO separator.
|
||||
if (/^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}/.test(value)) {
|
||||
value = value.replace(/\s+/, 'T');
|
||||
}
|
||||
|
||||
// If no timezone suffix is present, treat as UTC.
|
||||
const hasTimePart = /T\d{2}:\d{2}/.test(value);
|
||||
const hasTimezoneSuffix = /(Z|[+-]\d{2}:\d{2}|[+-]\d{4})$/i.test(value);
|
||||
if (hasTimePart && !hasTimezoneSuffix) {
|
||||
value += 'Z';
|
||||
}
|
||||
|
||||
const parsed = new Date(value);
|
||||
if (isNaN(parsed.getTime())) return null;
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Page color palette - reads from CSS custom properties (defined in app.css :root).
|
||||
* Use for inline styles or dynamic coloring in page modules.
|
||||
@@ -101,21 +42,10 @@ export const pageColors = {
|
||||
* @param {string|null} advType
|
||||
* @returns {string} Emoji character
|
||||
*/
|
||||
function inferNodeType(value) {
|
||||
const normalized = (value || '').toLowerCase();
|
||||
if (!normalized) return null;
|
||||
if (normalized.includes('room')) return 'room';
|
||||
if (normalized.includes('repeater') || normalized.includes('relay')) return 'repeater';
|
||||
if (normalized.includes('companion') || normalized.includes('observer')) return 'companion';
|
||||
if (normalized.includes('chat')) return 'chat';
|
||||
return null;
|
||||
}
|
||||
|
||||
export function typeEmoji(advType) {
|
||||
switch (inferNodeType(advType) || (advType || '').toLowerCase()) {
|
||||
switch ((advType || '').toLowerCase()) {
|
||||
case 'chat': return '\u{1F4AC}'; // 💬
|
||||
case 'repeater': return '\u{1F4E1}'; // 📡
|
||||
case 'companion': return '\u{1F4F1}'; // 📱
|
||||
case 'room': return '\u{1FAA7}'; // 🪧
|
||||
default: return '\u{1F4CD}'; // 📍
|
||||
}
|
||||
@@ -144,9 +74,7 @@ export function extractFirstEmoji(str) {
|
||||
*/
|
||||
export function getNodeEmoji(nodeName, advType) {
|
||||
const nameEmoji = extractFirstEmoji(nodeName);
|
||||
if (nameEmoji) return nameEmoji;
|
||||
const inferred = inferNodeType(advType) || inferNodeType(nodeName);
|
||||
return typeEmoji(inferred || advType);
|
||||
return nameEmoji || typeEmoji(advType);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -160,9 +88,8 @@ export function formatDateTime(isoString, options) {
|
||||
try {
|
||||
const config = getConfig();
|
||||
const tz = config.timezone_iana || 'UTC';
|
||||
const locale = config.datetime_locale || 'en-US';
|
||||
const date = parseAppDate(isoString);
|
||||
if (!date) return '-';
|
||||
const date = new Date(isoString);
|
||||
if (isNaN(date.getTime())) return '-';
|
||||
const opts = options || {
|
||||
timeZone: tz,
|
||||
year: 'numeric', month: '2-digit', day: '2-digit',
|
||||
@@ -170,7 +97,7 @@ export function formatDateTime(isoString, options) {
|
||||
hour12: false,
|
||||
};
|
||||
if (!opts.timeZone) opts.timeZone = tz;
|
||||
return date.toLocaleString(locale, opts);
|
||||
return date.toLocaleString('en-GB', opts);
|
||||
} catch {
|
||||
return isoString ? isoString.slice(0, 19).replace('T', ' ') : '-';
|
||||
}
|
||||
@@ -186,10 +113,9 @@ export function formatDateTimeShort(isoString) {
|
||||
try {
|
||||
const config = getConfig();
|
||||
const tz = config.timezone_iana || 'UTC';
|
||||
const locale = config.datetime_locale || 'en-US';
|
||||
const date = parseAppDate(isoString);
|
||||
if (!date) return '-';
|
||||
return date.toLocaleString(locale, {
|
||||
const date = new Date(isoString);
|
||||
if (isNaN(date.getTime())) return '-';
|
||||
return date.toLocaleString('en-GB', {
|
||||
timeZone: tz,
|
||||
year: 'numeric', month: '2-digit', day: '2-digit',
|
||||
hour: '2-digit', minute: '2-digit',
|
||||
@@ -207,8 +133,8 @@ export function formatDateTimeShort(isoString) {
|
||||
*/
|
||||
export function formatRelativeTime(isoString) {
|
||||
if (!isoString) return '';
|
||||
const date = parseAppDate(isoString);
|
||||
if (!date) return '';
|
||||
const date = new Date(isoString);
|
||||
if (isNaN(date.getTime())) return '';
|
||||
const now = new Date();
|
||||
const diffMs = now - date;
|
||||
const diffSec = Math.floor(diffMs / 1000);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { apiGet, apiPost, apiPut, apiDelete } from '../../api.js';
|
||||
import {
|
||||
html, litRender, nothing,
|
||||
getConfig, errorAlert, successAlert, t, escapeHtml,
|
||||
getConfig, errorAlert, successAlert, t,
|
||||
} from '../../components.js';
|
||||
import { iconLock } from '../../icons.js';
|
||||
|
||||
@@ -304,7 +304,7 @@ ${flashHtml}
|
||||
const memberName = row.dataset.memberName;
|
||||
const confirmMsg = t('common.delete_entity_confirm', {
|
||||
entity: t('entities.member').toLowerCase(),
|
||||
name: escapeHtml(memberName)
|
||||
name: memberName
|
||||
});
|
||||
container.querySelector('#delete_confirm_message').innerHTML = confirmMsg;
|
||||
container.querySelector('#deleteModal').showModal();
|
||||
|
||||
@@ -2,7 +2,7 @@ import { apiGet, apiPost, apiPut, apiDelete } from '../../api.js';
|
||||
import {
|
||||
html, litRender, nothing, unsafeHTML,
|
||||
getConfig, typeEmoji, formatDateTimeShort, errorAlert,
|
||||
successAlert, truncateKey, t, escapeHtml,
|
||||
successAlert, truncateKey, t,
|
||||
} from '../../components.js';
|
||||
import { iconTag, iconLock } from '../../icons.js';
|
||||
|
||||
@@ -240,8 +240,7 @@ export async function render(container, params, router) {
|
||||
<div class="modal-box">
|
||||
<h3 class="font-bold text-lg">${t('common.copy_all_entity_to_another_node', { entity: t('entities.tags') })}</h3>
|
||||
<form id="copy-all-form" class="py-4">
|
||||
<!-- unsafeHTML needed for translation HTML tags; nodeName is pre-escaped -->
|
||||
<p class="mb-4">${unsafeHTML(t('common.copy_all_entity_description', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: escapeHtml(nodeName) }))}</p>
|
||||
<p class="mb-4">${unsafeHTML(t('common.copy_all_entity_description', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
<div class="form-control mb-4">
|
||||
<label class="label"><span class="label-text">${t('admin_node_tags.destination_node')}</span></label>
|
||||
<select id="copyAllDestination" class="select select-bordered w-full" required>
|
||||
@@ -270,8 +269,7 @@ export async function render(container, params, router) {
|
||||
<div class="modal-box">
|
||||
<h3 class="font-bold text-lg">${t('common.delete_all_entity', { entity: t('entities.tags') })}</h3>
|
||||
<div class="py-4">
|
||||
<!-- unsafeHTML needed for translation HTML tags; nodeName is pre-escaped -->
|
||||
<p class="mb-4">${unsafeHTML(t('common.delete_all_entity_confirm', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: escapeHtml(nodeName) }))}</p>
|
||||
<p class="mb-4">${unsafeHTML(t('common.delete_all_entity_confirm', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
<div class="alert alert-error mb-4">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" /></svg>
|
||||
<span>${t('admin_node_tags.delete_all_warning')}</span>
|
||||
@@ -451,7 +449,7 @@ ${contentHtml}`, container);
|
||||
activeTagKey = row.dataset.tagKey;
|
||||
const confirmMsg = t('common.delete_entity_confirm', {
|
||||
entity: t('entities.tag').toLowerCase(),
|
||||
name: `"<span class="font-mono font-semibold">${escapeHtml(activeTagKey)}</span>"`
|
||||
name: `"<span class="font-mono font-semibold">${activeTagKey}</span>"`
|
||||
});
|
||||
container.querySelector('#delete_tag_confirm_message').innerHTML = confirmMsg;
|
||||
container.querySelector('#deleteModal').showModal();
|
||||
|
||||
@@ -1,33 +1,44 @@
|
||||
import { apiGet } from '../api.js';
|
||||
import {
|
||||
html, litRender, nothing,
|
||||
getConfig, getChannelLabelsMap, resolveChannelLabel,
|
||||
typeEmoji, errorAlert, pageColors, t, formatDateTime,
|
||||
getConfig, typeEmoji, errorAlert, pageColors, t,
|
||||
} from '../components.js';
|
||||
import {
|
||||
iconNodes, iconAdvertisements, iconMessages, iconChannel,
|
||||
} from '../icons.js';
|
||||
|
||||
function channelLabel(channel, channelLabels) {
|
||||
const idx = parseInt(String(channel), 10);
|
||||
if (Number.isInteger(idx)) {
|
||||
return resolveChannelLabel(idx, channelLabels) || `Ch ${idx}`;
|
||||
}
|
||||
return String(channel);
|
||||
}
|
||||
|
||||
function formatTimeOnly(isoString) {
|
||||
return formatDateTime(isoString, {
|
||||
hour: '2-digit', minute: '2-digit', second: '2-digit',
|
||||
hour12: false,
|
||||
});
|
||||
if (!isoString) return '-';
|
||||
try {
|
||||
const config = getConfig();
|
||||
const tz = config.timezone_iana || 'UTC';
|
||||
const date = new Date(isoString);
|
||||
if (isNaN(date.getTime())) return '-';
|
||||
return date.toLocaleString('en-GB', {
|
||||
timeZone: tz,
|
||||
hour: '2-digit', minute: '2-digit', second: '2-digit',
|
||||
hour12: false,
|
||||
});
|
||||
} catch {
|
||||
return '-';
|
||||
}
|
||||
}
|
||||
|
||||
function formatTimeShort(isoString) {
|
||||
return formatDateTime(isoString, {
|
||||
hour: '2-digit', minute: '2-digit',
|
||||
hour12: false,
|
||||
});
|
||||
if (!isoString) return '-';
|
||||
try {
|
||||
const config = getConfig();
|
||||
const tz = config.timezone_iana || 'UTC';
|
||||
const date = new Date(isoString);
|
||||
if (isNaN(date.getTime())) return '-';
|
||||
return date.toLocaleString('en-GB', {
|
||||
timeZone: tz,
|
||||
hour: '2-digit', minute: '2-digit',
|
||||
hour12: false,
|
||||
});
|
||||
} catch {
|
||||
return '-';
|
||||
}
|
||||
}
|
||||
|
||||
function renderRecentAds(ads) {
|
||||
@@ -66,11 +77,10 @@ function renderRecentAds(ads) {
|
||||
</div>`;
|
||||
}
|
||||
|
||||
function renderChannelMessages(channelMessages, channelLabels) {
|
||||
function renderChannelMessages(channelMessages) {
|
||||
if (!channelMessages || Object.keys(channelMessages).length === 0) return nothing;
|
||||
|
||||
const channels = Object.entries(channelMessages).map(([channel, messages]) => {
|
||||
const label = channelLabel(channel, channelLabels);
|
||||
const msgLines = messages.map(msg => html`
|
||||
<div class="text-sm">
|
||||
<span class="text-xs opacity-50">${formatTimeShort(msg.received_at)}</span>
|
||||
@@ -79,7 +89,8 @@ function renderChannelMessages(channelMessages, channelLabels) {
|
||||
|
||||
return html`<div>
|
||||
<h3 class="font-semibold text-sm mb-2 flex items-center gap-2">
|
||||
<span class="badge badge-info badge-sm">${label}</span>
|
||||
<span class="badge badge-info badge-sm">CH${String(channel)}</span>
|
||||
${t('dashboard.channel', { number: String(channel) })}
|
||||
</h3>
|
||||
<div class="space-y-1 pl-2 border-l-2 border-base-300">
|
||||
${msgLines}
|
||||
@@ -109,7 +120,6 @@ function gridCols(count) {
|
||||
export async function render(container, params, router) {
|
||||
try {
|
||||
const config = getConfig();
|
||||
const channelLabels = getChannelLabelsMap(config);
|
||||
const features = config.features || {};
|
||||
const showNodes = features.nodes !== false;
|
||||
const showAdverts = features.advertisements !== false;
|
||||
@@ -225,7 +235,7 @@ ${bottomCount > 0 ? html`
|
||||
</div>
|
||||
</div>` : nothing}
|
||||
|
||||
${showMessages ? renderChannelMessages(stats.channel_messages, channelLabels) : nothing}
|
||||
${showMessages ? renderChannelMessages(stats.channel_messages) : nothing}
|
||||
</div>` : nothing}`, container);
|
||||
|
||||
window.initDashboardCharts(
|
||||
|
||||
@@ -33,7 +33,6 @@ export async function render(container, params, router) {
|
||||
const features = config.features || {};
|
||||
const networkName = config.network_name || 'MeshCore Network';
|
||||
const logoUrl = config.logo_url || '/static/img/logo.svg';
|
||||
const logoInvertLight = config.logo_invert_light !== false;
|
||||
const customPages = config.custom_pages || [];
|
||||
const rc = config.network_radio_config;
|
||||
|
||||
@@ -70,7 +69,7 @@ export async function render(container, params, router) {
|
||||
<div class="${showStats ? 'grid grid-cols-1 lg:grid-cols-3 gap-6' : ''} bg-base-100 rounded-box shadow-xl p-6">
|
||||
<div class="${showStats ? 'lg:col-span-2' : ''} flex flex-col items-center text-center">
|
||||
<div class="flex flex-col sm:flex-row items-center gap-4 sm:gap-8 mb-4">
|
||||
<img src="${logoUrl}" alt="${networkName}" class="theme-logo ${logoInvertLight ? 'theme-logo--invert-light' : ''} h-24 w-24 sm:h-36 sm:w-36" />
|
||||
<img src="${logoUrl}" alt="${networkName}" class="theme-logo h-24 w-24 sm:h-36 sm:w-36" />
|
||||
<div class="flex flex-col justify-center">
|
||||
<h1 class="hero-title text-3xl sm:text-5xl lg:text-6xl font-black tracking-tight">${networkName}</h1>
|
||||
${cityCountry}
|
||||
@@ -159,7 +158,7 @@ export async function render(container, params, router) {
|
||||
<div class="card-body flex flex-col items-center justify-center">
|
||||
<p class="text-sm opacity-70 mb-4 text-center">${t('home.meshcore_attribution')}</p>
|
||||
<a href="https://meshcore.co.uk/" target="_blank" rel="noopener noreferrer" class="hover:opacity-80 transition-opacity">
|
||||
<img src="/static/img/meshcore.svg" alt="MeshCore" class="theme-logo theme-logo--invert-light h-8" />
|
||||
<img src="/static/img/meshcore.svg" alt="MeshCore" class="theme-logo h-8" />
|
||||
</a>
|
||||
<p class="text-xs opacity-50 mt-4 text-center">Connecting people and things, without using the internet</p>
|
||||
<div class="flex gap-2 mt-4">
|
||||
|
||||
@@ -2,7 +2,6 @@ import { apiGet } from '../api.js';
|
||||
import {
|
||||
html, litRender, nothing, t,
|
||||
getConfig, formatDateTime, formatDateTimeShort,
|
||||
getChannelLabelsMap, resolveChannelLabel,
|
||||
truncateKey, errorAlert,
|
||||
pagination, timezoneIndicator,
|
||||
createFilterHandler, autoSubmit, submitOnEnter
|
||||
@@ -17,156 +16,10 @@ export async function render(container, params, router) {
|
||||
const offset = (page - 1) * limit;
|
||||
|
||||
const config = getConfig();
|
||||
const channelLabels = getChannelLabelsMap(config);
|
||||
const tz = config.timezone || '';
|
||||
const tzBadge = tz && tz !== 'UTC' ? html`<span class="text-sm opacity-60">${tz}</span>` : nothing;
|
||||
const navigate = (url) => router.navigate(url);
|
||||
|
||||
function channelInfo(msg) {
|
||||
if (msg.message_type !== 'channel') {
|
||||
return { label: null, text: msg.text || '-' };
|
||||
}
|
||||
const rawText = msg.text || '';
|
||||
const match = rawText.match(/^\[([^\]]+)\]\s+([\s\S]*)$/);
|
||||
if (msg.channel_idx !== null && msg.channel_idx !== undefined) {
|
||||
const knownLabel = resolveChannelLabel(msg.channel_idx, channelLabels);
|
||||
if (knownLabel) {
|
||||
return {
|
||||
label: knownLabel,
|
||||
text: match ? (match[2] || '-') : (rawText || '-'),
|
||||
};
|
||||
}
|
||||
}
|
||||
if (msg.channel_name) {
|
||||
return { label: msg.channel_name, text: msg.text || '-' };
|
||||
}
|
||||
if (match) {
|
||||
return {
|
||||
label: match[1],
|
||||
text: match[2] || '-',
|
||||
};
|
||||
}
|
||||
if (msg.channel_idx !== null && msg.channel_idx !== undefined) {
|
||||
const knownLabel = resolveChannelLabel(msg.channel_idx, channelLabels);
|
||||
return { label: knownLabel || `Ch ${msg.channel_idx}`, text: rawText || '-' };
|
||||
}
|
||||
return { label: t('messages.type_channel'), text: rawText || '-' };
|
||||
}
|
||||
|
||||
function senderBlock(msg, emphasize = false) {
|
||||
const senderName = msg.sender_tag_name || msg.sender_name;
|
||||
if (senderName) {
|
||||
return emphasize
|
||||
? html`<span class="font-medium">${senderName}</span>`
|
||||
: html`${senderName}`;
|
||||
}
|
||||
const prefix = (msg.pubkey_prefix || '').slice(0, 12);
|
||||
if (prefix) {
|
||||
return html`<span class="font-mono text-xs">${prefix}</span>`;
|
||||
}
|
||||
return html`<span class="opacity-50">-</span>`;
|
||||
}
|
||||
|
||||
function parseSenderFromText(text) {
|
||||
if (!text || typeof text !== 'string') {
|
||||
return { sender: null, text: text || '-' };
|
||||
}
|
||||
const patterns = [
|
||||
/^\s*ack\s+@\[(.+?)\]\s*:\s*([\s\S]+)$/i,
|
||||
/^\s*@\[(.+?)\]\s*:\s*([\s\S]+)$/i,
|
||||
/^\s*ack\s+([^:|\n]{1,80})\s*:\s*([\s\S]+)$/i,
|
||||
];
|
||||
for (const pattern of patterns) {
|
||||
const match = text.match(pattern);
|
||||
if (!match) continue;
|
||||
const sender = (match[1] || '').trim();
|
||||
const remaining = (match[2] || '').trim();
|
||||
if (!sender) continue;
|
||||
return {
|
||||
sender,
|
||||
text: remaining || text,
|
||||
};
|
||||
}
|
||||
return { sender: null, text };
|
||||
}
|
||||
|
||||
function messageTextWithSender(msg, text) {
|
||||
const parsed = parseSenderFromText(text || '-');
|
||||
const explicitSender = msg.sender_tag_name || msg.sender_name || (msg.pubkey_prefix || '').slice(0, 12) || null;
|
||||
const sender = explicitSender || parsed.sender;
|
||||
const body = (parsed.text || text || '-').trim() || '-';
|
||||
if (!sender) {
|
||||
return body;
|
||||
}
|
||||
if (body.toLowerCase().startsWith(`${sender.toLowerCase()}:`)) {
|
||||
return body;
|
||||
}
|
||||
return `${sender}: ${body}`;
|
||||
}
|
||||
|
||||
function dedupeBySignature(items) {
|
||||
const deduped = [];
|
||||
const bySignature = new Map();
|
||||
|
||||
for (const msg of items) {
|
||||
const signature = typeof msg.signature === 'string' ? msg.signature.trim().toUpperCase() : '';
|
||||
const canDedupe = msg.message_type === 'channel' && signature.length >= 8;
|
||||
if (!canDedupe) {
|
||||
deduped.push(msg);
|
||||
continue;
|
||||
}
|
||||
|
||||
const existing = bySignature.get(signature);
|
||||
if (!existing) {
|
||||
const clone = {
|
||||
...msg,
|
||||
receivers: [...(msg.receivers || [])],
|
||||
};
|
||||
bySignature.set(signature, clone);
|
||||
deduped.push(clone);
|
||||
continue;
|
||||
}
|
||||
|
||||
const combined = [...(existing.receivers || []), ...(msg.receivers || [])];
|
||||
const seenReceivers = new Set();
|
||||
existing.receivers = combined.filter((recv) => {
|
||||
const key = recv?.public_key || recv?.node_id || `${recv?.received_at || ''}:${recv?.snr || ''}`;
|
||||
if (seenReceivers.has(key)) return false;
|
||||
seenReceivers.add(key);
|
||||
return true;
|
||||
});
|
||||
|
||||
if (!existing.received_by && msg.received_by) existing.received_by = msg.received_by;
|
||||
if (!existing.receiver_name && msg.receiver_name) existing.receiver_name = msg.receiver_name;
|
||||
if (!existing.receiver_tag_name && msg.receiver_tag_name) existing.receiver_tag_name = msg.receiver_tag_name;
|
||||
if (!existing.pubkey_prefix && msg.pubkey_prefix) existing.pubkey_prefix = msg.pubkey_prefix;
|
||||
if (!existing.sender_name && msg.sender_name) existing.sender_name = msg.sender_name;
|
||||
if (!existing.sender_tag_name && msg.sender_tag_name) existing.sender_tag_name = msg.sender_tag_name;
|
||||
if (!existing.channel_name && msg.channel_name) existing.channel_name = msg.channel_name;
|
||||
if (
|
||||
existing.channel_name === 'Public'
|
||||
&& msg.channel_name
|
||||
&& msg.channel_name !== 'Public'
|
||||
) {
|
||||
existing.channel_name = msg.channel_name;
|
||||
}
|
||||
if (existing.channel_idx === null || existing.channel_idx === undefined) {
|
||||
if (msg.channel_idx !== null && msg.channel_idx !== undefined) {
|
||||
existing.channel_idx = msg.channel_idx;
|
||||
}
|
||||
} else if (
|
||||
existing.channel_idx === 17
|
||||
&& msg.channel_idx !== null
|
||||
&& msg.channel_idx !== undefined
|
||||
&& msg.channel_idx !== 17
|
||||
) {
|
||||
existing.channel_idx = msg.channel_idx;
|
||||
}
|
||||
}
|
||||
|
||||
return deduped;
|
||||
}
|
||||
|
||||
function renderPage(content, { total = null } = {}) {
|
||||
litRender(html`
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
@@ -186,7 +39,7 @@ ${content}`, container);
|
||||
async function fetchAndRenderData() {
|
||||
try {
|
||||
const data = await apiGet('/api/v1/messages', { limit, offset, message_type });
|
||||
const messages = dedupeBySignature(data.items || []);
|
||||
const messages = data.items || [];
|
||||
const total = data.total || 0;
|
||||
const totalPages = Math.ceil(total / limit);
|
||||
|
||||
@@ -196,12 +49,17 @@ ${content}`, container);
|
||||
const isChannel = msg.message_type === 'channel';
|
||||
const typeIcon = isChannel ? '\u{1F4FB}' : '\u{1F464}';
|
||||
const typeTitle = isChannel ? t('messages.type_channel') : t('messages.type_contact');
|
||||
const chInfo = channelInfo(msg);
|
||||
const sender = senderBlock(msg);
|
||||
const displayMessage = messageTextWithSender(msg, chInfo.text);
|
||||
const fromPrimary = isChannel
|
||||
? html`<span class="font-medium">${chInfo.label || t('messages.type_channel')}</span>`
|
||||
: sender;
|
||||
let senderBlock;
|
||||
if (isChannel) {
|
||||
senderBlock = html`<span class="opacity-60">${t('messages.type_public')}</span>`;
|
||||
} else {
|
||||
const senderName = msg.sender_tag_name || msg.sender_name;
|
||||
if (senderName) {
|
||||
senderBlock = senderName;
|
||||
} else {
|
||||
senderBlock = html`<span class="font-mono text-xs">${(msg.pubkey_prefix || '-').slice(0, 12)}</span>`;
|
||||
}
|
||||
}
|
||||
let receiversBlock = nothing;
|
||||
if (msg.receivers && msg.receivers.length >= 1) {
|
||||
receiversBlock = html`<div class="flex gap-0.5">
|
||||
@@ -223,7 +81,7 @@ ${content}`, container);
|
||||
</span>
|
||||
<div class="min-w-0">
|
||||
<div class="font-medium text-sm truncate">
|
||||
${fromPrimary}
|
||||
${senderBlock}
|
||||
</div>
|
||||
<div class="text-xs opacity-60">
|
||||
${formatDateTimeShort(msg.received_at)}
|
||||
@@ -234,7 +92,7 @@ ${content}`, container);
|
||||
${receiversBlock}
|
||||
</div>
|
||||
</div>
|
||||
<p class="text-sm mt-2 break-words whitespace-pre-wrap">${displayMessage}</p>
|
||||
<p class="text-sm mt-2 break-words whitespace-pre-wrap">${msg.text || '-'}</p>
|
||||
</div>
|
||||
</div>`;
|
||||
});
|
||||
@@ -245,12 +103,17 @@ ${content}`, container);
|
||||
const isChannel = msg.message_type === 'channel';
|
||||
const typeIcon = isChannel ? '\u{1F4FB}' : '\u{1F464}';
|
||||
const typeTitle = isChannel ? t('messages.type_channel') : t('messages.type_contact');
|
||||
const chInfo = channelInfo(msg);
|
||||
const sender = senderBlock(msg, true);
|
||||
const displayMessage = messageTextWithSender(msg, chInfo.text);
|
||||
const fromPrimary = isChannel
|
||||
? html`<span class="font-medium">${chInfo.label || t('messages.type_channel')}</span>`
|
||||
: sender;
|
||||
let senderBlock;
|
||||
if (isChannel) {
|
||||
senderBlock = html`<span class="opacity-60">${t('messages.type_public')}</span>`;
|
||||
} else {
|
||||
const senderName = msg.sender_tag_name || msg.sender_name;
|
||||
if (senderName) {
|
||||
senderBlock = html`<span class="font-medium">${senderName}</span>`;
|
||||
} else {
|
||||
senderBlock = html`<span class="font-mono text-xs">${(msg.pubkey_prefix || '-').slice(0, 12)}</span>`;
|
||||
}
|
||||
}
|
||||
let receiversBlock;
|
||||
if (msg.receivers && msg.receivers.length >= 1) {
|
||||
receiversBlock = html`<div class="flex gap-1">
|
||||
@@ -268,10 +131,8 @@ ${content}`, container);
|
||||
return html`<tr class="hover align-top">
|
||||
<td class="text-lg" title=${typeTitle}>${typeIcon}</td>
|
||||
<td class="text-sm whitespace-nowrap">${formatDateTime(msg.received_at)}</td>
|
||||
<td class="text-sm whitespace-nowrap">
|
||||
<div>${fromPrimary}</div>
|
||||
</td>
|
||||
<td class="break-words max-w-md" style="white-space: pre-wrap;">${displayMessage}</td>
|
||||
<td class="text-sm whitespace-nowrap">${senderBlock}</td>
|
||||
<td class="break-words max-w-md" style="white-space: pre-wrap;">${msg.text || '-'}</td>
|
||||
<td>${receiversBlock}</td>
|
||||
</tr>`;
|
||||
});
|
||||
|
||||
@@ -209,7 +209,7 @@ ${heroHtml}
|
||||
const initQr = () => {
|
||||
const qrEl = document.getElementById('qr-code');
|
||||
if (!qrEl || typeof QRCode === 'undefined') return false;
|
||||
const typeMap = { chat: 1, repeater: 2, room: 3, companion: 1, sensor: 4 };
|
||||
const typeMap = { chat: 1, repeater: 2, room: 3, sensor: 4 };
|
||||
const typeNum = typeMap[(node.adv_type || '').toLowerCase()] || 1;
|
||||
const url = 'meshcore://contact/add?name=' + encodeURIComponent(displayName) + '&public_key=' + node.public_key + '&type=' + typeNum;
|
||||
new QRCode(qrEl, {
|
||||
|
||||
@@ -159,7 +159,6 @@ ${content}`, container);
|
||||
<option value="">${t('common.all_types')}</option>
|
||||
<option value="chat" ?selected=${adv_type === 'chat'}>${t('node_types.chat')}</option>
|
||||
<option value="repeater" ?selected=${adv_type === 'repeater'}>${t('node_types.repeater')}</option>
|
||||
<option value="companion" ?selected=${adv_type === 'companion'}>${t('node_types.companion')}</option>
|
||||
<option value="room" ?selected=${adv_type === 'room'}>${t('node_types.room')}</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
@@ -122,8 +122,7 @@
|
||||
"node_types": {
|
||||
"chat": "Chat",
|
||||
"repeater": "Repeater",
|
||||
"companion": "Companion",
|
||||
"room": "Room Server",
|
||||
"room": "Room",
|
||||
"unknown": "Unknown"
|
||||
},
|
||||
"home": {
|
||||
|
||||
@@ -223,8 +223,7 @@ Mesh network node type labels:
|
||||
|-----|---------|---------|
|
||||
| `chat` | Chat | Chat node type |
|
||||
| `repeater` | Repeater | Repeater/relay node type |
|
||||
| `companion` | Companion | Companion/observer node type |
|
||||
| `room` | Room Server | Room server/group node type |
|
||||
| `room` | Room | Room/group node type |
|
||||
| `unknown` | Unknown | Unknown node type fallback |
|
||||
|
||||
### 7. `home`
|
||||
|
||||
@@ -30,12 +30,6 @@
|
||||
|
||||
<!-- Favicon -->
|
||||
<link rel="icon" type="image/svg+xml" href="{{ logo_url }}">
|
||||
{% if not logo_invert_light %}
|
||||
<style>
|
||||
/* Keep custom network logos full-color in light mode */
|
||||
[data-theme="light"] img[src="{{ logo_url }}"] { filter: none !important; }
|
||||
</style>
|
||||
{% endif %}
|
||||
|
||||
<!-- Tailwind CSS with DaisyUI -->
|
||||
<link href="https://cdn.jsdelivr.net/npm/daisyui@4.4.19/dist/full.min.css" rel="stylesheet" type="text/css" />
|
||||
@@ -93,7 +87,7 @@
|
||||
</ul>
|
||||
</div>
|
||||
<a href="/" class="btn btn-ghost text-xl">
|
||||
<img src="{{ logo_url }}" alt="{{ network_name }}" class="theme-logo{% if logo_invert_light %} theme-logo--invert-light{% endif %} h-6 w-6 mr-2" />
|
||||
<img src="{{ logo_url }}" alt="{{ network_name }}" class="theme-logo h-6 w-6 mr-2" />
|
||||
{{ network_name }}
|
||||
</a>
|
||||
</div>
|
||||
|
||||
@@ -1,28 +1,8 @@
|
||||
"""Tests for API authentication.
|
||||
|
||||
Verifies that constant-time key comparison (hmac.compare_digest) works
|
||||
correctly with no behavioral regressions from the original == operator.
|
||||
"""
|
||||
|
||||
import base64
|
||||
"""Tests for API authentication."""
|
||||
|
||||
|
||||
def _make_basic_auth(username: str, password: str) -> str:
|
||||
"""Create a Basic auth header value."""
|
||||
credentials = base64.b64encode(f"{username}:{password}".encode()).decode()
|
||||
return f"Basic {credentials}"
|
||||
|
||||
|
||||
def _clear_metrics_cache() -> None:
|
||||
"""Clear the metrics module cache."""
|
||||
from meshcore_hub.api.metrics import _cache
|
||||
|
||||
_cache["output"] = b""
|
||||
_cache["expires_at"] = 0.0
|
||||
|
||||
|
||||
class TestReadAuthentication:
|
||||
"""Tests for read-level authentication (require_read)."""
|
||||
class TestAuthenticationFlow:
|
||||
"""Tests for authentication behavior."""
|
||||
|
||||
def test_no_auth_when_keys_not_configured(self, client_no_auth):
|
||||
"""Test that no auth is required when keys are not configured."""
|
||||
@@ -50,47 +30,46 @@ class TestReadAuthentication:
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_read_key_accepted_on_multiple_endpoints(self, client_with_auth):
|
||||
"""Test that read key is accepted across different read endpoints."""
|
||||
for endpoint in ["/api/v1/nodes", "/api/v1/messages"]:
|
||||
response = client_with_auth.get(
|
||||
endpoint,
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 200, f"Read key rejected on {endpoint}"
|
||||
|
||||
def test_read_endpoints_accept_admin_key(self, client_with_auth):
|
||||
"""Test that admin key also grants read access."""
|
||||
"""Test that read endpoints accept admin key."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/nodes",
|
||||
headers={"Authorization": "Bearer test-admin-key"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_admin_key_grants_read_on_multiple_endpoints(self, client_with_auth):
|
||||
"""Test that admin key grants read access across different endpoints."""
|
||||
for endpoint in ["/api/v1/nodes", "/api/v1/messages"]:
|
||||
response = client_with_auth.get(
|
||||
endpoint,
|
||||
headers={"Authorization": "Bearer test-admin-key"},
|
||||
)
|
||||
assert (
|
||||
response.status_code == 200
|
||||
), f"Admin key rejected on read endpoint {endpoint}"
|
||||
def test_admin_endpoints_reject_read_key(self, client_with_auth):
|
||||
"""Test that admin endpoints reject read key."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 403
|
||||
|
||||
def test_invalid_key_rejected_on_read_endpoint(self, client_with_auth):
|
||||
"""Test that invalid keys are rejected with 401 on read endpoints."""
|
||||
def test_admin_endpoints_accept_admin_key(self, client_with_auth):
|
||||
"""Test that admin endpoints accept admin key."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
headers={"Authorization": "Bearer test-admin-key"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_invalid_key_rejected(self, client_with_auth):
|
||||
"""Test that invalid keys are rejected."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/nodes",
|
||||
headers={"Authorization": "Bearer invalid-key"},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_no_auth_header_rejected_on_read_endpoint(self, client_with_auth):
|
||||
"""Test that missing auth header is rejected on read endpoints."""
|
||||
response = client_with_auth.get("/api/v1/nodes")
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_missing_bearer_prefix_rejected(self, client_with_auth):
|
||||
"""Test that tokens without Bearer prefix are rejected."""
|
||||
response = client_with_auth.get(
|
||||
@@ -108,124 +87,6 @@ class TestReadAuthentication:
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
class TestAdminAuthentication:
|
||||
"""Tests for admin-level authentication (require_admin)."""
|
||||
|
||||
def test_admin_endpoints_accept_admin_key(self, client_with_auth):
|
||||
"""Test that admin endpoints accept admin key."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
headers={"Authorization": "Bearer test-admin-key"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_admin_endpoints_reject_read_key(self, client_with_auth):
|
||||
"""Test that admin endpoints reject read key with 403."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 403
|
||||
|
||||
def test_admin_endpoints_reject_invalid_key(self, client_with_auth):
|
||||
"""Test that admin endpoints reject invalid keys with 403."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
headers={"Authorization": "Bearer completely-wrong-key"},
|
||||
)
|
||||
assert response.status_code == 403
|
||||
|
||||
def test_admin_endpoints_reject_no_auth_header(self, client_with_auth):
|
||||
"""Test that admin endpoints reject missing auth header with 401."""
|
||||
response = client_with_auth.post(
|
||||
"/api/v1/commands/send-message",
|
||||
json={
|
||||
"destination": "abc123def456abc123def456abc123de",
|
||||
"text": "Test",
|
||||
},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
class TestMetricsAuthentication:
|
||||
"""Tests for metrics endpoint authentication (Basic auth with hmac.compare_digest)."""
|
||||
|
||||
def test_metrics_no_auth_when_no_read_key(self, client_no_auth):
|
||||
"""Test that metrics requires no auth when no read key is configured."""
|
||||
_clear_metrics_cache()
|
||||
response = client_no_auth.get("/metrics")
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_metrics_accepts_valid_basic_auth(self, client_with_auth):
|
||||
"""Test that metrics accepts correct Basic credentials."""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get(
|
||||
"/metrics",
|
||||
headers={"Authorization": _make_basic_auth("metrics", "test-read-key")},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_metrics_rejects_no_auth_when_key_set(self, client_with_auth):
|
||||
"""Test 401 when read key is set but no auth provided."""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get("/metrics")
|
||||
assert response.status_code == 401
|
||||
assert "WWW-Authenticate" in response.headers
|
||||
|
||||
def test_metrics_rejects_wrong_password(self, client_with_auth):
|
||||
"""Test that metrics rejects incorrect password."""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get(
|
||||
"/metrics",
|
||||
headers={"Authorization": _make_basic_auth("metrics", "wrong-key")},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_metrics_rejects_wrong_username(self, client_with_auth):
|
||||
"""Test that metrics rejects incorrect username."""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get(
|
||||
"/metrics",
|
||||
headers={"Authorization": _make_basic_auth("admin", "test-read-key")},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_metrics_rejects_bearer_auth(self, client_with_auth):
|
||||
"""Test that Bearer auth does not work for metrics."""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get(
|
||||
"/metrics",
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_metrics_rejects_admin_key_as_password(self, client_with_auth):
|
||||
"""Test that admin key is not accepted as metrics password.
|
||||
|
||||
Metrics uses only the read key for Basic auth, not the admin key.
|
||||
"""
|
||||
_clear_metrics_cache()
|
||||
response = client_with_auth.get(
|
||||
"/metrics",
|
||||
headers={
|
||||
"Authorization": _make_basic_auth("metrics", "test-admin-key"),
|
||||
},
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
class TestHealthEndpoint:
|
||||
"""Tests for health check endpoint."""
|
||||
|
||||
|
||||
@@ -35,71 +35,35 @@ class TestDashboardStats:
|
||||
assert data["total_advertisements"] == 1
|
||||
|
||||
|
||||
class TestDashboardHtmlRemoved:
|
||||
"""Tests that legacy HTML dashboard endpoint has been removed."""
|
||||
class TestDashboardHtml:
|
||||
"""Tests for GET /dashboard endpoint."""
|
||||
|
||||
def test_dashboard_html_endpoint_removed(self, client_no_auth):
|
||||
"""Test that GET /dashboard no longer returns HTML (legacy endpoint removed)."""
|
||||
def test_dashboard_html_response(self, client_no_auth):
|
||||
"""Test dashboard returns HTML."""
|
||||
response = client_no_auth.get("/api/v1/dashboard")
|
||||
assert response.status_code in (404, 405)
|
||||
|
||||
def test_dashboard_html_endpoint_removed_trailing_slash(self, client_no_auth):
|
||||
"""Test that GET /dashboard/ also returns 404/405."""
|
||||
response = client_no_auth.get("/api/v1/dashboard/")
|
||||
assert response.status_code in (404, 405)
|
||||
|
||||
|
||||
class TestDashboardAuthenticatedJsonRoutes:
|
||||
"""Tests that dashboard JSON sub-routes return valid JSON with authentication."""
|
||||
|
||||
def test_stats_returns_json_when_authenticated(self, client_with_auth):
|
||||
"""Test GET /dashboard/stats returns 200 with valid JSON when authenticated."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/dashboard/stats",
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "total_nodes" in data
|
||||
assert "active_nodes" in data
|
||||
assert "total_messages" in data
|
||||
assert "total_advertisements" in data
|
||||
assert "text/html" in response.headers["content-type"]
|
||||
assert "<!DOCTYPE html>" in response.text
|
||||
assert "MeshCore Hub Dashboard" in response.text
|
||||
|
||||
def test_activity_returns_json_when_authenticated(self, client_with_auth):
|
||||
"""Test GET /dashboard/activity returns 200 with valid JSON when authenticated."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/dashboard/activity",
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
def test_dashboard_contains_stats(
|
||||
self, client_no_auth, sample_node, sample_message
|
||||
):
|
||||
"""Test dashboard HTML contains stat values."""
|
||||
response = client_no_auth.get("/api/v1/dashboard")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "days" in data
|
||||
assert "data" in data
|
||||
assert isinstance(data["data"], list)
|
||||
# Check that stats are present
|
||||
assert "Total Nodes" in response.text
|
||||
assert "Active Nodes" in response.text
|
||||
assert "Total Messages" in response.text
|
||||
|
||||
def test_message_activity_returns_json_when_authenticated(self, client_with_auth):
|
||||
"""Test GET /dashboard/message-activity returns 200 with valid JSON when authenticated."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/dashboard/message-activity",
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
def test_dashboard_contains_recent_data(self, client_no_auth, sample_node):
|
||||
"""Test dashboard HTML contains recent nodes."""
|
||||
response = client_no_auth.get("/api/v1/dashboard")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "days" in data
|
||||
assert "data" in data
|
||||
assert isinstance(data["data"], list)
|
||||
|
||||
def test_node_count_returns_json_when_authenticated(self, client_with_auth):
|
||||
"""Test GET /dashboard/node-count returns 200 with valid JSON when authenticated."""
|
||||
response = client_with_auth.get(
|
||||
"/api/v1/dashboard/node-count",
|
||||
headers={"Authorization": "Bearer test-read-key"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "days" in data
|
||||
assert "data" in data
|
||||
assert isinstance(data["data"], list)
|
||||
assert "Recent Nodes" in response.text
|
||||
# The node name should appear in the table
|
||||
assert sample_node.name in response.text
|
||||
|
||||
|
||||
class TestDashboardActivity:
|
||||
|
||||
@@ -102,57 +102,6 @@ class TestListNodesFilters:
|
||||
data = response.json()
|
||||
assert len(data["items"]) == 0
|
||||
|
||||
def test_filter_by_adv_type_matches_legacy_labels(
|
||||
self, client_no_auth, api_db_session
|
||||
):
|
||||
"""Canonical adv_type filters match legacy LetsMesh adv_type values only."""
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from meshcore_hub.common.models import Node
|
||||
|
||||
repeater_node = Node(
|
||||
public_key="ab" * 32,
|
||||
adv_type="PyMC-Repeater",
|
||||
first_seen=datetime.now(timezone.utc),
|
||||
)
|
||||
companion_node = Node(
|
||||
public_key="cd" * 32,
|
||||
adv_type="offline companion",
|
||||
first_seen=datetime.now(timezone.utc),
|
||||
)
|
||||
room_node = Node(
|
||||
public_key="ef" * 32,
|
||||
adv_type="room server",
|
||||
first_seen=datetime.now(timezone.utc),
|
||||
)
|
||||
name_only_room_node = Node(
|
||||
public_key="12" * 32,
|
||||
name="WAL-SE Room Server",
|
||||
adv_type="unknown",
|
||||
first_seen=datetime.now(timezone.utc),
|
||||
)
|
||||
api_db_session.add(repeater_node)
|
||||
api_db_session.add(companion_node)
|
||||
api_db_session.add(room_node)
|
||||
api_db_session.add(name_only_room_node)
|
||||
api_db_session.commit()
|
||||
|
||||
response = client_no_auth.get("/api/v1/nodes?adv_type=repeater")
|
||||
assert response.status_code == 200
|
||||
repeater_keys = {item["public_key"] for item in response.json()["items"]}
|
||||
assert repeater_node.public_key in repeater_keys
|
||||
|
||||
response = client_no_auth.get("/api/v1/nodes?adv_type=companion")
|
||||
assert response.status_code == 200
|
||||
companion_keys = {item["public_key"] for item in response.json()["items"]}
|
||||
assert companion_node.public_key in companion_keys
|
||||
|
||||
response = client_no_auth.get("/api/v1/nodes?adv_type=room")
|
||||
assert response.status_code == 200
|
||||
room_keys = {item["public_key"] for item in response.json()["items"]}
|
||||
assert room_node.public_key in room_keys
|
||||
assert name_only_room_node.public_key not in room_keys
|
||||
|
||||
def test_filter_by_member_id(self, client_no_auth, sample_node_with_member_tag):
|
||||
"""Test filtering nodes by member_id tag."""
|
||||
# Match alice
|
||||
|
||||
@@ -71,26 +71,6 @@ class TestHandleAdvertisement:
|
||||
assert ad.public_key == "a" * 64
|
||||
assert ad.name == "TestNode"
|
||||
|
||||
def test_updates_node_location_fields(self, db_manager, db_session):
|
||||
"""Advertisement payload lat/lon updates node coordinates."""
|
||||
payload = {
|
||||
"public_key": "a" * 64,
|
||||
"name": "LocNode",
|
||||
"adv_type": "repeater",
|
||||
"lat": 42.1234,
|
||||
"lon": -71.9876,
|
||||
}
|
||||
|
||||
handle_advertisement("b" * 64, "advertisement", payload, db_manager)
|
||||
|
||||
node = db_session.execute(
|
||||
select(Node).where(Node.public_key == "a" * 64)
|
||||
).scalar_one_or_none()
|
||||
|
||||
assert node is not None
|
||||
assert node.lat == 42.1234
|
||||
assert node.lon == -71.9876
|
||||
|
||||
def test_handles_missing_public_key(self, db_manager, db_session):
|
||||
"""Test that missing public_key is handled gracefully."""
|
||||
payload = {
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
"""Tests for LetsMesh packet decoder integration."""
|
||||
|
||||
import subprocess
|
||||
from unittest.mock import patch
|
||||
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
|
||||
|
||||
def test_decode_payload_returns_none_without_raw() -> None:
|
||||
"""Decoder returns None when packet has no raw hex."""
|
||||
decoder = LetsMeshPacketDecoder(enabled=True)
|
||||
assert decoder.decode_payload({"packet_type": 5}) is None
|
||||
|
||||
|
||||
def test_decode_payload_rejects_non_hex_raw_without_invoking_decoder() -> None:
|
||||
"""Decoder returns None and does not execute subprocess for invalid raw hex."""
|
||||
decoder = LetsMeshPacketDecoder(enabled=True, command="meshcore-decoder")
|
||||
|
||||
with (
|
||||
patch("meshcore_hub.collector.letsmesh_decoder.shutil.which", return_value="1"),
|
||||
patch("meshcore_hub.collector.letsmesh_decoder.subprocess.run") as mock_run,
|
||||
):
|
||||
assert decoder.decode_payload({"raw": "ZZ-not-hex"}) is None
|
||||
|
||||
mock_run.assert_not_called()
|
||||
|
||||
|
||||
def test_decode_payload_invokes_decoder_with_keys() -> None:
|
||||
"""Decoder command includes channel keys and returns parsed JSON."""
|
||||
decoder = LetsMeshPacketDecoder(
|
||||
enabled=True,
|
||||
command="meshcore-decoder",
|
||||
channel_keys=["0xABCDEF", "name=012345", "abcDEF"],
|
||||
timeout_seconds=1.5,
|
||||
)
|
||||
completed = subprocess.CompletedProcess(
|
||||
args=["meshcore-decoder"],
|
||||
returncode=0,
|
||||
stdout='{"payload":{"decoded":{"decrypted":{"message":"hello"}}}}',
|
||||
stderr="",
|
||||
)
|
||||
|
||||
with (
|
||||
patch("meshcore_hub.collector.letsmesh_decoder.shutil.which", return_value="1"),
|
||||
patch(
|
||||
"meshcore_hub.collector.letsmesh_decoder.subprocess.run",
|
||||
return_value=completed,
|
||||
) as mock_run,
|
||||
):
|
||||
decoded = decoder.decode_payload({"raw": "A1B2C3"})
|
||||
|
||||
assert isinstance(decoded, dict)
|
||||
payload = decoded.get("payload")
|
||||
assert isinstance(payload, dict)
|
||||
decoded_payload = payload.get("decoded")
|
||||
assert isinstance(decoded_payload, dict)
|
||||
decrypted = decoded_payload.get("decrypted")
|
||||
assert isinstance(decrypted, dict)
|
||||
assert decrypted.get("message") == "hello"
|
||||
command = mock_run.call_args.args[0]
|
||||
assert command == [
|
||||
"meshcore-decoder",
|
||||
"decode",
|
||||
"A1B2C3",
|
||||
"--json",
|
||||
"--key",
|
||||
"8B3387E9C5CDEA6AC9E5EDBAA115CD72",
|
||||
"9CD8FCF22A47333B591D96A2B848B73F",
|
||||
"ABCDEF",
|
||||
"012345",
|
||||
]
|
||||
assert mock_run.call_args.kwargs["timeout"] == 1.5
|
||||
|
||||
|
||||
def test_decode_payload_returns_none_for_decoder_error() -> None:
|
||||
"""Decoder returns None when decoder exits with failure."""
|
||||
decoder = LetsMeshPacketDecoder(enabled=True, command="meshcore-decoder")
|
||||
completed = subprocess.CompletedProcess(
|
||||
args=["meshcore-decoder"],
|
||||
returncode=1,
|
||||
stdout="",
|
||||
stderr="decode error",
|
||||
)
|
||||
|
||||
with (
|
||||
patch("meshcore_hub.collector.letsmesh_decoder.shutil.which", return_value="1"),
|
||||
patch(
|
||||
"meshcore_hub.collector.letsmesh_decoder.subprocess.run",
|
||||
return_value=completed,
|
||||
),
|
||||
):
|
||||
assert decoder.decode_payload({"raw": "A1B2C3"}) is None
|
||||
|
||||
|
||||
def test_builtin_channel_keys_present_by_default() -> None:
|
||||
"""Public and #test keys are always present even without .env keys."""
|
||||
decoder = LetsMeshPacketDecoder(enabled=True, command="meshcore-decoder")
|
||||
assert decoder._channel_keys == [
|
||||
"8B3387E9C5CDEA6AC9E5EDBAA115CD72",
|
||||
"9CD8FCF22A47333B591D96A2B848B73F",
|
||||
]
|
||||
|
||||
|
||||
def test_channel_name_lookup_from_decoded_hash() -> None:
|
||||
"""Decoder resolves channel names from configured label=key entries."""
|
||||
key_hex = "EB50A1BCB3E4E5D7BF69A57C9DADA211"
|
||||
decoder = LetsMeshPacketDecoder(
|
||||
enabled=False,
|
||||
channel_keys=[f"#bot={key_hex}"],
|
||||
)
|
||||
channel_hash = decoder._compute_channel_hash(key_hex)
|
||||
decoded_packet = {
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"channelHash": channel_hash,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert decoder.channel_name_from_decoded(decoded_packet) == "bot"
|
||||
|
||||
|
||||
def test_channel_labels_by_index_includes_labeled_entries() -> None:
|
||||
"""Channel labels map includes built-ins and label=key env entries."""
|
||||
decoder = LetsMeshPacketDecoder(
|
||||
enabled=False,
|
||||
channel_keys=[
|
||||
"bot=EB50A1BCB3E4E5D7BF69A57C9DADA211",
|
||||
"chat=D0BDD6D71538138ED979EEC00D98AD97",
|
||||
],
|
||||
)
|
||||
|
||||
labels = decoder.channel_labels_by_index()
|
||||
|
||||
assert labels[17] == "Public"
|
||||
assert labels[217] == "#test"
|
||||
assert labels[202] == "#bot"
|
||||
assert labels[184] == "#chat"
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Tests for the collector subscriber."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, call, patch
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from meshcore_hub.collector.subscriber import Subscriber, create_subscriber
|
||||
|
||||
@@ -14,16 +14,11 @@ class TestSubscriber:
|
||||
"""Create a mock MQTT client."""
|
||||
client = MagicMock()
|
||||
client.topic_builder = MagicMock()
|
||||
client.topic_builder.prefix = "meshcore/BOS"
|
||||
client.topic_builder.all_events_topic.return_value = "meshcore/+/event/#"
|
||||
client.topic_builder.parse_event_topic.return_value = (
|
||||
"a" * 64,
|
||||
"advertisement",
|
||||
)
|
||||
client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"status",
|
||||
)
|
||||
return client
|
||||
|
||||
@pytest.fixture
|
||||
@@ -71,692 +66,6 @@ class TestSubscriber:
|
||||
|
||||
handler.assert_called_once()
|
||||
|
||||
def test_start_subscribes_to_letsmesh_topics(self, mock_mqtt_client, db_manager):
|
||||
"""LetsMesh ingest mode subscribes to packets/status/internal feeds."""
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
|
||||
subscriber.start()
|
||||
|
||||
expected_calls = [
|
||||
call("meshcore/BOS/+/packets", subscriber._handle_mqtt_message),
|
||||
call("meshcore/BOS/+/status", subscriber._handle_mqtt_message),
|
||||
call("meshcore/BOS/+/internal", subscriber._handle_mqtt_message),
|
||||
]
|
||||
mock_mqtt_client.subscribe.assert_has_calls(expected_calls, any_order=False)
|
||||
assert mock_mqtt_client.subscribe.call_count == 3
|
||||
|
||||
def test_letsmesh_status_maps_to_letsmesh_status(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""LetsMesh status payloads are stored as informational status events."""
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
advert_handler = MagicMock()
|
||||
status_handler = MagicMock()
|
||||
subscriber.register_handler("advertisement", advert_handler)
|
||||
subscriber.register_handler("letsmesh_status", status_handler)
|
||||
subscriber.start()
|
||||
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/status",
|
||||
pattern="meshcore/BOS/+/status",
|
||||
payload={
|
||||
"origin": "Observer Node",
|
||||
"origin_id": "b" * 64,
|
||||
"model": "Heltec V3",
|
||||
"mode": "repeater",
|
||||
"flags": 7,
|
||||
},
|
||||
)
|
||||
|
||||
advert_handler.assert_not_called()
|
||||
status_handler.assert_called_once()
|
||||
public_key, event_type, payload, _db = status_handler.call_args.args
|
||||
assert public_key == "a" * 64
|
||||
assert event_type == "letsmesh_status"
|
||||
assert payload["origin_id"] == "b" * 64
|
||||
assert payload["origin"] == "Observer Node"
|
||||
assert payload["mode"] == "repeater"
|
||||
|
||||
def test_letsmesh_status_with_debug_flags_does_not_emit_advertisement(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Status debug metadata should remain informational only."""
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
advert_handler = MagicMock()
|
||||
status_handler = MagicMock()
|
||||
subscriber.register_handler("advertisement", advert_handler)
|
||||
subscriber.register_handler("letsmesh_status", status_handler)
|
||||
subscriber.start()
|
||||
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/status",
|
||||
pattern="meshcore/BOS/+/status",
|
||||
payload={
|
||||
"origin": "Observer Node",
|
||||
"origin_id": "b" * 64,
|
||||
"mode": "repeater",
|
||||
"stats": {"debug_flags": 7},
|
||||
},
|
||||
)
|
||||
|
||||
advert_handler.assert_not_called()
|
||||
status_handler.assert_called_once()
|
||||
_public_key, _event_type, payload, _db = status_handler.call_args.args
|
||||
assert payload["stats"]["debug_flags"] == 7
|
||||
|
||||
def test_letsmesh_status_without_identity_maps_to_letsmesh_status(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Status heartbeat payloads without identity metadata stay informational."""
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
advert_handler = MagicMock()
|
||||
status_handler = MagicMock()
|
||||
subscriber.register_handler("advertisement", advert_handler)
|
||||
subscriber.register_handler("letsmesh_status", status_handler)
|
||||
subscriber.start()
|
||||
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/status",
|
||||
pattern="meshcore/BOS/+/status",
|
||||
payload={
|
||||
"origin_id": "b" * 64,
|
||||
"stats": {"cpu": 27, "mem": 91, "debug_flags": 7},
|
||||
},
|
||||
)
|
||||
|
||||
advert_handler.assert_not_called()
|
||||
status_handler.assert_called_once()
|
||||
|
||||
def test_invalid_ingest_mode_raises(self, mock_mqtt_client, db_manager) -> None:
|
||||
"""Invalid ingest mode values are rejected."""
|
||||
with pytest.raises(ValueError):
|
||||
Subscriber(mock_mqtt_client, db_manager, ingest_mode="invalid_mode")
|
||||
|
||||
def test_letsmesh_packet_maps_to_channel_message(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""LetsMesh packets are mapped to channel messages when text is available."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("channel_msg_recv", handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 5,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"decrypted": {
|
||||
"message": "hello channel",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "5",
|
||||
"hash": "ABCDEF1234",
|
||||
"timestamp": "2026-02-21T17:42:39.897932",
|
||||
"SNR": "12.5",
|
||||
"path": "91CBC3",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
public_key, event_type, payload, _db = handler.call_args.args
|
||||
assert public_key == "a" * 64
|
||||
assert event_type == "channel_msg_recv"
|
||||
assert payload["text"] == "hello channel"
|
||||
assert payload["txt_type"] == 5
|
||||
assert "sender_timestamp" not in payload
|
||||
assert payload["SNR"] == 12.5
|
||||
assert payload["path_len"] == 3
|
||||
|
||||
def test_letsmesh_packet_without_decrypted_text_is_not_shown_as_message(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Undecodable LetsMesh packets are kept as informational events, not messages."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
letsmesh_packet_handler = MagicMock()
|
||||
channel_handler = MagicMock()
|
||||
subscriber.register_handler("letsmesh_packet", letsmesh_packet_handler)
|
||||
subscriber.register_handler("channel_msg_recv", channel_handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value=None,
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "5",
|
||||
"hash": "ABCDEF1234",
|
||||
"raw": "15040791959fd9",
|
||||
},
|
||||
)
|
||||
|
||||
letsmesh_packet_handler.assert_called_once()
|
||||
channel_handler.assert_not_called()
|
||||
|
||||
def test_letsmesh_packet_uses_decoder_text_when_available(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""LetsMesh packet decoder output is used for message text and timestamp."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("channel_msg_recv", handler)
|
||||
subscriber.start()
|
||||
|
||||
with (
|
||||
patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 5,
|
||||
"pathLength": 4,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"channelHash": "AA",
|
||||
"decrypted": {
|
||||
"sender": "ABCD1234",
|
||||
"timestamp": 1771695860,
|
||||
"message": "decoded hello",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
),
|
||||
patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"channel_name_from_decoded",
|
||||
return_value="test",
|
||||
),
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "5",
|
||||
"hash": "ABCDEF1234",
|
||||
"raw": "15040791959fd9",
|
||||
"SNR": "9.0",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
public_key, event_type, payload, _db = handler.call_args.args
|
||||
assert public_key == "a" * 64
|
||||
assert event_type == "channel_msg_recv"
|
||||
assert payload["text"] == "decoded hello"
|
||||
assert payload["channel_name"] == "#test"
|
||||
assert payload["sender_timestamp"] == 1771695860
|
||||
assert payload["txt_type"] == 5
|
||||
assert payload["path_len"] == 4
|
||||
assert payload["channel_idx"] == 170
|
||||
assert payload["pubkey_prefix"] == "ABCD1234"
|
||||
|
||||
def test_letsmesh_packet_type_1_maps_to_contact_message(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""LetsMesh packet type 1 is treated as direct/contact message traffic."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("contact_msg_recv", handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 1,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"sourceHash": "7CAF1337A58D",
|
||||
"decrypted": {
|
||||
"message": "hello dm",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "1",
|
||||
"hash": "ABABAB1234",
|
||||
"raw": "010203",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
public_key, event_type, payload, _db = handler.call_args.args
|
||||
assert public_key == "a" * 64
|
||||
assert event_type == "contact_msg_recv"
|
||||
assert payload["text"] == "hello dm"
|
||||
assert payload["pubkey_prefix"] == "7CAF1337A58D"
|
||||
|
||||
def test_letsmesh_decoder_sender_name_prefixes_message_text(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Non-hex decoder sender names are rendered as `Name: Message`."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("channel_msg_recv", handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 5,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"channelHash": "D9",
|
||||
"decrypted": {
|
||||
"sender": "Stephenbarz",
|
||||
"message": "hello mesh",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "5",
|
||||
"hash": "FEEDC0DE",
|
||||
"raw": "AABBCC",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
_public_key, event_type, payload, _db = handler.call_args.args
|
||||
assert event_type == "channel_msg_recv"
|
||||
assert payload["text"] == "Stephenbarz: hello mesh"
|
||||
assert payload["channel_idx"] == 217
|
||||
assert "pubkey_prefix" not in payload
|
||||
|
||||
def test_letsmesh_packet_type_4_maps_to_advertisement_with_location(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Decoder packet type 4 is mapped to advertisement with GPS coordinates."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("advertisement", handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 4,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"type": 4,
|
||||
"publicKey": "B" * 64,
|
||||
"appData": {
|
||||
"flags": 146,
|
||||
"deviceRole": 2,
|
||||
"location": {
|
||||
"latitude": 42.470001,
|
||||
"longitude": -71.330001,
|
||||
},
|
||||
"name": "Concord Attic G2",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "4",
|
||||
"hash": "A1B2C3D4",
|
||||
"raw": "010203",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
public_key, event_type, payload, _db = handler.call_args.args
|
||||
assert public_key == "a" * 64
|
||||
assert event_type == "advertisement"
|
||||
assert payload["public_key"] == "B" * 64
|
||||
assert payload["name"] == "Concord Attic G2"
|
||||
assert payload["adv_type"] == "repeater"
|
||||
assert payload["flags"] == 146
|
||||
assert payload["lat"] == 42.470001
|
||||
assert payload["lon"] == -71.330001
|
||||
|
||||
def test_letsmesh_packet_type_11_maps_to_contact(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Decoder packet type 11 is mapped to native contact events."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
contact_handler = MagicMock()
|
||||
advert_handler = MagicMock()
|
||||
subscriber.register_handler("contact", contact_handler)
|
||||
subscriber.register_handler("advertisement", advert_handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 11,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"type": 11,
|
||||
"publicKey": "C" * 64,
|
||||
"nodeType": 2,
|
||||
"nodeTypeName": "Repeater",
|
||||
"rawFlags": 146,
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "11",
|
||||
"hash": "E5F6A7B8",
|
||||
"raw": "040506",
|
||||
},
|
||||
)
|
||||
|
||||
advert_handler.assert_not_called()
|
||||
contact_handler.assert_called_once()
|
||||
_public_key, event_type, payload, _db = contact_handler.call_args.args
|
||||
assert event_type == "contact"
|
||||
assert payload["public_key"] == "C" * 64
|
||||
assert payload["type"] == 2
|
||||
assert payload["flags"] == 146
|
||||
|
||||
def test_letsmesh_packet_type_9_maps_to_trace_data(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Decoder packet type 9 is mapped to native trace_data events."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
trace_handler = MagicMock()
|
||||
subscriber.register_handler("trace_data", trace_handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 9,
|
||||
"pathLength": 4,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"type": 9,
|
||||
"traceTag": "DF9D7A20",
|
||||
"authCode": 0,
|
||||
"flags": 0,
|
||||
"pathHashes": ["71", "0B", "24", "0B"],
|
||||
"snrValues": [12.5, 11.5, 10, 6.25],
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "9",
|
||||
"hash": "99887766",
|
||||
"raw": "ABCDEF",
|
||||
},
|
||||
)
|
||||
|
||||
trace_handler.assert_called_once()
|
||||
_public_key, event_type, payload, _db = trace_handler.call_args.args
|
||||
assert event_type == "trace_data"
|
||||
assert payload["initiator_tag"] == int("DF9D7A20", 16)
|
||||
assert payload["path_hashes"] == ["71", "0B", "24", "0B"]
|
||||
assert payload["hop_count"] == 4
|
||||
assert payload["snr_values"] == [12.5, 11.5, 10.0, 6.25]
|
||||
|
||||
def test_letsmesh_packet_type_8_maps_to_path_updated(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Decoder packet type 8 is mapped to native path_updated events."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
path_handler = MagicMock()
|
||||
packet_handler = MagicMock()
|
||||
subscriber.register_handler("path_updated", path_handler)
|
||||
subscriber.register_handler("letsmesh_packet", packet_handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 8,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"type": 8,
|
||||
"isValid": True,
|
||||
"pathLength": 2,
|
||||
"pathHashes": ["AA", "BB"],
|
||||
"extraType": 244,
|
||||
"extraData": "D" * 64,
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "8",
|
||||
"hash": "99887766",
|
||||
"raw": "ABCDEF",
|
||||
},
|
||||
)
|
||||
|
||||
packet_handler.assert_not_called()
|
||||
path_handler.assert_called_once()
|
||||
_public_key, event_type, payload, _db = path_handler.call_args.args
|
||||
assert event_type == "path_updated"
|
||||
assert payload["hop_count"] == 2
|
||||
assert payload["path_hashes"] == ["AA", "BB"]
|
||||
assert payload["extra_type"] == 244
|
||||
assert payload["node_public_key"] == "D" * 64
|
||||
|
||||
def test_letsmesh_packet_fallback_logs_decoded_payload(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Unmapped packets include decoder output in letsmesh_packet payload."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
packet_handler = MagicMock()
|
||||
subscriber.register_handler("letsmesh_packet", packet_handler)
|
||||
subscriber.start()
|
||||
|
||||
decoded_packet = {
|
||||
"payloadType": 10,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"type": 10,
|
||||
"isValid": True,
|
||||
}
|
||||
},
|
||||
}
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value=decoded_packet,
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "10",
|
||||
"hash": "99887766",
|
||||
"raw": "ABCDEF",
|
||||
},
|
||||
)
|
||||
|
||||
packet_handler.assert_called_once()
|
||||
_public_key, event_type, payload, _db = packet_handler.call_args.args
|
||||
assert event_type == "letsmesh_packet"
|
||||
assert payload["decoded_payload_type"] == 10
|
||||
assert payload["decoded_packet"] == decoded_packet
|
||||
|
||||
def test_letsmesh_packet_sender_fallback_from_payload_fields(
|
||||
self, mock_mqtt_client, db_manager
|
||||
) -> None:
|
||||
"""Sender prefix falls back to payload sourceHash when decoder has no sender."""
|
||||
mock_mqtt_client.topic_builder.parse_letsmesh_upload_topic.return_value = (
|
||||
"a" * 64,
|
||||
"packets",
|
||||
)
|
||||
subscriber = Subscriber(
|
||||
mock_mqtt_client,
|
||||
db_manager,
|
||||
ingest_mode="letsmesh_upload",
|
||||
)
|
||||
handler = MagicMock()
|
||||
subscriber.register_handler("channel_msg_recv", handler)
|
||||
subscriber.start()
|
||||
|
||||
with patch.object(
|
||||
subscriber._letsmesh_decoder,
|
||||
"decode_payload",
|
||||
return_value={
|
||||
"payloadType": 5,
|
||||
"payload": {
|
||||
"decoded": {
|
||||
"decrypted": {
|
||||
"message": "hello from payload sender",
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
):
|
||||
subscriber._handle_mqtt_message(
|
||||
topic=f"meshcore/BOS/{'a' * 64}/packets",
|
||||
pattern="meshcore/BOS/+/packets",
|
||||
payload={
|
||||
"packet_type": "5",
|
||||
"hash": "ABABAB1234",
|
||||
"sourceHash": "1A2B3C4D5E6F",
|
||||
"raw": "010203",
|
||||
},
|
||||
)
|
||||
|
||||
handler.assert_called_once()
|
||||
_public_key, _event_type, payload, _db = handler.call_args.args
|
||||
assert payload["text"] == "hello from payload sender"
|
||||
assert payload["pubkey_prefix"] == "1A2B3C4D5E6F"
|
||||
|
||||
|
||||
class TestCreateSubscriber:
|
||||
"""Tests for create_subscriber factory function."""
|
||||
|
||||
@@ -18,17 +18,6 @@ class TestCommonSettings:
|
||||
|
||||
assert settings.data_home == "/custom/data"
|
||||
|
||||
def test_websocket_transport_settings(self) -> None:
|
||||
"""Test MQTT websocket transport settings."""
|
||||
settings = CommonSettings(
|
||||
_env_file=None,
|
||||
mqtt_transport="websockets",
|
||||
mqtt_ws_path="/",
|
||||
)
|
||||
|
||||
assert settings.mqtt_transport.value == "websockets"
|
||||
assert settings.mqtt_ws_path == "/"
|
||||
|
||||
|
||||
class TestInterfaceSettings:
|
||||
"""Tests for InterfaceSettings."""
|
||||
@@ -74,28 +63,6 @@ class TestCollectorSettings:
|
||||
assert settings.node_tags_file == "/seed/data/node_tags.yaml"
|
||||
assert settings.members_file == "/seed/data/members.yaml"
|
||||
|
||||
def test_collector_ingest_mode_letsmesh_upload(self) -> None:
|
||||
"""Test collector ingest mode can be set to LetsMesh upload."""
|
||||
settings = CollectorSettings(
|
||||
_env_file=None,
|
||||
collector_ingest_mode="letsmesh_upload",
|
||||
)
|
||||
|
||||
assert settings.collector_ingest_mode.value == "letsmesh_upload"
|
||||
|
||||
def test_collector_letsmesh_decoder_keys_list(self) -> None:
|
||||
"""LetsMesh decoder keys are parsed from comma/space-separated env values."""
|
||||
settings = CollectorSettings(
|
||||
_env_file=None,
|
||||
collector_letsmesh_decoder_keys="aa11, bb22 cc33",
|
||||
)
|
||||
|
||||
assert settings.collector_letsmesh_decoder_keys_list == [
|
||||
"aa11",
|
||||
"bb22",
|
||||
"cc33",
|
||||
]
|
||||
|
||||
|
||||
class TestAPISettings:
|
||||
"""Tests for APISettings."""
|
||||
@@ -125,11 +92,3 @@ class TestWebSettings:
|
||||
settings = WebSettings(_env_file=None, data_home="/custom/data")
|
||||
|
||||
assert settings.web_data_dir == "/custom/data/web"
|
||||
|
||||
def test_web_datetime_locale_default_and_override(self) -> None:
|
||||
"""Date formatting locale has sensible default and can be overridden."""
|
||||
default_settings = WebSettings(_env_file=None)
|
||||
custom_settings = WebSettings(_env_file=None, web_datetime_locale="en-GB")
|
||||
|
||||
assert default_settings.web_datetime_locale == "en-US"
|
||||
assert custom_settings.web_datetime_locale == "en-GB"
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
"""Tests for MQTT topic parsing utilities."""
|
||||
|
||||
from meshcore_hub.common.mqtt import TopicBuilder
|
||||
|
||||
|
||||
class TestTopicBuilder:
|
||||
"""Tests for MQTT topic builder parsing helpers."""
|
||||
|
||||
def test_parse_event_topic_with_single_segment_prefix(self) -> None:
|
||||
"""Event topics are parsed correctly with a simple prefix."""
|
||||
builder = TopicBuilder(prefix="meshcore")
|
||||
|
||||
parsed = builder.parse_event_topic(
|
||||
"meshcore/ABCDEF1234567890/event/advertisement"
|
||||
)
|
||||
|
||||
assert parsed == ("ABCDEF1234567890", "advertisement")
|
||||
|
||||
def test_parse_event_topic_with_multi_segment_prefix(self) -> None:
|
||||
"""Event topics are parsed correctly with a slash-delimited prefix."""
|
||||
builder = TopicBuilder(prefix="meshcore/BOS")
|
||||
|
||||
parsed = builder.parse_event_topic(
|
||||
"meshcore/BOS/ABCDEF1234567890/event/channel_msg_recv"
|
||||
)
|
||||
|
||||
assert parsed == ("ABCDEF1234567890", "channel_msg_recv")
|
||||
|
||||
def test_parse_command_topic_with_multi_segment_prefix(self) -> None:
|
||||
"""Command topics are parsed correctly with a slash-delimited prefix."""
|
||||
builder = TopicBuilder(prefix="meshcore/BOS")
|
||||
|
||||
parsed = builder.parse_command_topic(
|
||||
"meshcore/BOS/ABCDEF123456/command/send_msg"
|
||||
)
|
||||
|
||||
assert parsed == ("ABCDEF123456", "send_msg")
|
||||
|
||||
def test_parse_letsmesh_upload_topic(self) -> None:
|
||||
"""LetsMesh upload topics map to public key and feed type."""
|
||||
builder = TopicBuilder(prefix="meshcore/BOS")
|
||||
|
||||
parsed = builder.parse_letsmesh_upload_topic(
|
||||
"meshcore/BOS/ABCDEF1234567890/status"
|
||||
)
|
||||
|
||||
assert parsed == ("ABCDEF1234567890", "status")
|
||||
|
||||
def test_parse_letsmesh_upload_topic_rejects_unknown_feed(self) -> None:
|
||||
"""Unknown LetsMesh feed topics are rejected."""
|
||||
builder = TopicBuilder(prefix="meshcore/BOS")
|
||||
|
||||
parsed = builder.parse_letsmesh_upload_topic(
|
||||
"meshcore/BOS/ABCDEF1234567890/something_else"
|
||||
)
|
||||
|
||||
assert parsed is None
|
||||
@@ -59,22 +59,6 @@ def auth_headers() -> dict:
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def auth_headers_basic() -> dict[str, str]:
|
||||
"""Basic auth header forwarded by reverse proxy."""
|
||||
return {
|
||||
"Authorization": "Basic dGVzdDp0ZXN0",
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def auth_headers_auth_request() -> dict[str, str]:
|
||||
"""Auth-request style header from upstream proxy."""
|
||||
return {
|
||||
"X-Auth-Request-User": "test-user-id",
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def admin_client(admin_app: Any, mock_http_client: MockHttpClient) -> TestClient:
|
||||
"""Create a test client with admin enabled."""
|
||||
@@ -129,34 +113,6 @@ class TestAdminHome:
|
||||
|
||||
assert config["is_authenticated"] is True
|
||||
|
||||
def test_admin_home_config_authenticated_with_basic_auth(
|
||||
self, admin_client, auth_headers_basic
|
||||
):
|
||||
"""Test admin config shows is_authenticated: true with basic auth header."""
|
||||
response = admin_client.get("/a/", headers=auth_headers_basic)
|
||||
text = response.text
|
||||
config_start = text.find("window.__APP_CONFIG__ = ") + len(
|
||||
"window.__APP_CONFIG__ = "
|
||||
)
|
||||
config_end = text.find(";", config_start)
|
||||
config = json.loads(text[config_start:config_end])
|
||||
|
||||
assert config["is_authenticated"] is True
|
||||
|
||||
def test_admin_home_config_authenticated_with_auth_request_header(
|
||||
self, admin_client, auth_headers_auth_request
|
||||
):
|
||||
"""Test admin config shows is_authenticated with X-Auth-Request-User."""
|
||||
response = admin_client.get("/a/", headers=auth_headers_auth_request)
|
||||
text = response.text
|
||||
config_start = text.find("window.__APP_CONFIG__ = ") + len(
|
||||
"window.__APP_CONFIG__ = "
|
||||
)
|
||||
config_end = text.find(";", config_start)
|
||||
config = json.loads(text[config_start:config_end])
|
||||
|
||||
assert config["is_authenticated"] is True
|
||||
|
||||
def test_admin_home_disabled_returns_spa_shell(
|
||||
self, admin_client_disabled, auth_headers
|
||||
):
|
||||
@@ -292,18 +248,6 @@ class TestAdminApiProxyAuth:
|
||||
)
|
||||
assert response.status_code == 201
|
||||
|
||||
def test_proxy_post_allowed_with_basic_auth(
|
||||
self, admin_client, auth_headers_basic, mock_http_client
|
||||
):
|
||||
"""POST to API proxy succeeds with basic auth header."""
|
||||
mock_http_client.set_response("POST", "/api/v1/members", 201, {"id": "new"})
|
||||
response = admin_client.post(
|
||||
"/api/v1/members",
|
||||
json={"name": "Test", "member_id": "test"},
|
||||
headers=auth_headers_basic,
|
||||
)
|
||||
assert response.status_code == 201
|
||||
|
||||
def test_proxy_put_allowed_with_auth(
|
||||
self, admin_client, auth_headers, mock_http_client
|
||||
):
|
||||
|
||||
@@ -1,279 +0,0 @@
|
||||
"""Tests for web app: config JSON escaping and trusted proxy hosts warnings."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from meshcore_hub.web.app import _build_config_json, create_app
|
||||
|
||||
from .conftest import ALL_FEATURES_ENABLED, MockHttpClient
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def xss_app(mock_http_client: MockHttpClient) -> Any:
|
||||
"""Create a web app with a network name containing a script injection payload."""
|
||||
app = create_app(
|
||||
api_url="http://localhost:8000",
|
||||
api_key="test-api-key",
|
||||
network_name="</script><script>alert(1)</script>",
|
||||
network_city="Test City",
|
||||
network_country="Test Country",
|
||||
network_radio_config="Test Radio Config",
|
||||
network_contact_email="test@example.com",
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
app.state.http_client = mock_http_client
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def xss_client(xss_app: Any, mock_http_client: MockHttpClient) -> TestClient:
|
||||
"""Create a test client whose network_name contains a script injection payload."""
|
||||
xss_app.state.http_client = mock_http_client
|
||||
return TestClient(xss_app, raise_server_exceptions=True)
|
||||
|
||||
|
||||
class TestConfigJsonXssEscaping:
|
||||
"""Tests that _build_config_json escapes </script> to prevent XSS breakout."""
|
||||
|
||||
def test_script_tag_escaped_in_rendered_html(self, xss_client: TestClient) -> None:
|
||||
"""Config value containing </script> is escaped to <\\/script> in the HTML."""
|
||||
response = xss_client.get("/")
|
||||
assert response.status_code == 200
|
||||
|
||||
html = response.text
|
||||
|
||||
# The literal "</script>" must NOT appear inside the config JSON block.
|
||||
# Find the config JSON assignment to isolate the embedded block.
|
||||
config_marker = "window.__APP_CONFIG__ = "
|
||||
start = html.find(config_marker)
|
||||
assert start != -1, "Config JSON block not found in rendered HTML"
|
||||
start += len(config_marker)
|
||||
end = html.find(";", start)
|
||||
config_block = html[start:end]
|
||||
|
||||
# The raw closing tag must be escaped
|
||||
assert "</script>" not in config_block
|
||||
assert "<\\/script>" in config_block
|
||||
|
||||
def test_normal_config_values_unaffected(self, client: TestClient) -> None:
|
||||
"""Config values without special characters render unchanged."""
|
||||
response = client.get("/")
|
||||
assert response.status_code == 200
|
||||
|
||||
html = response.text
|
||||
config_marker = "window.__APP_CONFIG__ = "
|
||||
start = html.find(config_marker)
|
||||
assert start != -1
|
||||
start += len(config_marker)
|
||||
end = html.find(";", start)
|
||||
config_block = html[start:end]
|
||||
|
||||
config = json.loads(config_block)
|
||||
assert config["network_name"] == "Test Network"
|
||||
assert config["network_city"] == "Test City"
|
||||
assert config["network_country"] == "Test Country"
|
||||
|
||||
def test_escaped_json_is_parseable(self, xss_client: TestClient) -> None:
|
||||
"""The escaped JSON is still valid and parseable by json.loads."""
|
||||
response = xss_client.get("/")
|
||||
assert response.status_code == 200
|
||||
|
||||
html = response.text
|
||||
config_marker = "window.__APP_CONFIG__ = "
|
||||
start = html.find(config_marker)
|
||||
assert start != -1
|
||||
start += len(config_marker)
|
||||
end = html.find(";", start)
|
||||
config_block = html[start:end]
|
||||
|
||||
# json.loads handles <\/ sequences correctly (they are valid JSON)
|
||||
config = json.loads(config_block)
|
||||
assert isinstance(config, dict)
|
||||
# The parsed value should contain the original unescaped string
|
||||
assert config["network_name"] == "</script><script>alert(1)</script>"
|
||||
|
||||
def test_build_config_json_direct_escaping(self, web_app: Any) -> None:
|
||||
"""Calling _build_config_json directly escapes </ sequences."""
|
||||
from starlette.requests import Request
|
||||
|
||||
# Inject a malicious value into the app state
|
||||
web_app.state.network_name = "</script><script>alert(1)</script>"
|
||||
|
||||
scope = {
|
||||
"type": "http",
|
||||
"method": "GET",
|
||||
"path": "/",
|
||||
"query_string": b"",
|
||||
"headers": [],
|
||||
}
|
||||
request = Request(scope)
|
||||
|
||||
result = _build_config_json(web_app, request)
|
||||
|
||||
# Raw output must not contain literal "</script>"
|
||||
assert "</script>" not in result
|
||||
assert "<\\/script>" in result
|
||||
|
||||
# Result must still be valid JSON
|
||||
parsed = json.loads(result)
|
||||
assert parsed["network_name"] == "</script><script>alert(1)</script>"
|
||||
|
||||
def test_build_config_json_no_escaping_needed(self, web_app: Any) -> None:
|
||||
"""_build_config_json leaves normal values intact when no </ present."""
|
||||
from starlette.requests import Request
|
||||
|
||||
scope = {
|
||||
"type": "http",
|
||||
"method": "GET",
|
||||
"path": "/",
|
||||
"query_string": b"",
|
||||
"headers": [],
|
||||
}
|
||||
request = Request(scope)
|
||||
|
||||
result = _build_config_json(web_app, request)
|
||||
|
||||
# No escaping artifacts for normal values
|
||||
assert "<\\/" not in result
|
||||
|
||||
parsed = json.loads(result)
|
||||
assert parsed["network_name"] == "Test Network"
|
||||
assert parsed["network_city"] == "Test City"
|
||||
|
||||
|
||||
class TestTrustedProxyHostsWarning:
|
||||
"""Tests for trusted proxy hosts startup warning in create_app."""
|
||||
|
||||
def test_warning_logged_when_admin_enabled_and_wildcard_hosts(
|
||||
self, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""A warning is logged when WEB_ADMIN_ENABLED=true and WEB_TRUSTED_PROXY_HOSTS is '*'."""
|
||||
with patch("meshcore_hub.common.config.get_web_settings") as mock_get_settings:
|
||||
from meshcore_hub.common.config import WebSettings
|
||||
|
||||
settings = WebSettings(
|
||||
_env_file=None,
|
||||
web_admin_enabled=True,
|
||||
web_trusted_proxy_hosts="*",
|
||||
)
|
||||
mock_get_settings.return_value = settings
|
||||
|
||||
with caplog.at_level(logging.WARNING, logger="meshcore_hub.web.app"):
|
||||
create_app(
|
||||
api_url="http://localhost:8000",
|
||||
admin_enabled=True,
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
|
||||
assert any(
|
||||
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*'" in msg
|
||||
for msg in caplog.messages
|
||||
), f"Expected warning not found in log messages: {caplog.messages}"
|
||||
|
||||
def test_no_warning_when_trusted_proxy_hosts_is_specific(
|
||||
self, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""No warning is logged when WEB_TRUSTED_PROXY_HOSTS is set to a specific value."""
|
||||
with patch("meshcore_hub.common.config.get_web_settings") as mock_get_settings:
|
||||
from meshcore_hub.common.config import WebSettings
|
||||
|
||||
settings = WebSettings(
|
||||
_env_file=None,
|
||||
web_admin_enabled=True,
|
||||
web_trusted_proxy_hosts="10.0.0.1",
|
||||
)
|
||||
mock_get_settings.return_value = settings
|
||||
|
||||
with caplog.at_level(logging.WARNING, logger="meshcore_hub.web.app"):
|
||||
create_app(
|
||||
api_url="http://localhost:8000",
|
||||
admin_enabled=True,
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
|
||||
assert not any(
|
||||
"WEB_TRUSTED_PROXY_HOSTS" in msg for msg in caplog.messages
|
||||
), f"Unexpected warning found in log messages: {caplog.messages}"
|
||||
|
||||
def test_no_warning_when_admin_disabled(
|
||||
self, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""No warning is logged when WEB_ADMIN_ENABLED is false even with wildcard hosts."""
|
||||
with patch("meshcore_hub.common.config.get_web_settings") as mock_get_settings:
|
||||
from meshcore_hub.common.config import WebSettings
|
||||
|
||||
settings = WebSettings(
|
||||
_env_file=None,
|
||||
web_admin_enabled=False,
|
||||
web_trusted_proxy_hosts="*",
|
||||
)
|
||||
mock_get_settings.return_value = settings
|
||||
|
||||
with caplog.at_level(logging.WARNING, logger="meshcore_hub.web.app"):
|
||||
create_app(
|
||||
api_url="http://localhost:8000",
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
|
||||
assert not any(
|
||||
"WEB_TRUSTED_PROXY_HOSTS" in msg for msg in caplog.messages
|
||||
), f"Unexpected warning found in log messages: {caplog.messages}"
|
||||
|
||||
def test_proxy_hosts_comma_list_parsed_correctly(self) -> None:
|
||||
"""A comma-separated WEB_TRUSTED_PROXY_HOSTS is split into a list for middleware."""
|
||||
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
|
||||
|
||||
with patch("meshcore_hub.common.config.get_web_settings") as mock_get_settings:
|
||||
from meshcore_hub.common.config import WebSettings
|
||||
|
||||
settings = WebSettings(
|
||||
_env_file=None,
|
||||
web_trusted_proxy_hosts="10.0.0.1, 10.0.0.2, 172.16.0.1",
|
||||
)
|
||||
mock_get_settings.return_value = settings
|
||||
|
||||
app = create_app(
|
||||
api_url="http://localhost:8000",
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
|
||||
# Find the ProxyHeadersMiddleware entry in app.user_middleware
|
||||
proxy_entries = [
|
||||
m for m in app.user_middleware if m.cls is ProxyHeadersMiddleware
|
||||
]
|
||||
assert len(proxy_entries) == 1, "ProxyHeadersMiddleware not found in middleware"
|
||||
assert proxy_entries[0].kwargs["trusted_hosts"] == [
|
||||
"10.0.0.1",
|
||||
"10.0.0.2",
|
||||
"172.16.0.1",
|
||||
]
|
||||
|
||||
def test_wildcard_proxy_hosts_passed_as_string(self) -> None:
|
||||
"""Wildcard WEB_TRUSTED_PROXY_HOSTS='*' is passed as a string to middleware."""
|
||||
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
|
||||
|
||||
with patch("meshcore_hub.common.config.get_web_settings") as mock_get_settings:
|
||||
from meshcore_hub.common.config import WebSettings
|
||||
|
||||
settings = WebSettings(
|
||||
_env_file=None,
|
||||
web_trusted_proxy_hosts="*",
|
||||
)
|
||||
mock_get_settings.return_value = settings
|
||||
|
||||
app = create_app(
|
||||
api_url="http://localhost:8000",
|
||||
features=ALL_FEATURES_ENABLED,
|
||||
)
|
||||
|
||||
# Find the ProxyHeadersMiddleware entry in app.user_middleware
|
||||
proxy_entries = [
|
||||
m for m in app.user_middleware if m.cls is ProxyHeadersMiddleware
|
||||
]
|
||||
assert len(proxy_entries) == 1, "ProxyHeadersMiddleware not found in middleware"
|
||||
assert proxy_entries[0].kwargs["trusted_hosts"] == "*"
|
||||
@@ -88,17 +88,3 @@ class TestMessagesConfig:
|
||||
config = json.loads(text[config_start:config_end])
|
||||
|
||||
assert config["network_name"] == "Test Network"
|
||||
assert config["datetime_locale"] == "en-US"
|
||||
|
||||
def test_messages_config_has_channel_labels(self, client: TestClient) -> None:
|
||||
"""Test that SPA config includes known channel labels."""
|
||||
response = client.get("/messages")
|
||||
text = response.text
|
||||
config_start = text.find("window.__APP_CONFIG__ = ") + len(
|
||||
"window.__APP_CONFIG__ = "
|
||||
)
|
||||
config_end = text.find(";", config_start)
|
||||
config = json.loads(text[config_start:config_end])
|
||||
|
||||
assert config["channel_labels"]["17"] == "Public"
|
||||
assert config["channel_labels"]["217"] == "#test"
|
||||
|
||||
Reference in New Issue
Block a user