mirror of
https://github.com/ipnet-mesh/meshcore-hub.git
synced 2026-03-28 17:42:56 +01:00
Compare commits
222 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
92ff1ab306 | ||
|
|
0e2a24caa6 | ||
|
|
ff36a991af | ||
|
|
fa1a2ecc17 | ||
|
|
9099ffb0cb | ||
|
|
f8219b4626 | ||
|
|
27b78d6904 | ||
|
|
d4c3e127a2 | ||
|
|
92e9ccdbfa | ||
|
|
29b5820ed1 | ||
|
|
889aa32e3a | ||
|
|
3c3873951d | ||
|
|
4b58160f31 | ||
|
|
a32255e110 | ||
|
|
59a1898824 | ||
|
|
9256f8375d | ||
|
|
e9b25c1ca7 | ||
|
|
749bed6d5b | ||
|
|
97539cb960 | ||
|
|
c418959e5d | ||
|
|
14fac89f49 | ||
|
|
8201be5a39 | ||
|
|
17fa2f1005 | ||
|
|
535186efb1 | ||
|
|
fa1db5e709 | ||
|
|
840b8636a2 | ||
|
|
cb305083e7 | ||
|
|
d475a12292 | ||
|
|
53f0ce7225 | ||
|
|
90268e9b98 | ||
|
|
18edcfe9bf | ||
|
|
2a380f88b4 | ||
|
|
c22274c4e5 | ||
|
|
54449aa5fb | ||
|
|
15556c3eb9 | ||
|
|
6a66eab663 | ||
|
|
2f40b4a730 | ||
|
|
3eff7f03db | ||
|
|
905ea0190b | ||
|
|
86cc7edca3 | ||
|
|
eb3f8508b7 | ||
|
|
74a34fdcba | ||
|
|
175fc8c524 | ||
|
|
2a153a5239 | ||
|
|
de85e0cd7a | ||
|
|
5a20da3afa | ||
|
|
dcd33711db | ||
|
|
a8cb20fea5 | ||
|
|
3ac5667d7a | ||
|
|
c8c53b25bd | ||
|
|
e4a1b005dc | ||
|
|
27adc6e2de | ||
|
|
835fb1c094 | ||
|
|
d7a351a803 | ||
|
|
317627833c | ||
|
|
f4514d1150 | ||
|
|
7be5f6afdf | ||
|
|
54695ab9e2 | ||
|
|
189eb3a139 | ||
|
|
96ca6190db | ||
|
|
baf08a9545 | ||
|
|
1d3e649ce0 | ||
|
|
45abc66816 | ||
|
|
9c8eb27455 | ||
|
|
e6c6d4aecc | ||
|
|
19bb06953e | ||
|
|
1f55d912ea | ||
|
|
5272a72647 | ||
|
|
b2f8e18f13 | ||
|
|
a15e91c754 | ||
|
|
85129e528e | ||
|
|
127cd7adf6 | ||
|
|
91b3f1926f | ||
|
|
3ef94a21df | ||
|
|
19e724fcc8 | ||
|
|
7b7910b42e | ||
|
|
c711a0eb9b | ||
|
|
dcd7ed248d | ||
|
|
b0ea6bcc0e | ||
|
|
7ef41a3671 | ||
|
|
a7611dd8d4 | ||
|
|
8f907edce6 | ||
|
|
95d1b260ab | ||
|
|
fba2656268 | ||
|
|
69adca09e3 | ||
|
|
9c2a0527ff | ||
|
|
c0db5b1da5 | ||
|
|
77dcbb77ba | ||
|
|
5bf0265fd9 | ||
|
|
1adef40fdc | ||
|
|
c9beb7e801 | ||
|
|
cd14c23cf2 | ||
|
|
708bfd1811 | ||
|
|
afdc76e546 | ||
|
|
e07b9ee2ab | ||
|
|
00851bfcaa | ||
|
|
6a035e41c0 | ||
|
|
2ffc78fda2 | ||
|
|
3f341a4031 | ||
|
|
1ea729bd51 | ||
|
|
d329f67ba8 | ||
|
|
c42a2deffb | ||
|
|
dfa4157c9c | ||
|
|
b52fd32106 | ||
|
|
4bbf43a078 | ||
|
|
deae9c67fe | ||
|
|
ceee27a3af | ||
|
|
f478096bc2 | ||
|
|
8ae94a7763 | ||
|
|
fb6cc6f5a9 | ||
|
|
a98b295618 | ||
|
|
da512c0d9f | ||
|
|
652486aa15 | ||
|
|
947c12bfe1 | ||
|
|
e80cd3a83c | ||
|
|
70ecb5e4da | ||
|
|
565e0ffc7b | ||
|
|
bdc3b867ea | ||
|
|
48786a18f9 | ||
|
|
706c32ae01 | ||
|
|
bafc16d746 | ||
|
|
9b09e32d41 | ||
|
|
2b9f83e55e | ||
|
|
75c1966385 | ||
|
|
3089ff46a8 | ||
|
|
f1bceb5780 | ||
|
|
caf88bdba1 | ||
|
|
9eb1acfc02 | ||
|
|
62e0568646 | ||
|
|
b4da93e4f0 | ||
|
|
981402f7aa | ||
|
|
76717179c2 | ||
|
|
f42987347e | ||
|
|
25831f14e6 | ||
|
|
0e6cbc8094 | ||
|
|
76630f0bb0 | ||
|
|
8fbac2cbd6 | ||
|
|
fcac5e01dc | ||
|
|
b6f3b2d864 | ||
|
|
7de6520ae7 | ||
|
|
5b8b2eda10 | ||
|
|
042a1b04fa | ||
|
|
5832cbf53a | ||
|
|
c540e15432 | ||
|
|
6b1b277c6c | ||
|
|
470c374f11 | ||
|
|
71859b2168 | ||
|
|
3d7ed53df3 | ||
|
|
ceaef9178a | ||
|
|
5ccb077188 | ||
|
|
8f660d6b94 | ||
|
|
6e40be6487 | ||
|
|
d79e29bc0a | ||
|
|
2758cf4dd5 | ||
|
|
f37e993ede | ||
|
|
b18b3c9aa4 | ||
|
|
9d99262401 | ||
|
|
adfe5bc503 | ||
|
|
deaab9b9de | ||
|
|
95636ef580 | ||
|
|
5831592f88 | ||
|
|
bc7bff8b82 | ||
|
|
9445d2150c | ||
|
|
3e9f478a65 | ||
|
|
6656bd8214 | ||
|
|
0f50bf4a41 | ||
|
|
99206f7467 | ||
|
|
3a89daa9c0 | ||
|
|
86c5ff8f1c | ||
|
|
59d0edc96f | ||
|
|
b01611e0e8 | ||
|
|
1e077f50f7 | ||
|
|
09146a2e94 | ||
|
|
56487597b7 | ||
|
|
de968f397d | ||
|
|
3ca5284c11 | ||
|
|
75d7e5bdfa | ||
|
|
927fcd6efb | ||
|
|
3132d296bb | ||
|
|
96e4215c29 | ||
|
|
fd3c3171ce | ||
|
|
345ffd219b | ||
|
|
9661b22390 | ||
|
|
31aa48c9a0 | ||
|
|
1a3649b3be | ||
|
|
33649a065b | ||
|
|
fd582bda35 | ||
|
|
c42b26c8f3 | ||
|
|
d52163949a | ||
|
|
ca101583f0 | ||
|
|
4af0f2ea80 | ||
|
|
0b3ac64845 | ||
|
|
3c7a8981ee | ||
|
|
238e28ae41 | ||
|
|
68d5049963 | ||
|
|
624fa458ac | ||
|
|
309d575fc0 | ||
|
|
f7b4df13a7 | ||
|
|
13bae5c8d7 | ||
|
|
8a6b4d8e88 | ||
|
|
b67e1b5b2b | ||
|
|
d4e3dc0399 | ||
|
|
7f0adfa6a7 | ||
|
|
94b03b49d9 | ||
|
|
20d75fe041 | ||
|
|
307f3935e0 | ||
|
|
6901bafb02 | ||
|
|
e595dc2b27 | ||
|
|
ed2cf09ff3 | ||
|
|
bec736a894 | ||
|
|
1457360703 | ||
|
|
d8a0f2abb8 | ||
|
|
367f838371 | ||
|
|
741dd3ce84 | ||
|
|
0a12f389df | ||
|
|
8240c2fd57 | ||
|
|
38f7fe291e | ||
|
|
e4087efbf0 | ||
|
|
3051984fb9 | ||
|
|
eea2c90ea4 | ||
|
|
d52c23fc29 | ||
|
|
a1fb71ce65 |
103
.agentmap.yaml
Normal file
103
.agentmap.yaml
Normal file
@@ -0,0 +1,103 @@
|
||||
# MeshCore Hub — codebase orientation map
|
||||
# See: https://github.com/anthropics/agentmap
|
||||
|
||||
meta:
|
||||
project: meshcore-hub
|
||||
version: 1
|
||||
updated: "2026-02-27"
|
||||
stack:
|
||||
- python 3.13
|
||||
- fastapi
|
||||
- sqlalchemy (async)
|
||||
- paho-mqtt
|
||||
- click
|
||||
- lit-html SPA
|
||||
- tailwind + daisyui
|
||||
- sqlite
|
||||
|
||||
tasks:
|
||||
install: "pip install -e '.[dev]'"
|
||||
test: "pytest"
|
||||
run: "meshcore-hub api --reload"
|
||||
lint: "pre-commit run --all-files"
|
||||
|
||||
tree:
|
||||
src/meshcore_hub/:
|
||||
__main__.py: "Click CLI entry point, registers subcommands"
|
||||
common/:
|
||||
config.py: "pydantic-settings, all env vars [config]"
|
||||
database.py: "async SQLAlchemy session management"
|
||||
mqtt.py: "MQTT client helpers"
|
||||
i18n.py: "translation loader, t() function"
|
||||
models/:
|
||||
base.py: "Base, UUIDMixin, TimestampMixin"
|
||||
node.py: null
|
||||
member.py: null
|
||||
advertisement.py: null
|
||||
message.py: null
|
||||
telemetry.py: null
|
||||
node_tag.py: null
|
||||
schemas/:
|
||||
events.py: "inbound MQTT event schemas"
|
||||
commands.py: "outbound command schemas"
|
||||
nodes.py: "API request/response schemas"
|
||||
members.py: null
|
||||
messages.py: null
|
||||
interface/:
|
||||
receiver.py: "reads device events, publishes to MQTT"
|
||||
sender.py: "subscribes MQTT commands, writes to device"
|
||||
device.py: "meshcore library wrapper"
|
||||
mock_device.py: "fake device for testing"
|
||||
collector/:
|
||||
subscriber.py: "MQTT subscriber, routes events to handlers"
|
||||
handlers/: "per-event-type DB persistence"
|
||||
cleanup.py: "data retention and node cleanup"
|
||||
webhook.py: "forward events to HTTP endpoints"
|
||||
tag_import.py: "seed node tags from YAML"
|
||||
member_import.py: "seed members from YAML"
|
||||
api/:
|
||||
app.py: "FastAPI app factory"
|
||||
auth.py: "API key authentication"
|
||||
dependencies.py: "DI for db session and auth"
|
||||
metrics.py: "Prometheus /metrics endpoint"
|
||||
routes/: "REST endpoints per resource"
|
||||
web/:
|
||||
app.py: "FastAPI app factory, SPA shell"
|
||||
pages.py: "custom markdown page loader"
|
||||
middleware.py: null
|
||||
templates/:
|
||||
spa.html: "single Jinja2 shell template"
|
||||
static/js/spa/:
|
||||
app.js: "SPA entry, route registration"
|
||||
router.js: "History API client-side router"
|
||||
api.js: "fetch wrapper for API calls"
|
||||
components.js: "shared lit-html helpers, t() re-export"
|
||||
icons.js: "SVG icon functions"
|
||||
pages/: "lazy-loaded page modules"
|
||||
alembic/: "DB migrations"
|
||||
etc/:
|
||||
prometheus/: "Prometheus scrape + alert rules"
|
||||
alertmanager/: null
|
||||
seed/: "YAML seed data (node_tags, members)"
|
||||
tests/:
|
||||
|
||||
key_symbols:
|
||||
- src/meshcore_hub/__main__.py::cli — Click root group [entry-point]
|
||||
- src/meshcore_hub/common/config.py::CommonSettings — shared env config base
|
||||
- src/meshcore_hub/common/database.py::DatabaseManager — async session factory
|
||||
- src/meshcore_hub/common/models/base.py::Base — declarative base for all models
|
||||
- src/meshcore_hub/api/app.py::create_app — API FastAPI factory
|
||||
- src/meshcore_hub/web/app.py::create_app — Web FastAPI factory
|
||||
- src/meshcore_hub/api/auth.py::require_read — read-key auth dependency
|
||||
- src/meshcore_hub/api/auth.py::require_admin — admin-key auth dependency
|
||||
- src/meshcore_hub/collector/subscriber.py::MQTTSubscriber — event ingestion loop
|
||||
- src/meshcore_hub/interface/receiver.py::Receiver — device→MQTT bridge
|
||||
- src/meshcore_hub/interface/sender.py::Sender — MQTT→device bridge
|
||||
|
||||
conventions:
|
||||
- four Click subcommands: interface, collector, api, web
|
||||
- "MQTT topic pattern: {prefix}/{pubkey}/event/{name} and .../command/{name}"
|
||||
- env config via pydantic-settings, no manual os.environ
|
||||
- web SPA: ES modules + lit-html, pages export async render()
|
||||
- i18n via t() with JSON locale files in static/locales/
|
||||
- node tags are freeform key-value pairs, standard keys in AGENTS.md
|
||||
60
.claude/commands/label-issue.md
Normal file
60
.claude/commands/label-issue.md
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
allowed-tools: Bash(gh label list:*),Bash(gh issue view:*),Bash(gh issue edit:*),Bash(gh search:*)
|
||||
description: Apply labels to GitHub issues
|
||||
---
|
||||
|
||||
You're an issue triage assistant for GitHub issues. Your task is to analyze the issue and select appropriate labels from the provided list.
|
||||
|
||||
IMPORTANT: Don't post any comments or messages to the issue. Your only action should be to apply labels.
|
||||
|
||||
Issue Information:
|
||||
|
||||
- REPO: ${{ github.repository }}
|
||||
- ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
|
||||
TASK OVERVIEW:
|
||||
|
||||
1. First, fetch the list of labels available in this repository by running: `gh label list`. Run exactly this command with nothing else.
|
||||
|
||||
2. Next, use gh commands to get context about the issue:
|
||||
|
||||
- Use `gh issue view ${{ github.event.issue.number }}` to retrieve the current issue's details
|
||||
- Use `gh search issues` to find similar issues that might provide context for proper categorization
|
||||
- You have access to these Bash commands:
|
||||
- Bash(gh label list:\*) - to get available labels
|
||||
- Bash(gh issue view:\*) - to view issue details
|
||||
- Bash(gh issue edit:\*) - to apply labels to the issue
|
||||
- Bash(gh search:\*) - to search for similar issues
|
||||
|
||||
3. Analyze the issue content, considering:
|
||||
|
||||
- The issue title and description
|
||||
- The type of issue (bug report, feature request, question, etc.)
|
||||
- Technical areas mentioned
|
||||
- Severity or priority indicators
|
||||
- User impact
|
||||
- Components affected
|
||||
|
||||
4. Select appropriate labels from the available labels list provided above:
|
||||
|
||||
- Choose labels that accurately reflect the issue's nature
|
||||
- Be specific but comprehensive
|
||||
- IMPORTANT: Add a priority label (P1, P2, or P3) based on the label descriptions from gh label list
|
||||
- Consider platform labels (android, ios) if applicable
|
||||
- If you find similar issues using gh search, consider using a "duplicate" label if appropriate. Only do so if the issue is a duplicate of another OPEN issue.
|
||||
|
||||
5. Apply the selected labels:
|
||||
- Use `gh issue edit` to apply your selected labels
|
||||
- DO NOT post any comments explaining your decision
|
||||
- DO NOT communicate directly with users
|
||||
- If no labels are clearly applicable, do not apply any labels
|
||||
|
||||
IMPORTANT GUIDELINES:
|
||||
|
||||
- Be thorough in your analysis
|
||||
- Only select labels from the provided list above
|
||||
- DO NOT post any comments to the issue
|
||||
- Your ONLY action should be to apply labels using gh issue edit
|
||||
- It's okay to not add any labels if none are clearly applicable
|
||||
|
||||
---
|
||||
44
.claude/skills/documentation/SKILL.md
Normal file
44
.claude/skills/documentation/SKILL.md
Normal file
@@ -0,0 +1,44 @@
|
||||
---
|
||||
name: documentation
|
||||
description: Audit and update project documentation to accurately reflect the current codebase. Use when documentation may be outdated, after significant code changes, or when the user asks to review or update docs.
|
||||
---
|
||||
|
||||
# Documentation Audit
|
||||
|
||||
Audit and update all project documentation so it accurately reflects the current state of the codebase. Documentation must only describe features, options, configurations, and functionality that actually exist in the code.
|
||||
|
||||
## Files to Review
|
||||
|
||||
- **README.md** - Project overview, setup instructions, usage examples
|
||||
- **AGENTS.md** - AI coding assistant guidelines, project structure, conventions
|
||||
- **.env.example** - Example environment variables
|
||||
|
||||
Also check for substantial comments or inline instructions within the codebase that may be outdated.
|
||||
|
||||
## Process
|
||||
|
||||
1. **Read all documentation files** listed above in full.
|
||||
|
||||
2. **Cross-reference against the codebase.** For every documented item (features, env vars, CLI commands, routes, models, directory paths, conventions), search the code to verify:
|
||||
- It actually exists.
|
||||
- Its described behavior matches the implementation.
|
||||
- File paths and directory structures are accurate.
|
||||
|
||||
3. **Identify and fix discrepancies:**
|
||||
- **Version updates** — ensure documentation reflects any new/updated/removed versions. Check .python-version, pyproject.toml, etc.
|
||||
- **Stale/legacy content** — documented but no longer in the code. Remove it.
|
||||
- **Missing content** — exists in the code but not documented. Add it.
|
||||
- **Inaccurate descriptions** — documented behavior doesn't match implementation. Correct it.
|
||||
|
||||
4. **Apply updates** to each file. Preserve existing style and structure.
|
||||
|
||||
5. **Verify consistency** across all documentation files — they must not contradict each other.
|
||||
|
||||
## Rules
|
||||
|
||||
- Do NOT invent features or options that don't exist in the code.
|
||||
- Do NOT remove documentation for features that DO exist.
|
||||
- Do NOT change the fundamental structure or style of the docs.
|
||||
- Do NOT modify CLAUDE.md.
|
||||
- Focus on accuracy, not cosmetic changes.
|
||||
- When in doubt, check the source code.
|
||||
49
.claude/skills/git-branch/SKILL.md
Normal file
49
.claude/skills/git-branch/SKILL.md
Normal file
@@ -0,0 +1,49 @@
|
||||
---
|
||||
name: git-branch
|
||||
description: Create a new branch from latest main with the project's naming convention (feat/fix/chore). Use when starting new work on a feature, bug fix, or chore.
|
||||
---
|
||||
|
||||
# Git Branch
|
||||
|
||||
Create a new branch from the latest `main` branch using the project's naming convention.
|
||||
|
||||
## Arguments
|
||||
|
||||
The user may provide arguments in the format: `<type>/<description>`
|
||||
|
||||
- `type` — one of `feat`, `fix`, or `chore`
|
||||
- `description` — short kebab-case description (e.g., `add-map-clustering`)
|
||||
|
||||
If not provided, ask the user for the branch type and description.
|
||||
|
||||
## Process
|
||||
|
||||
1. **Fetch latest main:**
|
||||
|
||||
```bash
|
||||
git fetch origin main
|
||||
```
|
||||
|
||||
2. **Determine branch name:**
|
||||
|
||||
- If the user provided arguments (e.g., `/git-branch feat/add-map-clustering`), use them directly.
|
||||
- Otherwise, ask the user for:
|
||||
- **Branch type**: `feat`, `fix`, or `chore`
|
||||
- **Short description**: a brief kebab-case slug describing the work
|
||||
- Construct the branch name as `{type}/{slug}` (e.g., `feat/add-map-clustering`).
|
||||
|
||||
3. **Create and switch to the new branch:**
|
||||
|
||||
```bash
|
||||
git checkout -b {branch_name} origin/main
|
||||
```
|
||||
|
||||
4. **Confirm** by reporting the new branch name to the user.
|
||||
|
||||
## Rules
|
||||
|
||||
- Branch names MUST follow the `{type}/{slug}` convention.
|
||||
- Valid types are `feat`, `fix`, and `chore` only.
|
||||
- The slug MUST be kebab-case (lowercase, hyphens, no spaces or underscores).
|
||||
- Always branch from `origin/main`, never from the current branch.
|
||||
- Do NOT push the branch — just create it locally.
|
||||
94
.claude/skills/git-pr/SKILL.md
Normal file
94
.claude/skills/git-pr/SKILL.md
Normal file
@@ -0,0 +1,94 @@
|
||||
---
|
||||
name: git-pr
|
||||
description: Create a pull request to main from the current branch. Runs quality checks, commits changes, pushes, and opens a PR via gh CLI. Use when ready to submit work for review.
|
||||
---
|
||||
|
||||
# Git PR
|
||||
|
||||
Create a pull request to `main` from the current feature branch.
|
||||
|
||||
## Process
|
||||
|
||||
### Phase 1: Pre-flight Checks
|
||||
|
||||
1. **Verify branch:**
|
||||
|
||||
```bash
|
||||
git branch --show-current
|
||||
```
|
||||
|
||||
- The current branch must NOT be `main`. If on `main`, tell the user to create a feature branch first (e.g., `/git-branch`).
|
||||
|
||||
2. **Check for uncommitted changes:**
|
||||
|
||||
```bash
|
||||
git status
|
||||
```
|
||||
|
||||
- If there are uncommitted changes, ask the user for a commit message and commit them using the `/git-commit` skill conventions (no Claude authoring details).
|
||||
|
||||
### Phase 2: Quality Checks
|
||||
|
||||
1. **Determine changed components** by comparing against `main`:
|
||||
|
||||
```bash
|
||||
git diff --name-only main...HEAD
|
||||
```
|
||||
|
||||
2. **Run targeted tests** based on changed files:
|
||||
- `tests/test_web/` for web-only changes (templates, static JS, web routes)
|
||||
- `tests/test_api/` for API changes
|
||||
- `tests/test_collector/` for collector changes
|
||||
- `tests/test_interface/` for interface/sender/receiver changes
|
||||
- `tests/test_common/` for common models/schemas/config changes
|
||||
- Run the full `pytest` if changes span multiple components
|
||||
|
||||
3. **Run pre-commit checks:**
|
||||
|
||||
```bash
|
||||
pre-commit run --all-files
|
||||
```
|
||||
|
||||
- If checks fail and auto-fix files, commit the fixes and re-run until clean.
|
||||
|
||||
4. If tests or checks fail and cannot be auto-fixed, report the issues to the user and stop.
|
||||
|
||||
### Phase 3: Push and Create PR
|
||||
|
||||
1. **Push the branch to origin:**
|
||||
|
||||
```bash
|
||||
git push -u origin HEAD
|
||||
```
|
||||
|
||||
2. **Generate PR content:**
|
||||
- **Title**: Derive from the branch name. Convert `feat/add-map-clustering` to `Add map clustering`, `fix/login-error` to `Fix login error`, etc. Keep under 70 characters.
|
||||
- **Body**: Generate a summary from the commit history:
|
||||
|
||||
```bash
|
||||
git log main..HEAD --oneline
|
||||
```
|
||||
|
||||
3. **Create the PR:**
|
||||
|
||||
```bash
|
||||
gh pr create --title "{title}" --body "$(cat <<'EOF'
|
||||
## Summary
|
||||
{bullet points summarizing the changes}
|
||||
|
||||
## Test plan
|
||||
{checklist of testing steps}
|
||||
EOF
|
||||
)"
|
||||
```
|
||||
|
||||
4. **Return the PR URL** to the user.
|
||||
|
||||
## Rules
|
||||
|
||||
- Do NOT create a PR from `main`.
|
||||
- Do NOT skip quality checks — tests and pre-commit must pass.
|
||||
- Do NOT force-push.
|
||||
- Always target `main` as the base branch.
|
||||
- Keep the PR title concise (under 70 characters).
|
||||
- If quality checks fail, fix issues or report to the user — do NOT create the PR with failing checks.
|
||||
66
.claude/skills/quality/SKILL.md
Normal file
66
.claude/skills/quality/SKILL.md
Normal file
@@ -0,0 +1,66 @@
|
||||
---
|
||||
name: quality
|
||||
description: Run the full test suite, pre-commit checks, and re-run tests to ensure code quality. Fixes any issues found. Use after code changes, before commits, or when the user asks to check quality.
|
||||
---
|
||||
|
||||
# Quality Check
|
||||
|
||||
Run the full quality pipeline: tests, pre-commit checks, and a verification test run. Fix any issues discovered at each stage.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before running checks, ensure the environment is ready:
|
||||
|
||||
1. Check for `.venv` directory — create with `python -m venv .venv` if missing.
|
||||
2. Activate the virtual environment: `source .venv/bin/activate`
|
||||
3. Install dependencies: `pip install -e ".[dev]"`
|
||||
|
||||
## Process
|
||||
|
||||
### Phase 1: Initial Test Run
|
||||
|
||||
Run the full test suite to establish a baseline:
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
- If tests **pass**, proceed to Phase 2.
|
||||
- If tests **fail**, investigate and fix the failures before continuing. Re-run the failing tests to confirm fixes. Then proceed to Phase 2.
|
||||
|
||||
### Phase 2: Pre-commit Checks
|
||||
|
||||
Run all pre-commit hooks against the entire codebase:
|
||||
|
||||
```bash
|
||||
pre-commit run --all-files
|
||||
```
|
||||
|
||||
- If all checks **pass**, proceed to Phase 3.
|
||||
- If checks **fail**:
|
||||
- Many hooks (black, trailing whitespace, end-of-file) auto-fix issues. Re-run `pre-commit run --all-files` to confirm auto-fixes resolved the issues.
|
||||
- For remaining failures (flake8, mypy, etc.), investigate and fix manually.
|
||||
- Re-run `pre-commit run --all-files` until all checks pass.
|
||||
- Then proceed to Phase 3.
|
||||
|
||||
### Phase 3: Verification Test Run
|
||||
|
||||
Run the full test suite again to ensure pre-commit fixes (formatting, import sorting, etc.) haven't broken any functionality:
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
- If tests **pass**, the quality check is complete.
|
||||
- If tests **fail**, the pre-commit fixes introduced a regression. Investigate and fix, then re-run both `pre-commit run --all-files` and `pytest` until both pass cleanly.
|
||||
|
||||
## Rules
|
||||
|
||||
- Always run the FULL test suite (`pytest`), not targeted tests.
|
||||
- Always run pre-commit against ALL files (`--all-files`).
|
||||
- Do NOT skip or ignore failing tests — investigate and fix them.
|
||||
- Do NOT skip or ignore pre-commit failures — investigate and fix them.
|
||||
- Do NOT modify test assertions to make tests pass unless the test is genuinely wrong.
|
||||
- Do NOT disable pre-commit hooks or add noqa/type:ignore unless truly justified.
|
||||
- Fix root causes, not symptoms.
|
||||
- If a fix requires changes outside the scope of a simple quality fix (e.g., a design change), report it to the user rather than making the change silently.
|
||||
114
.claude/skills/release/SKILL.md
Normal file
114
.claude/skills/release/SKILL.md
Normal file
@@ -0,0 +1,114 @@
|
||||
---
|
||||
name: release
|
||||
description: Full release workflow — quality gate, semantic version tag, push, and GitHub release. Use when ready to cut a new release from main.
|
||||
---
|
||||
|
||||
# Release
|
||||
|
||||
Run the full release workflow: quality checks, version tagging, push, and GitHub release creation.
|
||||
|
||||
## Arguments
|
||||
|
||||
The user may optionally provide a version number (e.g., `/release 1.2.0`). If not provided, one will be suggested based on commit history.
|
||||
|
||||
## Process
|
||||
|
||||
### Phase 1: Pre-flight Checks
|
||||
|
||||
1. **Verify on `main` branch:**
|
||||
|
||||
```bash
|
||||
git branch --show-current
|
||||
```
|
||||
|
||||
- Must be on `main`. If not, tell the user to switch to `main` first.
|
||||
|
||||
2. **Verify working tree is clean:**
|
||||
|
||||
```bash
|
||||
git status --porcelain
|
||||
```
|
||||
|
||||
- If there are uncommitted changes, tell the user to commit or stash them first.
|
||||
|
||||
3. **Pull latest:**
|
||||
|
||||
```bash
|
||||
git pull origin main
|
||||
```
|
||||
|
||||
### Phase 2: Quality Gate
|
||||
|
||||
1. **Run full test suite:**
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
2. **Run pre-commit checks:**
|
||||
|
||||
```bash
|
||||
pre-commit run --all-files
|
||||
```
|
||||
|
||||
3. If either fails, report the issues and stop. Do NOT proceed with a release that has failing checks.
|
||||
|
||||
### Phase 3: Determine Version
|
||||
|
||||
1. **Get the latest tag:**
|
||||
|
||||
```bash
|
||||
git describe --tags --abbrev=0 2>/dev/null || echo "none"
|
||||
```
|
||||
|
||||
2. **List commits since last tag:**
|
||||
|
||||
```bash
|
||||
git log {last_tag}..HEAD --oneline
|
||||
```
|
||||
|
||||
If no previous tag exists, list the last 20 commits:
|
||||
|
||||
```bash
|
||||
git log --oneline -20
|
||||
```
|
||||
|
||||
3. **Determine next version:**
|
||||
- If the user provided a version, use it.
|
||||
- Otherwise, suggest a version based on commit prefixes:
|
||||
- Any commit starting with `feat` or `Add` → **minor** bump
|
||||
- Only `fix` or `Fix` commits → **patch** bump
|
||||
- If no previous tag, suggest `0.1.0`
|
||||
- Present the suggestion and ask the user to confirm or provide a different version.
|
||||
|
||||
### Phase 4: Tag and Release
|
||||
|
||||
1. **Create annotated tag:**
|
||||
|
||||
```bash
|
||||
git tag -a v{version} -m "Release v{version}"
|
||||
```
|
||||
|
||||
2. **Push tag to origin:**
|
||||
|
||||
```bash
|
||||
git push origin v{version}
|
||||
```
|
||||
|
||||
3. **Create GitHub release:**
|
||||
|
||||
```bash
|
||||
gh release create v{version} --title "v{version}" --generate-notes
|
||||
```
|
||||
|
||||
4. **Report** the release URL to the user.
|
||||
|
||||
## Rules
|
||||
|
||||
- MUST be on `main` branch with a clean working tree.
|
||||
- MUST pass all quality checks before tagging.
|
||||
- Tags MUST follow the `v{major}.{minor}.{patch}` format (e.g., `v1.2.0`).
|
||||
- Always create an annotated tag, not a lightweight tag.
|
||||
- Always confirm the version with the user before tagging.
|
||||
- Do NOT skip quality checks under any circumstances.
|
||||
- Do NOT force-push tags.
|
||||
122
.env.example
122
.env.example
@@ -80,6 +80,14 @@ MQTT_PREFIX=meshcore
|
||||
# When enabled, uses TLS with system CA certificates (e.g., for Let's Encrypt)
|
||||
MQTT_TLS=false
|
||||
|
||||
# MQTT transport protocol
|
||||
# Options: tcp, websockets
|
||||
MQTT_TRANSPORT=tcp
|
||||
|
||||
# MQTT WebSocket path (used only when MQTT_TRANSPORT=websockets)
|
||||
# Common values: /mqtt, /
|
||||
MQTT_WS_PATH=/mqtt
|
||||
|
||||
# External port mappings for local MQTT broker (--profile mqtt only)
|
||||
MQTT_EXTERNAL_PORT=1883
|
||||
MQTT_WS_PORT=9001
|
||||
@@ -107,11 +115,46 @@ MESHCORE_DEVICE_NAME=
|
||||
NODE_ADDRESS=
|
||||
NODE_ADDRESS_SENDER=
|
||||
|
||||
# -------------------
|
||||
# Contact Cleanup Settings (RECEIVER mode only)
|
||||
# -------------------
|
||||
# Automatic removal of stale contacts from the MeshCore companion node
|
||||
|
||||
# Enable automatic removal of stale contacts from companion node
|
||||
CONTACT_CLEANUP_ENABLED=true
|
||||
|
||||
# Remove contacts not advertised for this many days
|
||||
CONTACT_CLEANUP_DAYS=7
|
||||
|
||||
# =============================================================================
|
||||
# COLLECTOR SETTINGS
|
||||
# =============================================================================
|
||||
# The collector subscribes to MQTT events and stores them in the database
|
||||
|
||||
# Collector MQTT ingest mode
|
||||
# - native: expects <prefix>/<pubkey>/event/<event_name> topics
|
||||
# - letsmesh_upload: expects LetsMesh observer uploads on
|
||||
# <prefix>/<pubkey>/(packets|status|internal)
|
||||
COLLECTOR_INGEST_MODE=native
|
||||
|
||||
# LetsMesh decoder support (used only when COLLECTOR_INGEST_MODE=letsmesh_upload)
|
||||
# Set to false to disable external packet decoding
|
||||
COLLECTOR_LETSMESH_DECODER_ENABLED=true
|
||||
|
||||
# Decoder command (must be available in container PATH)
|
||||
# Examples: meshcore-decoder, /usr/local/bin/meshcore-decoder, npx meshcore-decoder
|
||||
COLLECTOR_LETSMESH_DECODER_COMMAND=meshcore-decoder
|
||||
|
||||
# Optional: channel secret keys (comma or space separated) used to decrypt GroupText
|
||||
# packets. This supports unlimited keys.
|
||||
# Note: Public + #test keys are built into the collector code by default.
|
||||
# To show friendly channel names in the web feed, use label=hex (example: bot=ABCDEF...).
|
||||
# Without keys, encrypted packets cannot be shown as plaintext.
|
||||
# COLLECTOR_LETSMESH_DECODER_KEYS=
|
||||
|
||||
# Timeout in seconds per decode invocation
|
||||
COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=2.0
|
||||
|
||||
# -------------------
|
||||
# Webhook Settings
|
||||
# -------------------
|
||||
@@ -179,6 +222,25 @@ API_PORT=8000
|
||||
API_READ_KEY=
|
||||
API_ADMIN_KEY=
|
||||
|
||||
# -------------------
|
||||
# Prometheus Metrics
|
||||
# -------------------
|
||||
# Prometheus metrics endpoint exposed at /metrics on the API service
|
||||
|
||||
# Enable Prometheus metrics endpoint
|
||||
# Default: true
|
||||
METRICS_ENABLED=true
|
||||
|
||||
# Seconds to cache metrics output (reduces database load)
|
||||
# Default: 60
|
||||
METRICS_CACHE_TTL=60
|
||||
|
||||
# External Prometheus port (when using --profile metrics)
|
||||
PROMETHEUS_PORT=9090
|
||||
|
||||
# External Alertmanager port (when using --profile metrics)
|
||||
ALERTMANAGER_PORT=9093
|
||||
|
||||
# =============================================================================
|
||||
# WEB DASHBOARD SETTINGS
|
||||
# =============================================================================
|
||||
@@ -187,11 +249,56 @@ API_ADMIN_KEY=
|
||||
# External web port
|
||||
WEB_PORT=8080
|
||||
|
||||
# API endpoint URL for the web dashboard
|
||||
# Default: http://localhost:8000
|
||||
# API_BASE_URL=http://localhost:8000
|
||||
|
||||
# API key for web dashboard queries (optional)
|
||||
# If API_READ_KEY is set on the API, provide it here
|
||||
# API_KEY=
|
||||
|
||||
# Default theme for the web dashboard (dark or light)
|
||||
# Users can override via the theme toggle; their preference is saved in localStorage
|
||||
# Default: dark
|
||||
# WEB_THEME=dark
|
||||
|
||||
# Locale/language for the web dashboard
|
||||
# Default: en
|
||||
# Supported: en (see src/meshcore_hub/web/static/locales/ for available translations)
|
||||
# WEB_LOCALE=en
|
||||
|
||||
# Locale used for date/time formatting in the web dashboard
|
||||
# Controls date ordering only; 24-hour clock is still used by default
|
||||
# Examples: en-US (MM/DD/YYYY), en-GB (DD/MM/YYYY)
|
||||
# Default: en-US
|
||||
# WEB_DATETIME_LOCALE=en-US
|
||||
|
||||
# Auto-refresh interval in seconds for list pages (nodes, advertisements, messages)
|
||||
# Set to 0 to disable auto-refresh
|
||||
# Default: 30
|
||||
# WEB_AUTO_REFRESH_SECONDS=30
|
||||
|
||||
# Enable admin interface at /a/ (requires auth proxy in front)
|
||||
# Default: false
|
||||
# WEB_ADMIN_ENABLED=false
|
||||
|
||||
# Timezone for displaying dates/times on the web dashboard
|
||||
# Uses standard IANA timezone names (e.g., America/New_York, Europe/London)
|
||||
# Default: UTC
|
||||
TZ=UTC
|
||||
|
||||
# Directory containing custom content (pages/, media/)
|
||||
# Default: ./content
|
||||
# CONTENT_HOME=./content
|
||||
|
||||
# -------------------
|
||||
# Network Information
|
||||
# -------------------
|
||||
# Displayed on the web dashboard homepage
|
||||
|
||||
# Network domain name (optional)
|
||||
# NETWORK_DOMAIN=
|
||||
|
||||
# Network display name
|
||||
NETWORK_NAME=MeshCore Network
|
||||
|
||||
@@ -208,6 +315,20 @@ NETWORK_RADIO_CONFIG=
|
||||
# If not set, a default welcome message is shown
|
||||
NETWORK_WELCOME_TEXT=
|
||||
|
||||
# -------------------
|
||||
# Feature Flags
|
||||
# -------------------
|
||||
# Control which pages are visible in the web dashboard
|
||||
# Set to false to completely hide a page (nav, routes, sitemap, robots.txt)
|
||||
|
||||
# FEATURE_DASHBOARD=true
|
||||
# FEATURE_NODES=true
|
||||
# FEATURE_ADVERTISEMENTS=true
|
||||
# FEATURE_MESSAGES=true
|
||||
# FEATURE_MAP=true
|
||||
# FEATURE_MEMBERS=true
|
||||
# FEATURE_PAGES=true
|
||||
|
||||
# -------------------
|
||||
# Contact Information
|
||||
# -------------------
|
||||
@@ -216,3 +337,4 @@ NETWORK_WELCOME_TEXT=
|
||||
NETWORK_CONTACT_EMAIL=
|
||||
NETWORK_CONTACT_DISCORD=
|
||||
NETWORK_CONTACT_GITHUB=
|
||||
NETWORK_CONTACT_YOUTUBE=
|
||||
|
||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
buy_me_a_coffee: jinglemansweep
|
||||
67
.github/workflows/ci.yml
vendored
67
.github/workflows/ci.yml
vendored
@@ -5,49 +5,40 @@ on:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**"
|
||||
- "tests/**"
|
||||
- "alembic/**"
|
||||
- ".python-version"
|
||||
- "pyproject.toml"
|
||||
- ".pre-commit-config.yaml"
|
||||
- ".github/workflows/**"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.13"
|
||||
python-version-file: ".python-version"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install black flake8 mypy
|
||||
pip install -e ".[dev]"
|
||||
|
||||
- name: Check formatting with black
|
||||
run: black --check src/ tests/
|
||||
|
||||
- name: Lint with flake8
|
||||
run: flake8 src/ tests/
|
||||
|
||||
- name: Type check with mypy
|
||||
run: mypy src/
|
||||
- name: Run pre-commit
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
test:
|
||||
name: Test (Python ${{ matrix.python-version }})
|
||||
name: Test
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.13"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
python-version-file: ".python-version"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -56,27 +47,35 @@ jobs:
|
||||
|
||||
- name: Run tests with pytest
|
||||
run: |
|
||||
pytest --cov=meshcore_hub --cov-report=xml --cov-report=term-missing
|
||||
pytest --cov=meshcore_hub --cov-report=xml --cov-report=term-missing --junitxml=junit.xml -o junit_family=legacy
|
||||
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v4
|
||||
if: matrix.python-version == '3.13'
|
||||
uses: codecov/codecov-action@v5
|
||||
if: always()
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
fail_ci_if_error: false
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: Upload test results to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() }}
|
||||
with:
|
||||
report_type: test_results
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
build:
|
||||
name: Build Package
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, test]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.13"
|
||||
python-version-file: ".python-version"
|
||||
|
||||
- name: Install build tools
|
||||
run: |
|
||||
@@ -87,7 +86,7 @@ jobs:
|
||||
run: python -m build
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: dist
|
||||
path: dist/
|
||||
|
||||
49
.github/workflows/claude.yml
vendored
49
.github/workflows/claude.yml
vendored
@@ -1,49 +0,0 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
|
||||
# prompt: 'Update the pull request description to include a summary of changes.'
|
||||
|
||||
# Optional: Add claude_args to customize behavior and configuration
|
||||
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
|
||||
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
|
||||
# claude_args: '--allowed-tools Bash(gh pr:*)'
|
||||
21
.github/workflows/docker.yml
vendored
21
.github/workflows/docker.yml
vendored
@@ -3,6 +3,15 @@ name: Docker
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**"
|
||||
- "alembic/**"
|
||||
- "alembic.ini"
|
||||
- ".python-version"
|
||||
- "pyproject.toml"
|
||||
- "Dockerfile"
|
||||
- "docker-compose.yml"
|
||||
- ".github/workflows/**"
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
@@ -19,17 +28,17 @@ jobs:
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v4
|
||||
|
||||
- name: Log in to Container Registry
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -37,7 +46,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@v6
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@@ -48,7 +57,7 @@ jobs:
|
||||
type=sha
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
uses: docker/build-push-action@v7
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,6 +3,7 @@
|
||||
!example/data/
|
||||
/seed/
|
||||
!example/seed/
|
||||
/content/
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
||||
100
.plans/2026/03/09/01-security-fixes/changelog.md
Normal file
100
.plans/2026/03/09/01-security-fixes/changelog.md
Normal file
@@ -0,0 +1,100 @@
|
||||
## TASK-001: Remove legacy HTML dashboard endpoint
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/api/routes/dashboard.py`
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
### Notes
|
||||
Removed the `dashboard()` route handler and its `@router.get("")` decorator. Removed `HTMLResponse` and `Request` imports no longer used. Updated existing tests to verify the HTML endpoint returns 404/405. All JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) remain intact.
|
||||
---
|
||||
|
||||
## TASK-002: Replace API key comparisons with constant-time comparison
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/api/auth.py`
|
||||
- `src/meshcore_hub/api/metrics.py`
|
||||
### Notes
|
||||
Added `import hmac` to both files. Replaced `==` comparisons with `hmac.compare_digest()` in `require_read`, `require_admin`, and `verify_basic_auth`. Added truthiness guards for `read_key`/`admin_key` in `require_read` since either can be `None` and `hmac.compare_digest()` raises `TypeError` on `None`.
|
||||
---
|
||||
|
||||
## TASK-003: Add WEB_TRUSTED_PROXY_HOSTS configuration setting
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/common/config.py`
|
||||
### Notes
|
||||
Added `web_trusted_proxy_hosts: str = Field(default="*", ...)` to `WebSettings` class. Automatically configurable via `WEB_TRUSTED_PROXY_HOSTS` env var through Pydantic Settings.
|
||||
---
|
||||
|
||||
## TASK-004: Integrate trusted proxy hosts into web app middleware and add startup warning
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
### Notes
|
||||
Replaced hardcoded `trusted_hosts="*"` in `ProxyHeadersMiddleware` with configured value. If value is `"*"`, passes string directly; otherwise splits on commas. Added startup warning when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS="*"`. `_is_authenticated_proxy_request` unchanged.
|
||||
---
|
||||
|
||||
## TASK-005: Escape config JSON in template script block to prevent XSS breakout
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
### Notes
|
||||
Added `.replace("</", "<\\/")` to `_build_config_json` return value. Prevents `</script>` breakout in the Jinja2 template's `<script>` block. `<\/` is valid JSON per spec and parsed correctly by `JSON.parse()`.
|
||||
---
|
||||
|
||||
## TASK-006: Fix stored XSS in admin node-tags page
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
|
||||
### Notes
|
||||
Added `escapeHtml` to imports. Escaped `nodeName` with `escapeHtml()` in copy-all and delete-all confirmation dialogs (2 `unsafeHTML()` calls). Escaped `activeTagKey` with `escapeHtml()` in single tag delete confirmation (`innerHTML` assignment). Translation template `<strong>` tags preserved.
|
||||
---
|
||||
|
||||
## TASK-007: Fix stored XSS in admin members page
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
|
||||
### Notes
|
||||
Added `escapeHtml` to imports. Escaped `memberName` with `escapeHtml()` before passing to `t()` in delete confirmation dialog. `innerHTML` retained for `<strong>` tag rendering from translation template.
|
||||
---
|
||||
|
||||
## TASK-008: Write tests for legacy dashboard endpoint removal
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
### Notes
|
||||
Added 5 new tests: 1 for trailing-slash 404/405 verification, 4 for authenticated JSON sub-route responses. Total 20 dashboard tests passing.
|
||||
---
|
||||
|
||||
## TASK-009: Write tests for constant-time API key comparison
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_api/test_auth.py`
|
||||
### Notes
|
||||
Restructured from 10 tests (2 classes) to 22 tests (4 classes): `TestReadAuthentication` (9), `TestAdminAuthentication` (4), `TestMetricsAuthentication` (7), `TestHealthEndpoint` (2). Added coverage for multi-endpoint read/admin key acceptance, missing auth header rejection, and metrics credential validation.
|
||||
---
|
||||
|
||||
## TASK-010: Write tests for trusted proxy hosts configuration and startup warning
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `tests/test_common/test_config.py`
|
||||
- `tests/test_web/test_app.py`
|
||||
### Notes
|
||||
Added 3 config tests (default value, specific IP, comma-separated list) and 5 web app tests (warning logged with wildcard+admin, no warning with specific hosts, no warning with admin disabled, comma list parsing, wildcard passed as string).
|
||||
---
|
||||
|
||||
## TASK-011: Write tests for config JSON script block escaping
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
- `tests/test_web/test_app.py`
|
||||
### Notes
|
||||
Added 5 tests in `TestConfigJsonXssEscaping` class: rendered HTML escaping, normal values unaffected, escaped JSON parseable, direct `_build_config_json` escaping, direct no-escaping-needed.
|
||||
---
|
||||
|
||||
## TASK-012: Update documentation for WEB_TRUSTED_PROXY_HOSTS setting
|
||||
**Status:** completed
|
||||
### Files Modified
|
||||
- `README.md`
|
||||
- `AGENTS.md`
|
||||
- `PLAN.md`
|
||||
### Notes
|
||||
Added `WEB_TRUSTED_PROXY_HOSTS` to environment variables sections in all three docs. Documented default value (`*`), production recommendation, and startup warning behavior.
|
||||
---
|
||||
162
.plans/2026/03/09/01-security-fixes/prd.md
Normal file
162
.plans/2026/03/09/01-security-fixes/prd.md
Normal file
@@ -0,0 +1,162 @@
|
||||
# Product Requirements Document
|
||||
|
||||
> Source: `.plans/2026/03/09/01-security-fixes/prompt.md`
|
||||
|
||||
## Project Overview
|
||||
|
||||
This project addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes must be backward-compatible and preserve existing API contracts.
|
||||
|
||||
## Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
|
||||
- Harden API key comparison against timing side-channel attacks
|
||||
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
|
||||
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
|
||||
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
|
||||
|
||||
## Functional Requirements
|
||||
|
||||
### REQ-001: Remove legacy HTML dashboard endpoint
|
||||
|
||||
**Description:** Remove the `GET /api/v1/dashboard/` route handler that renders a standalone HTML page with unescaped database content (stored XSS) and no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) must remain intact and unchanged.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] The `dashboard()` route handler in `api/routes/dashboard.py` is removed
|
||||
- [ ] The `HTMLResponse` import is removed (if no longer used)
|
||||
- [ ] `GET /api/v1/dashboard/` returns 404 or Method Not Allowed
|
||||
- [ ] `GET /api/v1/dashboard/stats` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/activity` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/message-activity` continues to return valid JSON with authentication
|
||||
- [ ] `GET /api/v1/dashboard/node-count` continues to return valid JSON with authentication
|
||||
- [ ] Existing API tests for JSON sub-routes still pass
|
||||
|
||||
### REQ-002: Use constant-time comparison for API key validation
|
||||
|
||||
**Description:** Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent timing side-channel attacks that could leak key material.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] All API key comparisons in `api/auth.py` use `hmac.compare_digest()` instead of `==`
|
||||
- [ ] All credential comparisons in `api/metrics.py` use `hmac.compare_digest()` instead of `==`
|
||||
- [ ] `hmac` is imported in all files where secret comparison occurs
|
||||
- [ ] The authentication behavior is unchanged — valid keys are accepted, invalid keys are rejected
|
||||
- [ ] Tests confirm authentication still works correctly with valid and invalid keys
|
||||
|
||||
### REQ-003: Add configurable trusted proxy hosts for admin authentication
|
||||
|
||||
**Description:** Add a `WEB_TRUSTED_PROXY_HOSTS` configuration setting that controls which hosts are trusted for proxy authentication headers (`X-Forwarded-User`, `X-Auth-Request-User`, `Authorization: Basic`). The setting defaults to `*` for backward compatibility. A startup warning is emitted when admin is enabled with the wildcard default. The `Authorization: Basic` header check must be preserved for Nginx Proxy Manager compatibility.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] A `WEB_TRUSTED_PROXY_HOSTS` setting is added to the configuration (Pydantic Settings)
|
||||
- [ ] The setting defaults to `*` (backward compatible)
|
||||
- [ ] `ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`
|
||||
- [ ] A warning is logged at startup when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`
|
||||
- [ ] The warning message recommends restricting trusted hosts to the operator's proxy IP
|
||||
- [ ] The `_is_authenticated_proxy_request` function continues to accept `X-Forwarded-User`, `X-Auth-Request-User`, and `Authorization: Basic` headers
|
||||
- [ ] OAuth2 proxy setups continue to function correctly
|
||||
- [ ] Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust to that IP
|
||||
|
||||
### REQ-004: Escape config JSON in template script block
|
||||
|
||||
**Description:** Prevent XSS via `</script>` breakout in the `config_json|safe` template injection by escaping `</` sequences in the serialized JSON string before passing it to the Jinja2 template.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] `config_json` is escaped by replacing `</` with `<\\/` before template rendering (in `web/app.py`)
|
||||
- [ ] The `|safe` filter continues to be used (the escaping happens in Python, not Jinja2)
|
||||
- [ ] A config value containing `</script><script>alert(1)</script>` does not execute JavaScript
|
||||
- [ ] The SPA application correctly parses the escaped config JSON on the client side
|
||||
- [ ] Normal config values (without special characters) render unchanged
|
||||
|
||||
### REQ-005: Fix stored XSS in admin page JavaScript
|
||||
|
||||
**Description:** Sanitize API-sourced data (node names, tag keys, member names) before rendering in admin pages. Replace `unsafeHTML()` and direct `innerHTML` assignment with safe alternatives — either `escapeHtml()` (already available in `components.js`) or lit-html safe templating (`${value}` interpolation without `unsafeHTML`).
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Node names in `admin/node-tags.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] Tag keys in `admin/node-tags.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] Member names in `admin/members.js` are escaped or safely templated before HTML rendering
|
||||
- [ ] All `unsafeHTML()` calls on API-sourced data in the identified files are replaced with safe alternatives
|
||||
- [ ] All direct `innerHTML` assignments of API-sourced data in the identified files are replaced with safe alternatives
|
||||
- [ ] A node name containing `<img src=x onerror=alert(1)>` renders as text, not as an HTML element
|
||||
- [ ] A member name containing `<script>alert(1)</script>` renders as text, not as executable script
|
||||
- [ ] Normal names (without special characters) continue to display correctly
|
||||
|
||||
## Non-Functional Requirements
|
||||
|
||||
### REQ-006: Backward compatibility
|
||||
|
||||
**Category:** Reliability
|
||||
|
||||
**Description:** All security fixes must maintain backward compatibility with existing deployments. No breaking changes to API contracts, configuration defaults, or deployment workflows.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] All existing API endpoints (except the removed HTML dashboard) return the same response format
|
||||
- [ ] Default configuration values preserve existing behavior without requiring operator action
|
||||
- [ ] Docker Compose deployments continue to function without configuration changes
|
||||
- [ ] All existing tests pass after the security fixes are applied
|
||||
|
||||
### REQ-007: No regression in authentication flows
|
||||
|
||||
**Category:** Security
|
||||
|
||||
**Description:** The security hardening must not introduce authentication regressions. Valid credentials must continue to be accepted, and invalid credentials must continue to be rejected, across all authentication methods.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] API read key authentication accepts valid keys and rejects invalid keys
|
||||
- [ ] API admin key authentication accepts valid keys and rejects invalid keys
|
||||
- [ ] Metrics endpoint authentication (if configured) accepts valid credentials and rejects invalid ones
|
||||
- [ ] Proxy header authentication continues to work with OAuth2 proxy setups
|
||||
- [ ] Basic auth header forwarding from Nginx Proxy Manager continues to work
|
||||
|
||||
## Technical Constraints and Assumptions
|
||||
|
||||
### Constraints
|
||||
|
||||
- Python 3.13+ (specified by project `.python-version`)
|
||||
- Must use `hmac.compare_digest()` from the Python standard library for constant-time comparison
|
||||
- The `Authorization: Basic` header check in `_is_authenticated_proxy_request` must not be removed or modified to validate credentials server-side — credential validation is the proxy's responsibility
|
||||
- Changes must not alter existing API response schemas or status codes (except removing the HTML dashboard endpoint)
|
||||
|
||||
### Assumptions
|
||||
|
||||
- The `escapeHtml()` utility in `components.js` correctly escapes `<`, `>`, `&`, `"`, and `'` characters
|
||||
- The SPA client-side JavaScript can parse JSON containing escaped `<\/` sequences (standard behavior per JSON spec)
|
||||
- Operators using proxy authentication have a reverse proxy (e.g., Nginx, Traefik, NPM) in front of MeshCore Hub
|
||||
|
||||
## Scope
|
||||
|
||||
### In Scope
|
||||
|
||||
- Removing the legacy HTML dashboard route handler (C1 + H2)
|
||||
- Replacing `==` with `hmac.compare_digest()` for all secret comparisons (H1)
|
||||
- Adding `WEB_TRUSTED_PROXY_HOSTS` configuration and startup warning (H3)
|
||||
- Escaping `</` in config JSON template injection (H4)
|
||||
- Fixing `unsafeHTML()`/`innerHTML` XSS in admin JavaScript pages (H5)
|
||||
- Updating tests to cover the security fixes
|
||||
- Updating documentation for the new `WEB_TRUSTED_PROXY_HOSTS` setting
|
||||
|
||||
### Out of Scope
|
||||
|
||||
- MEDIUM severity findings (CORS, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
|
||||
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
|
||||
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
|
||||
- Adding rate limiting infrastructure
|
||||
- Adding Content-Security-Policy or other security headers
|
||||
- Dependency version pinning or lockfile generation
|
||||
- Server-side credential validation for Basic auth (proxy responsibility)
|
||||
|
||||
## Suggested Tech Stack
|
||||
|
||||
| Layer | Technology | Rationale |
|
||||
|-------|-----------|-----------|
|
||||
| Secret comparison | `hmac.compare_digest()` (stdlib) | Specified by prompt; constant-time comparison prevents timing attacks |
|
||||
| Template escaping | Python `str.replace()` | Minimal approach to escape `</` in JSON before Jinja2 rendering |
|
||||
| Client-side escaping | `escapeHtml()` from `components.js` | Already available in the codebase; standard HTML entity escaping |
|
||||
| Configuration | Pydantic Settings | Specified by project stack; used for `WEB_TRUSTED_PROXY_HOSTS` |
|
||||
| Testing | pytest, pytest-asyncio | Specified by project stack |
|
||||
65
.plans/2026/03/09/01-security-fixes/prompt.md
Normal file
65
.plans/2026/03/09/01-security-fixes/prompt.md
Normal file
@@ -0,0 +1,65 @@
|
||||
# Phase: 01-security-fixes
|
||||
|
||||
## Overview
|
||||
|
||||
Address CRITICAL and HIGH severity vulnerabilities identified in the MeshCore Hub security audit across API and Web components. These findings represent exploitable vulnerabilities including XSS, timing attacks, authentication bypasses, and insecure defaults.
|
||||
|
||||
## Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities
|
||||
- Harden authentication mechanisms against timing attacks and header forgery
|
||||
- Prevent XSS vectors in both server-rendered HTML and client-side JavaScript
|
||||
- Secure default MQTT configuration against unauthenticated access
|
||||
|
||||
## Requirements
|
||||
|
||||
### C1 + H2 — Remove legacy HTML dashboard endpoint
|
||||
- **File:** `src/meshcore_hub/api/routes/dashboard.py:367-536`
|
||||
- The `GET /api/v1/dashboard/` endpoint is a standalone HTML page with two CRITICAL/HIGH issues: stored XSS (unescaped DB content in f-string HTML) and missing authentication
|
||||
- The SPA web dashboard provides a full-featured replacement, making this endpoint redundant
|
||||
- **Fix:** Remove the `dashboard()` route handler and its `HTMLResponse` import. Keep all JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) intact.
|
||||
|
||||
### H1 — Fix timing attack on API key comparison
|
||||
- **Files:** `api/auth.py:82,127` | `api/metrics.py:57`
|
||||
- All secret comparisons use Python `==`, which is not constant-time
|
||||
- **Fix:** Replace with `hmac.compare_digest()` for all key/credential comparisons
|
||||
|
||||
### H3 — Harden admin auth against proxy header forgery
|
||||
- **File:** `web/app.py:73-86,239`
|
||||
- Admin access trusts `X-Forwarded-User`, `X-Auth-Request-User`, or `Authorization: Basic` header
|
||||
- `ProxyHeadersMiddleware(trusted_hosts="*")` accepts forged headers from any client
|
||||
- The `Authorization: Basic` check must be preserved — it is required by the Nginx Proxy Manager (NPM) Access List setup documented in README.md (NPM validates credentials and forwards the header)
|
||||
- **Fix:** Add a `WEB_TRUSTED_PROXY_HOSTS` config setting (default `*` for backward compatibility). Pass it to `ProxyHeadersMiddleware(trusted_hosts=...)`. Add a startup warning when `WEB_ADMIN_ENABLED=true` and `trusted_hosts` is still `*`, recommending operators restrict it to their proxy IP. Do NOT remove the Basic auth header check or validate credentials server-side — that is the proxy's responsibility.
|
||||
|
||||
### H4 — Fix XSS via config_json|safe script block breakout
|
||||
- **File:** `web/templates/spa.html:188` | `web/app.py:157-183`
|
||||
- Operator config values injected into `<script>` block with `|safe` — a value containing `</script>` breaks out and executes arbitrary JS
|
||||
- **Fix:** Escape `</` sequences in the JSON string: `config_json = json.dumps(config).replace("</", "<\\/")`
|
||||
|
||||
### H5 — Fix stored XSS via unsafeHTML/innerHTML with API-sourced data
|
||||
- **Files:** `web/static/js/spa/pages/admin/node-tags.js:243,272,454` | `admin/members.js:309`
|
||||
- Node names, tag keys, and member names from the API are interpolated into HTML via `unsafeHTML()` and direct `innerHTML` assignment
|
||||
- **Fix:** Use `escapeHtml()` (already in `components.js`) on API data before HTML interpolation, or replace with lit-html safe templating
|
||||
|
||||
|
||||
## Constraints
|
||||
|
||||
- Must not break existing functionality or API contracts
|
||||
- Changes to docker-compose.yml and mosquitto.conf must remain backward-compatible (use env var defaults)
|
||||
- The `_is_authenticated_proxy_request` function must continue to work with OAuth2 proxy setups — only add defense-in-depth, don't remove proxy header support entirely
|
||||
|
||||
## Out of Scope
|
||||
|
||||
- MEDIUM severity findings (CORS config, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
|
||||
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
|
||||
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
|
||||
- Adding rate limiting infrastructure
|
||||
- Adding Content-Security-Policy or other security headers
|
||||
- Dependency version pinning or lockfile generation
|
||||
|
||||
## References
|
||||
|
||||
- Security audit performed in this conversation (2026-03-09)
|
||||
- OWASP Top 10: XSS (A7:2017), Broken Authentication (A2:2017)
|
||||
- Python `hmac.compare_digest` documentation
|
||||
- FastAPI security best practices
|
||||
54
.plans/2026/03/09/01-security-fixes/reviews/cycle/001.yaml
Normal file
54
.plans/2026/03/09/01-security-fixes/reviews/cycle/001.yaml
Normal file
@@ -0,0 +1,54 @@
|
||||
# Code review round 001
|
||||
# Phase: .plans/2026/03/09/01-security-fixes
|
||||
# Scope: full
|
||||
# Generated by: /jp-codereview
|
||||
|
||||
issues:
|
||||
- id: "ISSUE-001"
|
||||
severity: "MINOR"
|
||||
category: "integration"
|
||||
file: "src/meshcore_hub/web/app.py"
|
||||
line: 251
|
||||
description: |
|
||||
The startup warning for insecure trusted proxy hosts checks `settings.web_admin_enabled`
|
||||
instead of the effective admin_enabled value that gets stored in `app.state.admin_enabled`.
|
||||
The `create_app()` function accepts an `admin_enabled` parameter (line 193) that can override
|
||||
the setting. If a caller passes `admin_enabled=True` but `settings.web_admin_enabled` is False,
|
||||
the warning will not fire despite admin being enabled. In practice this does not affect production
|
||||
deployments (CLI always uses the settings value), only programmatic/test usage.
|
||||
suggestion: |
|
||||
Consider computing the effective admin_enabled value before the warning check and using
|
||||
that for both the warning and `app.state.admin_enabled`, e.g.:
|
||||
`effective_admin = admin_enabled if admin_enabled is not None else settings.web_admin_enabled`
|
||||
related_tasks:
|
||||
- "TASK-004"
|
||||
|
||||
- id: "ISSUE-002"
|
||||
severity: "MINOR"
|
||||
category: "style"
|
||||
file: "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
|
||||
line: 3
|
||||
description: |
|
||||
The `unsafeHTML` import is retained and still used on lines 243 and 272. Although the
|
||||
API-sourced data (`nodeName`) is now safely escaped via `escapeHtml()` before interpolation,
|
||||
the continued use of `unsafeHTML()` may confuse future reviewers into thinking the XSS
|
||||
fix is incomplete. The `unsafeHTML()` is needed to render the translation template's HTML
|
||||
tags (e.g., `<strong>`), so this is functionally correct.
|
||||
suggestion: |
|
||||
Add a brief inline comment above each `unsafeHTML()` call explaining that the dynamic
|
||||
values are pre-escaped and `unsafeHTML()` is only needed for the template's HTML formatting.
|
||||
related_tasks:
|
||||
- "TASK-006"
|
||||
|
||||
summary:
|
||||
total_issues: 2
|
||||
critical: 0
|
||||
major: 0
|
||||
minor: 2
|
||||
by_category:
|
||||
integration: 1
|
||||
architecture: 0
|
||||
security: 0
|
||||
duplication: 0
|
||||
error-handling: 0
|
||||
style: 1
|
||||
70
.plans/2026/03/09/01-security-fixes/reviews/prd.md
Normal file
70
.plans/2026/03/09/01-security-fixes/reviews/prd.md
Normal file
@@ -0,0 +1,70 @@
|
||||
# PRD Review
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
|
||||
> Prompt: `.plans/2026/03/09/01-security-fixes/prompt.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The PRD fully covers all five security requirements from the prompt with clear, implementable, and testable acceptance criteria. No contradictions, blocking ambiguities, or feasibility concerns were found. One prompt goal ("Secure default MQTT configuration") has no corresponding requirement in either the prompt or the PRD, but since no prompt requirement addresses it, the PRD correctly does not fabricate one.
|
||||
|
||||
## Coverage Assessment
|
||||
|
||||
| Prompt Item | PRD Section | Covered? | Notes |
|
||||
|---|---|---|---|
|
||||
| C1+H2: Remove legacy HTML dashboard endpoint | REQ-001 | Yes | Route removal, import cleanup, sub-route preservation all specified |
|
||||
| H1: Fix timing attack on API key comparison | REQ-002 | Yes | Files and `hmac.compare_digest()` approach match |
|
||||
| H3: Harden admin auth / proxy header forgery | REQ-003 | Yes | Config setting, default, warning, Basic auth preservation all covered |
|
||||
| H4: Fix XSS via config_json\|safe breakout | REQ-004 | Yes | Escape approach and XSS test payload specified |
|
||||
| H5: Fix stored XSS via unsafeHTML/innerHTML | REQ-005 | Yes | Files, fix approach, and XSS test payloads specified |
|
||||
| Constraint: No breaking changes to API contracts | REQ-006 | Yes | |
|
||||
| Constraint: docker-compose.yml/mosquitto.conf backward-compatible | REQ-006 | Partial | REQ-006 covers Docker Compose but not mosquitto.conf; moot since no requirement changes mosquitto.conf |
|
||||
| Constraint: _is_authenticated_proxy_request works with OAuth2 | REQ-003, REQ-007 | Yes | |
|
||||
| Goal: Secure default MQTT configuration | -- | No | Goal stated in prompt but no prompt requirement addresses it; PRD correctly does not fabricate one |
|
||||
| Out of scope items | Scope section | Yes | All exclusions match prompt |
|
||||
|
||||
**Coverage summary:** 5 of 5 prompt requirements fully covered, 1 constraint partially covered (moot), 1 prompt goal has no corresponding requirement in the prompt itself.
|
||||
|
||||
## Requirement Evaluation
|
||||
|
||||
All requirements passed evaluation. Minor observations noted below.
|
||||
|
||||
### REQ-003: Add configurable trusted proxy hosts
|
||||
|
||||
- **Implementability:** Pass -- A developer familiar with Pydantic Settings and `ProxyHeadersMiddleware` can implement this without ambiguity. The env var format (comma-separated list vs. single value) is not explicitly stated but follows standard Pydantic patterns.
|
||||
- **Testability:** Pass
|
||||
- **Completeness:** Pass
|
||||
- **Consistency:** Pass
|
||||
|
||||
### REQ-006: Backward compatibility
|
||||
|
||||
- **Implementability:** Pass
|
||||
- **Testability:** Pass
|
||||
- **Completeness:** Pass -- The prompt constraint about mosquitto.conf backward compatibility is not explicitly mentioned, but no requirement modifies mosquitto.conf, making this moot.
|
||||
- **Consistency:** Pass
|
||||
|
||||
## Structural Issues
|
||||
|
||||
### Contradictions
|
||||
|
||||
None found.
|
||||
|
||||
### Ambiguities
|
||||
|
||||
None that would block implementation. The `WEB_TRUSTED_PROXY_HOSTS` env var format is a minor detail resolvable by the developer from the `ProxyHeadersMiddleware` API and standard Pydantic Settings patterns.
|
||||
|
||||
### Missing Edge Cases
|
||||
|
||||
None significant. The `hmac.compare_digest()` change (REQ-002) assumes the existing code handles the "no key configured" case before reaching the comparison, which is standard practice and verifiable during implementation.
|
||||
|
||||
### Feasibility Concerns
|
||||
|
||||
None.
|
||||
|
||||
### Scope Inconsistencies
|
||||
|
||||
The prompt states a goal of "Secure default MQTT configuration against unauthenticated access" but provides no requirement for it. The PRD drops this goal without explanation. This is a prompt-level gap, not a PRD-level gap -- the PRD should not invent requirements that the prompt does not specify.
|
||||
|
||||
## Action Items
|
||||
|
||||
No action items. The PRD is ready for task breakdown.
|
||||
90
.plans/2026/03/09/01-security-fixes/reviews/tasks.md
Normal file
90
.plans/2026/03/09/01-security-fixes/reviews/tasks.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Task Review
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> Tasks: `.plans/2026/03/09/01-security-fixes/tasks.yaml`
|
||||
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The task list is structurally sound, correctly ordered, and fully covers all 7 PRD requirements. The dependency graph is a valid DAG with no cycles or invalid references. No ordering issues, coverage gaps, vague tasks, or invalid fields were found. Two non-blocking warnings are noted: TASK-006 and TASK-007 (frontend XSS fixes) lack corresponding test tasks, and two pairs of independent tasks share output files but modify independent sections.
|
||||
|
||||
## Dependency Validation
|
||||
|
||||
### Reference Validity
|
||||
|
||||
All dependency references are valid. Every task ID referenced in a `dependencies` list corresponds to an existing task in the inventory.
|
||||
|
||||
### DAG Validation
|
||||
|
||||
The dependency graph is a valid directed acyclic graph. No cycles detected.
|
||||
|
||||
Topological layers:
|
||||
- **Layer 0 (roots):** TASK-001, TASK-002, TASK-003, TASK-005, TASK-006, TASK-007
|
||||
- **Layer 1:** TASK-004 (depends on TASK-003), TASK-008 (depends on TASK-001), TASK-009 (depends on TASK-002), TASK-011 (depends on TASK-005)
|
||||
- **Layer 2:** TASK-010 (depends on TASK-003, TASK-004), TASK-012 (depends on TASK-003, TASK-004)
|
||||
|
||||
### Orphan Tasks
|
||||
|
||||
No orphan tasks detected. All non-root tasks with dependencies are either terminal test/docs tasks (TASK-008, TASK-009, TASK-010, TASK-011, TASK-012) or integration tasks (TASK-004). Root tasks without dependents (TASK-006, TASK-007) are excluded from orphan detection per the review protocol.
|
||||
|
||||
## Ordering Check
|
||||
|
||||
No blocking ordering issues detected.
|
||||
|
||||
**Observation (non-blocking):** Two pairs of independent tasks share output files:
|
||||
|
||||
1. **TASK-004 and TASK-005** both modify `src/meshcore_hub/web/app.py` without a dependency between them. TASK-004 modifies `ProxyHeadersMiddleware` (line ~239) and adds a startup warning, while TASK-005 modifies `_build_config_json` (line ~183). These are independent functions in the same file; no actual conflict exists.
|
||||
|
||||
2. **TASK-010 and TASK-011** both modify `tests/test_web/test_app.py` without a dependency between them. Both add new test functions to the same test file. No actual conflict exists.
|
||||
|
||||
These are not blocking because neither task creates the shared file — both modify existing files in independent sections. Adding artificial dependencies would unnecessarily serialize parallelizable work.
|
||||
|
||||
## Coverage Check
|
||||
|
||||
### Uncovered Requirements
|
||||
|
||||
All PRD requirements are covered.
|
||||
|
||||
### Phantom References
|
||||
|
||||
No phantom references detected.
|
||||
|
||||
**Coverage summary:** 7 of 7 PRD requirements covered by tasks.
|
||||
|
||||
| Requirement | Tasks |
|
||||
|---|---|
|
||||
| REQ-001 | TASK-001, TASK-008 |
|
||||
| REQ-002 | TASK-002, TASK-009 |
|
||||
| REQ-003 | TASK-003, TASK-004, TASK-010, TASK-012 |
|
||||
| REQ-004 | TASK-005, TASK-011 |
|
||||
| REQ-005 | TASK-006, TASK-007 |
|
||||
| REQ-006 | TASK-001, TASK-003, TASK-004, TASK-005, TASK-006, TASK-007, TASK-008, TASK-010, TASK-011, TASK-012 |
|
||||
| REQ-007 | TASK-002, TASK-004, TASK-009 |
|
||||
|
||||
## Scope Check
|
||||
|
||||
### Tasks Too Large
|
||||
|
||||
No tasks flagged as too large. No task has `estimated_complexity: large`.
|
||||
|
||||
### Tasks Too Vague
|
||||
|
||||
No tasks flagged as too vague. All tasks have detailed descriptions (>50 chars), multiple testable acceptance criteria, and specific file paths in `files_affected`.
|
||||
|
||||
### Missing Test Tasks
|
||||
|
||||
Two implementation tasks lack corresponding test tasks:
|
||||
|
||||
- **TASK-006** (Fix stored XSS in admin node-tags page) — modifies `admin/node-tags.js` but no test task verifies the XSS fix in this JavaScript file. The acceptance criteria include XSS payload testing, but no automated test is specified. This is a frontend JavaScript change where manual verification or browser-based testing may be appropriate.
|
||||
|
||||
- **TASK-007** (Fix stored XSS in admin members page) — modifies `admin/members.js` but no test task verifies the XSS fix in this JavaScript file. Same reasoning as TASK-006.
|
||||
|
||||
**Note:** These are warnings, not blocking issues. The project's test infrastructure (`tests/test_web/`) focuses on server-side rendering and API responses. Client-side JavaScript XSS fixes are typically verified through acceptance criteria rather than automated unit tests.
|
||||
|
||||
### Field Validation
|
||||
|
||||
All tasks have valid fields:
|
||||
|
||||
- **Roles:** All `suggested_role` values are valid (`python`, `frontend`, `docs`).
|
||||
- **Complexity:** All `estimated_complexity` values are valid (`small`, `medium`).
|
||||
- **Completeness:** All 12 tasks have all required fields (`id`, `title`, `description`, `requirements`, `dependencies`, `suggested_role`, `acceptance_criteria`, `estimated_complexity`, `files_affected`). All list fields have at least one entry.
|
||||
22
.plans/2026/03/09/01-security-fixes/state.yaml
Normal file
22
.plans/2026/03/09/01-security-fixes/state.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
status: running
|
||||
phase_path: .plans/2026/03/09/01-security-fixes
|
||||
branch: fix/security-fixes
|
||||
current_phase: summary
|
||||
current_task: null
|
||||
fix_round: 0
|
||||
last_review_round: 1
|
||||
review_loop_exit_reason: success
|
||||
quality_gate: pass
|
||||
tasks:
|
||||
TASK-001: completed
|
||||
TASK-002: completed
|
||||
TASK-003: completed
|
||||
TASK-004: completed
|
||||
TASK-005: completed
|
||||
TASK-006: completed
|
||||
TASK-007: completed
|
||||
TASK-008: completed
|
||||
TASK-009: completed
|
||||
TASK-010: completed
|
||||
TASK-011: completed
|
||||
TASK-012: completed
|
||||
117
.plans/2026/03/09/01-security-fixes/summary.md
Normal file
117
.plans/2026/03/09/01-security-fixes/summary.md
Normal file
@@ -0,0 +1,117 @@
|
||||
# Phase Summary
|
||||
|
||||
> Phase: `.plans/2026/03/09/01-security-fixes`
|
||||
> Generated by: `/jp-summary`
|
||||
|
||||
## Project Overview
|
||||
|
||||
This phase addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes are backward-compatible and preserve existing API contracts.
|
||||
|
||||
### Goals
|
||||
|
||||
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
|
||||
- Harden API key comparison against timing side-channel attacks
|
||||
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
|
||||
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
|
||||
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
|
||||
|
||||
## Task Execution
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total tasks | 12 |
|
||||
| Completed | 12 |
|
||||
| Failed | 0 |
|
||||
| Blocked | 0 |
|
||||
| Skipped | 0 |
|
||||
|
||||
### Task Details
|
||||
|
||||
| ID | Title | Role | Complexity | Status |
|
||||
|---|---|---|---|---|
|
||||
| TASK-001 | Remove legacy HTML dashboard endpoint | python | small | completed |
|
||||
| TASK-002 | Replace API key comparisons with constant-time comparison | python | small | completed |
|
||||
| TASK-003 | Add WEB_TRUSTED_PROXY_HOSTS configuration setting | python | small | completed |
|
||||
| TASK-004 | Integrate trusted proxy hosts into web app middleware and add startup warning | python | medium | completed |
|
||||
| TASK-005 | Escape config JSON in template script block to prevent XSS breakout | python | small | completed |
|
||||
| TASK-006 | Fix stored XSS in admin node-tags page | frontend | medium | completed |
|
||||
| TASK-007 | Fix stored XSS in admin members page | frontend | small | completed |
|
||||
| TASK-008 | Write tests for legacy dashboard endpoint removal | python | small | completed |
|
||||
| TASK-009 | Write tests for constant-time API key comparison | python | small | completed |
|
||||
| TASK-010 | Write tests for trusted proxy hosts configuration and startup warning | python | medium | completed |
|
||||
| TASK-011 | Write tests for config JSON script block escaping | python | small | completed |
|
||||
| TASK-012 | Update documentation for WEB_TRUSTED_PROXY_HOSTS setting | docs | small | completed |
|
||||
|
||||
### Requirement Coverage
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total PRD requirements | 7 |
|
||||
| Requirements covered by completed tasks | 7 |
|
||||
| Requirements with incomplete coverage | 0 |
|
||||
|
||||
All functional requirements (REQ-001 through REQ-005) and non-functional requirements (REQ-006, REQ-007) are fully covered by completed tasks.
|
||||
|
||||
## Files Created and Modified
|
||||
|
||||
### Created
|
||||
|
||||
- `tests/test_web/test_app.py`
|
||||
|
||||
### Modified
|
||||
|
||||
- `src/meshcore_hub/api/routes/dashboard.py`
|
||||
- `src/meshcore_hub/api/auth.py`
|
||||
- `src/meshcore_hub/api/metrics.py`
|
||||
- `src/meshcore_hub/common/config.py`
|
||||
- `src/meshcore_hub/web/app.py`
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
|
||||
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
|
||||
- `tests/test_api/test_dashboard.py`
|
||||
- `tests/test_api/test_auth.py`
|
||||
- `tests/test_common/test_config.py`
|
||||
- `README.md`
|
||||
- `AGENTS.md`
|
||||
- `PLAN.md`
|
||||
|
||||
## Review Rounds
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total review rounds | 1 |
|
||||
| Total issues found | 2 |
|
||||
| Issues fixed | 2 |
|
||||
| Issues deferred | 0 |
|
||||
| Issues remaining | 0 |
|
||||
| Regressions introduced | 0 |
|
||||
|
||||
### Round Details
|
||||
|
||||
#### Round 1 (scope: full)
|
||||
|
||||
- **Issues found:** 2 (0 CRITICAL, 0 MAJOR, 2 MINOR)
|
||||
- **Issues fixed:** 2 (both MINOR issues were addressed post-review)
|
||||
- **Exit reason:** success (no CRITICAL or MAJOR issues)
|
||||
|
||||
## Known Issues and Deferred Items
|
||||
|
||||
No known issues. Both MINOR issues identified in the code review were addressed:
|
||||
|
||||
- **ISSUE-001** (MINOR, integration) -- Startup warning for proxy hosts used `settings.web_admin_enabled` instead of the effective admin_enabled value. Fixed by computing `effective_admin` before the warning check.
|
||||
- **ISSUE-002** (MINOR, style) -- `unsafeHTML()` calls on pre-escaped data lacked explanatory comments. Fixed by adding inline HTML comments explaining that dynamic values are pre-escaped.
|
||||
|
||||
## Decisions
|
||||
|
||||
- **Truthiness guards for `hmac.compare_digest()`** -- Added `read_key and ...` / `admin_key and ...` guards in `require_read` because either key can be `None` when only one is configured, and `hmac.compare_digest()` raises `TypeError` on `None` arguments. This ensures the existing behavior of accepting either key type when configured.
|
||||
- **`unsafeHTML()` retained with `escapeHtml()` pre-processing** -- The `unsafeHTML()` calls in admin JS pages were retained because translation strings contain intentional HTML formatting tags (e.g., `<strong>`). API-sourced data is escaped before interpolation, making this pattern safe.
|
||||
- **`innerHTML` retained for tag delete confirmation** -- The delete confirmation in `node-tags.js` uses `innerHTML` because the translation template includes `<span>` formatting. The dynamic tag key is escaped with `escapeHtml()` before interpolation.
|
||||
|
||||
## Suggested Next Steps
|
||||
|
||||
1. Run full manual testing of admin pages (node-tags, members) with XSS payloads to verify fixes in a browser environment.
|
||||
2. Test `WEB_TRUSTED_PROXY_HOSTS` with a real reverse proxy (Traefik/Nginx) to verify proxy header trust restriction works as expected.
|
||||
3. Push commits and create a pull request for merge into `main`.
|
||||
401
.plans/2026/03/09/01-security-fixes/tasks.yaml
Normal file
401
.plans/2026/03/09/01-security-fixes/tasks.yaml
Normal file
@@ -0,0 +1,401 @@
|
||||
# Task list generated from PRD: .plans/2026/03/09/01-security-fixes/prd.md
|
||||
# Generated by: /jp-task-list
|
||||
|
||||
tasks:
|
||||
- id: "TASK-001"
|
||||
title: "Remove legacy HTML dashboard endpoint"
|
||||
description: |
|
||||
Remove the `dashboard()` route handler from `src/meshcore_hub/api/routes/dashboard.py` (lines ~367-536).
|
||||
This handler renders a standalone HTML page using f-string HTML with unescaped database content (stored XSS)
|
||||
and has no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`)
|
||||
must remain intact and unchanged.
|
||||
|
||||
Specifically:
|
||||
1. Delete the `dashboard()` async function and its `@router.get("")` decorator (the handler that returns HTMLResponse).
|
||||
2. Remove the `HTMLResponse` import from `fastapi.responses` if it is no longer used by any remaining route.
|
||||
3. Verify that `GET /api/v1/dashboard/stats`, `/activity`, `/message-activity`, and `/node-count` still function.
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "The `dashboard()` route handler is removed from `api/routes/dashboard.py`"
|
||||
- "`HTMLResponse` import is removed if no longer used"
|
||||
- "`GET /api/v1/dashboard/` returns 404 or 405"
|
||||
- "`GET /api/v1/dashboard/stats` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/activity` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/message-activity` returns valid JSON with authentication"
|
||||
- "`GET /api/v1/dashboard/node-count` returns valid JSON with authentication"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/api/routes/dashboard.py"
|
||||
|
||||
- id: "TASK-002"
|
||||
title: "Replace API key comparisons with constant-time comparison"
|
||||
description: |
|
||||
Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent
|
||||
timing side-channel attacks.
|
||||
|
||||
In `src/meshcore_hub/api/auth.py`:
|
||||
1. Add `import hmac` at the top of the file.
|
||||
2. Line ~82 in `require_read`: replace `if token == read_key or token == admin_key:` with
|
||||
`if hmac.compare_digest(token, read_key) or hmac.compare_digest(token, admin_key):`.
|
||||
3. Line ~127 in `require_admin`: replace `if token == admin_key:` with
|
||||
`if hmac.compare_digest(token, admin_key):`.
|
||||
|
||||
In `src/meshcore_hub/api/metrics.py`:
|
||||
1. Add `import hmac` at the top of the file.
|
||||
2. Line ~57: replace `return username == "metrics" and password == read_key` with
|
||||
`return hmac.compare_digest(username, "metrics") and hmac.compare_digest(password, read_key)`.
|
||||
|
||||
Note: `hmac.compare_digest()` requires both arguments to be strings (or both bytes). The existing code
|
||||
already works with strings, so no type conversion is needed.
|
||||
requirements:
|
||||
- "REQ-002"
|
||||
- "REQ-007"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "All API key comparisons in `api/auth.py` use `hmac.compare_digest()`"
|
||||
- "All credential comparisons in `api/metrics.py` use `hmac.compare_digest()`"
|
||||
- "`hmac` is imported in both files"
|
||||
- "Valid API keys are accepted and invalid keys are rejected (no behavior change)"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/api/auth.py"
|
||||
- "src/meshcore_hub/api/metrics.py"
|
||||
|
||||
- id: "TASK-003"
|
||||
title: "Add WEB_TRUSTED_PROXY_HOSTS configuration setting"
|
||||
description: |
|
||||
Add a `web_trusted_proxy_hosts` field to the web settings in `src/meshcore_hub/common/config.py`.
|
||||
|
||||
1. In the `WebSettings` class (or the relevant settings class containing web config), add:
|
||||
```python
|
||||
web_trusted_proxy_hosts: str = Field(default="*", description="Comma-separated list of trusted proxy hosts or '*' for all")
|
||||
```
|
||||
2. The field should accept a string value. The `ProxyHeadersMiddleware` in uvicorn accepts either `"*"` or a list of strings.
|
||||
If the value is `"*"`, pass it directly. Otherwise, split on commas and strip whitespace to produce a list.
|
||||
|
||||
This task only adds the configuration field. The middleware integration and startup warning are in TASK-004.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "A `web_trusted_proxy_hosts` setting exists in the configuration with default value `*`"
|
||||
- "The setting can be configured via the `WEB_TRUSTED_PROXY_HOSTS` environment variable"
|
||||
- "The setting accepts `*` or a comma-separated list of hostnames/IPs"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/common/config.py"
|
||||
|
||||
- id: "TASK-004"
|
||||
title: "Integrate trusted proxy hosts into web app middleware and add startup warning"
|
||||
description: |
|
||||
Update `src/meshcore_hub/web/app.py` to use the new `WEB_TRUSTED_PROXY_HOSTS` setting and emit a
|
||||
startup warning when using the insecure default.
|
||||
|
||||
1. Find the `ProxyHeadersMiddleware` addition (line ~239):
|
||||
```python
|
||||
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*")
|
||||
```
|
||||
Replace the hardcoded `"*"` with the configured value. If the config value is `"*"`, pass `"*"`.
|
||||
Otherwise, split the comma-separated string into a list of strings.
|
||||
|
||||
2. Add a startup warning (in the app factory or lifespan) when `WEB_ADMIN_ENABLED=true` and
|
||||
`WEB_TRUSTED_PROXY_HOSTS` is `"*"`:
|
||||
```python
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
if settings.web_admin_enabled and settings.web_trusted_proxy_hosts == "*":
|
||||
logger.warning(
|
||||
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
|
||||
"Consider restricting to your reverse proxy IP for production deployments."
|
||||
)
|
||||
```
|
||||
|
||||
3. Verify that the `_is_authenticated_proxy_request` function still accepts `X-Forwarded-User`,
|
||||
`X-Auth-Request-User`, and `Authorization: Basic` headers — do not modify that function.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "`ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`"
|
||||
- "A warning is logged at startup when admin is enabled and trusted hosts is `*`"
|
||||
- "The warning recommends restricting trusted hosts to the proxy IP"
|
||||
- "`_is_authenticated_proxy_request` still accepts all three header types"
|
||||
- "Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/app.py"
|
||||
|
||||
- id: "TASK-005"
|
||||
title: "Escape config JSON in template script block to prevent XSS breakout"
|
||||
description: |
|
||||
Prevent XSS via `</script>` breakout in the config JSON template injection in `src/meshcore_hub/web/app.py`.
|
||||
|
||||
In the `_build_config_json` function (or wherever `config_json` is prepared for the template, around
|
||||
line 183), after calling `json.dumps(config)`, escape `</` sequences:
|
||||
```python
|
||||
config_json = json.dumps(config).replace("</", "<\\/")
|
||||
```
|
||||
|
||||
This prevents a config value containing `</script><script>alert(1)</script>` from breaking out of the
|
||||
`<script>` block in `spa.html` (line ~188: `window.__APP_CONFIG__ = {{ config_json|safe }};`).
|
||||
|
||||
The `|safe` filter in the template remains unchanged — the escaping happens in Python before the value
|
||||
reaches Jinja2. The SPA client-side JavaScript can parse JSON containing `<\/` sequences because this
|
||||
is valid JSON per the spec.
|
||||
requirements:
|
||||
- "REQ-004"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "`config_json` is escaped by replacing `</` with `<\\/` before template rendering"
|
||||
- "The `|safe` filter continues to be used in the template"
|
||||
- "A config value containing `</script><script>alert(1)</script>` does not execute JavaScript"
|
||||
- "The SPA application correctly parses the escaped config JSON"
|
||||
- "Normal config values without special characters render unchanged"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/app.py"
|
||||
|
||||
- id: "TASK-006"
|
||||
title: "Fix stored XSS in admin node-tags page"
|
||||
description: |
|
||||
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js` to prevent
|
||||
stored XSS.
|
||||
|
||||
Three locations need fixing:
|
||||
|
||||
1. **Line ~243** — `unsafeHTML()` with nodeName in copy-all confirmation:
|
||||
```javascript
|
||||
<p class="mb-4">${unsafeHTML(t('common.copy_all_entity_description', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
```
|
||||
Replace `unsafeHTML()` with safe rendering. Either escape `nodeName` with `escapeHtml()` before
|
||||
passing to `t()`, or use `textContent`-based rendering.
|
||||
|
||||
2. **Line ~272** — `unsafeHTML()` with nodeName in delete-all confirmation:
|
||||
```javascript
|
||||
<p class="mb-4">${unsafeHTML(t('common.delete_all_entity_confirm', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
|
||||
```
|
||||
Same fix as above.
|
||||
|
||||
3. **Line ~454** — `innerHTML` with tag key in delete confirmation:
|
||||
```javascript
|
||||
container.querySelector('#delete_tag_confirm_message').innerHTML = confirmMsg;
|
||||
```
|
||||
where `confirmMsg` is built with `activeTagKey` interpolated into an HTML span. Replace `innerHTML`
|
||||
with `textContent`, or escape `activeTagKey` with `escapeHtml()` before interpolation.
|
||||
|
||||
Import `escapeHtml` from `../components.js` if not already imported. The function escapes `<`, `>`,
|
||||
`&`, `"`, and `'` characters using DOM textContent.
|
||||
requirements:
|
||||
- "REQ-005"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "frontend"
|
||||
acceptance_criteria:
|
||||
- "Node names in node-tags.js are escaped before HTML rendering"
|
||||
- "Tag keys in node-tags.js are escaped before HTML rendering"
|
||||
- "All `unsafeHTML()` calls on API-sourced data are replaced with safe alternatives"
|
||||
- "All `innerHTML` assignments of API-sourced data are replaced with safe alternatives"
|
||||
- "A node name containing `<img src=x onerror=alert(1)>` renders as text"
|
||||
- "Normal names without special characters display correctly"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
|
||||
|
||||
- id: "TASK-007"
|
||||
title: "Fix stored XSS in admin members page"
|
||||
description: |
|
||||
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/members.js` to prevent
|
||||
stored XSS.
|
||||
|
||||
**Line ~309** — `innerHTML` with memberName in delete confirmation:
|
||||
```javascript
|
||||
container.querySelector('#delete_confirm_message').innerHTML = confirmMsg;
|
||||
```
|
||||
where `confirmMsg` is built from `t('common.delete_entity_confirm', { entity: ..., name: memberName })`.
|
||||
`memberName` comes from `row.dataset.memberName` which is API-sourced data.
|
||||
|
||||
Fix by escaping `memberName` with `escapeHtml()` before passing to `t()`, or replace `innerHTML` with
|
||||
`textContent`.
|
||||
|
||||
Import `escapeHtml` from `../components.js` if not already imported.
|
||||
requirements:
|
||||
- "REQ-005"
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "frontend"
|
||||
acceptance_criteria:
|
||||
- "Member names in members.js are escaped before HTML rendering"
|
||||
- "The `innerHTML` assignment of API-sourced data is replaced with a safe alternative"
|
||||
- "A member name containing `<script>alert(1)</script>` renders as text"
|
||||
- "Normal member names display correctly"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/static/js/spa/pages/admin/members.js"
|
||||
|
||||
- id: "TASK-008"
|
||||
title: "Write tests for legacy dashboard endpoint removal"
|
||||
description: |
|
||||
Add or update tests in `tests/test_api/` to verify that the legacy HTML dashboard endpoint is removed
|
||||
while JSON sub-routes remain functional.
|
||||
|
||||
Tests to add/update:
|
||||
1. `GET /api/v1/dashboard/` returns 404 or 405 (no longer serves HTML).
|
||||
2. `GET /api/v1/dashboard/stats` returns 200 with valid JSON when authenticated.
|
||||
3. `GET /api/v1/dashboard/activity` returns 200 with valid JSON when authenticated.
|
||||
4. `GET /api/v1/dashboard/message-activity` returns 200 with valid JSON when authenticated.
|
||||
5. `GET /api/v1/dashboard/node-count` returns 200 with valid JSON when authenticated.
|
||||
|
||||
Use the existing test fixtures and patterns from `tests/test_api/`. Check `tests/conftest.py` for
|
||||
available fixtures (test client, db session, auth headers).
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-001"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test confirms `GET /api/v1/dashboard/` returns 404 or 405"
|
||||
- "Tests confirm all four JSON sub-routes return valid JSON with authentication"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_api/test_dashboard.py"
|
||||
|
||||
- id: "TASK-009"
|
||||
title: "Write tests for constant-time API key comparison"
|
||||
description: |
|
||||
Add or update tests in `tests/test_api/` to verify that authentication still works correctly after
|
||||
switching to `hmac.compare_digest()`.
|
||||
|
||||
Tests to add/update:
|
||||
1. Valid read key is accepted by read-protected endpoints.
|
||||
2. Valid admin key is accepted by admin-protected endpoints.
|
||||
3. Invalid keys are rejected with 401/403.
|
||||
4. Valid admin key also grants read access.
|
||||
5. Metrics endpoint accepts valid credentials and rejects invalid ones (if metrics auth is testable).
|
||||
|
||||
These tests verify no behavioral regression from the `==` to `hmac.compare_digest()` change.
|
||||
Use existing test patterns and fixtures from `tests/test_api/`.
|
||||
requirements:
|
||||
- "REQ-002"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-002"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Tests confirm valid read key is accepted"
|
||||
- "Tests confirm valid admin key is accepted"
|
||||
- "Tests confirm invalid keys are rejected"
|
||||
- "Tests confirm metrics auth works correctly"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_api/test_auth.py"
|
||||
|
||||
- id: "TASK-010"
|
||||
title: "Write tests for trusted proxy hosts configuration and startup warning"
|
||||
description: |
|
||||
Add tests to verify the `WEB_TRUSTED_PROXY_HOSTS` configuration setting and the startup warning.
|
||||
|
||||
Tests to add:
|
||||
1. Default value of `WEB_TRUSTED_PROXY_HOSTS` is `*`.
|
||||
2. Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP is correctly parsed.
|
||||
3. Setting `WEB_TRUSTED_PROXY_HOSTS` to a comma-separated list is correctly parsed into a list.
|
||||
4. A warning is logged when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`.
|
||||
5. No warning is logged when `WEB_TRUSTED_PROXY_HOSTS` is set to a specific value.
|
||||
|
||||
Place config tests in `tests/test_common/` and web app tests in `tests/test_web/`.
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
- "TASK-004"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Tests confirm default value is `*`"
|
||||
- "Tests confirm specific IP/list parsing works"
|
||||
- "Tests confirm startup warning is emitted with wildcard default"
|
||||
- "Tests confirm no warning when specific hosts are configured"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "tests/test_common/test_config.py"
|
||||
- "tests/test_web/test_app.py"
|
||||
|
||||
- id: "TASK-011"
|
||||
title: "Write tests for config JSON script block escaping"
|
||||
description: |
|
||||
Add tests in `tests/test_web/` to verify that the config JSON escaping prevents XSS breakout.
|
||||
|
||||
Tests to add:
|
||||
1. A config value containing `</script><script>alert(1)</script>` is escaped to `<\/script>...` in
|
||||
the rendered HTML.
|
||||
2. A config value without special characters renders unchanged.
|
||||
3. The escaped JSON is still valid and parseable by `json.loads()` — no manual un-escaping is
|
||||
needed, since the JSON string escape `<\/` is interpreted as `</` by `json.loads()`.
|
||||
|
||||
Test by calling the config JSON builder function directly or by checking the rendered template output.
|
||||
requirements:
|
||||
- "REQ-004"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-005"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test confirms `</script>` in config values is escaped to `<\\/script>`"
|
||||
- "Test confirms normal config values are unaffected"
|
||||
- "Test confirms escaped JSON is still valid and parseable"
|
||||
- "All tests pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_web/test_app.py"
|
||||
|
||||
- id: "TASK-012"
|
||||
title: "Update documentation for WEB_TRUSTED_PROXY_HOSTS setting"
|
||||
description: |
|
||||
Update project documentation to document the new `WEB_TRUSTED_PROXY_HOSTS` environment variable.
|
||||
|
||||
Files to update:
|
||||
|
||||
1. **README.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the environment variables table with description:
|
||||
"Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts).
|
||||
Recommended: set to your reverse proxy IP in production."
|
||||
|
||||
2. **AGENTS.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the Environment Variables section with the same description.
|
||||
|
||||
3. **PLAN.md** — If there is a configuration section, add the new variable there as well.
|
||||
|
||||
Ensure the documentation notes:
|
||||
- Default is `*` for backward compatibility
|
||||
- A startup warning is emitted when using the default with admin enabled
|
||||
- Operators should set this to their reverse proxy IP in production
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-006"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
- "TASK-004"
|
||||
suggested_role: "docs"
|
||||
acceptance_criteria:
|
||||
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in README.md"
|
||||
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in AGENTS.md"
|
||||
- "Documentation notes the default value, startup warning, and production recommendation"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "README.md"
|
||||
- "AGENTS.md"
|
||||
- "PLAN.md"
|
||||
81
.plans/2026/03/17/01-multibyte-support/changelog.md
Normal file
81
.plans/2026/03/17/01-multibyte-support/changelog.md
Normal file
@@ -0,0 +1,81 @@
|
||||
## TASK-001: Verify meshcore_py v2.3.0+ backwards compatibility
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
_(none)_
|
||||
### Notes
|
||||
Research-only task. meshcore_py v2.3.0 handles multibyte path hashes transparently at the protocol level. Path hash size is self-describing in the wire format (upper 2 bits of path length byte encode hash size). The interface receiver, sender, and device wrapper pass event payloads through without manipulation, so no code changes are needed. pyproject.toml dependency confirmed at meshcore>=2.3.0.
|
||||
---
|
||||
|
||||
## TASK-002: Update _normalize_hash_list to accept variable-length hex strings
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/collector/letsmesh_normalizer.py`
|
||||
### Notes
|
||||
Changed length validation from `if len(token) != 2` to `if len(token) < 2 or len(token) % 2 != 0`. Updated docstring to describe variable-length hex hash support. Existing hex validation and uppercase normalization unchanged. All 98 collector tests pass.
|
||||
---
|
||||
|
||||
## TASK-003: Update Pydantic schema descriptions for path_hashes fields
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
- `src/meshcore_hub/common/schemas/events.py`
|
||||
- `src/meshcore_hub/common/schemas/messages.py`
|
||||
- `src/meshcore_hub/common/models/trace_path.py`
|
||||
### Notes
|
||||
Updated TraceDataEvent.path_hashes, TracePathRead.path_hashes, and TracePath model docstring to reflect variable-length hex strings. No Pydantic validators needed changes - both schemas use Optional[list[str]] with no per-element length constraints.
|
||||
---
|
||||
|
||||
## TASK-004: Update SCHEMAS.md documentation for multibyte path hashes
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
- `SCHEMAS.md`
|
||||
### Notes
|
||||
Updated path_hashes field description from "2-character" to variable-length hex. Updated example to include mixed-length hashes ["4a", "b3fa", "02"]. Added firmware v1.14 compatibility note.
|
||||
---
|
||||
|
||||
## TASK-008: Verify web dashboard trace path display handles variable-length hashes
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
_(none)_
|
||||
### Notes
|
||||
Verification-only task. The web dashboard SPA has no trace path page and no JavaScript/CSS code referencing path_hash or pathHash. Trace path data is only served by the REST API which returns path_hashes as list[str] with no length constraints. No changes needed.
|
||||
---
|
||||
|
||||
## TASK-005: Write tests for multibyte path hash normalizer
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
- `tests/test_collector/test_letsmesh_normalizer.py`
|
||||
### Files Modified
|
||||
- `tests/test_collector/test_subscriber.py`
|
||||
### Notes
|
||||
Created 12 unit tests for _normalize_hash_list covering all 7 required scenarios plus edge cases. Added 2 integration tests to test_subscriber.py verifying multibyte path hashes flow through the full collector pipeline. All 35 collector tests pass.
|
||||
---
|
||||
|
||||
## TASK-006: Write tests for database round-trip of multibyte path hashes
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
- `tests/test_common/test_models.py`
|
||||
### Notes
|
||||
Added 2 new test methods to TestTracePathModel: test_multibyte_path_hashes_round_trip and test_mixed_length_path_hashes_round_trip. Verified JSON column handles variable-length strings natively. All 10 model tests pass. No Alembic migration needed.
|
||||
---
|
||||
|
||||
## TASK-007: Write tests for API trace path responses with multibyte hashes
|
||||
**Status:** completed
|
||||
### Files Created
|
||||
_(none)_
|
||||
### Files Modified
|
||||
- `tests/test_api/test_trace_paths.py`
|
||||
### Notes
|
||||
Added TestMultibytePathHashes class with 2 tests: list endpoint with multibyte hashes and detail endpoint with mixed-length hashes. All 9 API trace path tests pass.
|
||||
---
|
||||
146
.plans/2026/03/17/01-multibyte-support/prd.md
Normal file
146
.plans/2026/03/17/01-multibyte-support/prd.md
Normal file
@@ -0,0 +1,146 @@
|
||||
# Product Requirements Document
|
||||
|
||||
> Source: `.plans/2026/03/17/01-multibyte-support/prompt.md`
|
||||
|
||||
## Project Overview
|
||||
|
||||
MeshCore Hub must be updated to support multibyte path hashes introduced in MeshCore firmware v1.14 and the meshcore_py v2.3.0 Python bindings. Path hashes — node identifiers embedded in trace and route data — were previously fixed at 1 byte (2 hex characters) per hop but can now be multiple bytes, allowing longer repeater IDs at the cost of reduced maximum hops. The update must maintain backwards compatibility with nodes running older single-byte firmware.
|
||||
|
||||
## Goals
|
||||
|
||||
- Support variable-length (multibyte) path hashes throughout the data pipeline: interface → MQTT → collector → database → API → web dashboard.
|
||||
- Maintain backwards compatibility so single-byte path hashes from older firmware nodes continue to work without modification.
|
||||
- Update documentation and schemas to accurately describe the new variable-length path hash format.
|
||||
|
||||
## Functional Requirements
|
||||
|
||||
### REQ-001: Accept Variable-Length Path Hashes in Collector
|
||||
|
||||
**Description:** The collector's event handlers and normalizer must accept path hash strings of any even length (not just 2-character strings). Path hashes arriving from both the meshcore_py interface and LetsMesh-compatible ingest must be processed correctly regardless of byte length.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Path hashes with 2-character values (legacy single-byte) are accepted and stored correctly
|
||||
- [ ] Path hashes with 4+ character values (multibyte) are accepted and stored correctly
|
||||
- [ ] Mixed-length path hash arrays (e.g. `["4a", "b3fa", "02"]`) are accepted when the mesh contains nodes with different firmware versions
|
||||
- [ ] The LetsMesh normalizer handles multibyte `pathHashes` values from decoded payloads
|
||||
|
||||
### REQ-002: Update Pydantic Schema Validation for Path Hashes
|
||||
|
||||
**Description:** The `path_hashes` field in event and message Pydantic schemas currently describes values as "2-character node hash identifiers". The schema description and any validation constraints must be updated to permit variable-length hex strings.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] `TraceDataEvent.path_hashes` field description reflects variable-length hex strings
|
||||
- [ ] `MessageEventBase.path_hashes` field description reflects variable-length hex strings (if applicable)
|
||||
- [ ] No schema validation rejects path hash strings longer than 2 characters
|
||||
|
||||
### REQ-003: Verify Database Storage Compatibility
|
||||
|
||||
**Description:** The `path_hashes` column on the `trace_paths` table uses a JSON column type. Confirm that variable-length path hash strings are stored and retrieved correctly without requiring a schema migration.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Multibyte path hash arrays are round-tripped correctly through SQLAlchemy JSON column (store and retrieve)
|
||||
- [ ] No Alembic migration is required (JSON column already supports arbitrary string lengths)
|
||||
|
||||
### REQ-004: Update API Responses for Variable-Length Path Hashes
|
||||
|
||||
**Description:** The trace paths API must return multibyte path hashes faithfully. API response schemas and any serialization logic must not truncate or assume a fixed length.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] `GET /trace-paths` returns multibyte path hash arrays as-is from the database
|
||||
- [ ] `GET /trace-paths/{id}` returns multibyte path hash arrays as-is from the database
|
||||
- [ ] API response examples in documentation reflect variable-length path hashes
|
||||
|
||||
### REQ-005: Update Web Dashboard Trace/Path Display
|
||||
|
||||
**Description:** If the web dashboard displays path hashes (e.g. in trace path views), the rendering must handle variable-length strings without layout breakage or truncation.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Trace path views display multibyte path hashes correctly
|
||||
- [ ] No fixed-width formatting assumes 2-character hash strings
|
||||
|
||||
### REQ-006: Verify meshcore_py Library Compatibility
|
||||
|
||||
**Description:** Confirm that the meshcore_py v2.3.0+ library handles backwards compatibility with single-byte firmware nodes transparently, so that MeshCore Hub does not need to implement compatibility logic itself.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] meshcore_py v2.3.0+ is confirmed to handle mixed single-byte and multibyte path hashes at the protocol level
|
||||
- [ ] The interface receiver and sender components work with the updated library without code changes beyond the dependency version bump (or with minimal changes if the library API changed)
|
||||
|
||||
## Non-Functional Requirements
|
||||
|
||||
### REQ-007: Backwards Compatibility
|
||||
|
||||
**Category:** Reliability
|
||||
|
||||
**Description:** The system must continue to operate correctly when receiving events from nodes running older (single-byte) firmware. No data loss or processing errors may occur for legacy path hash formats.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] Existing test cases with 2-character path hashes continue to pass without modification
|
||||
- [ ] New test cases with multibyte path hashes pass alongside legacy test cases
|
||||
- [ ] No database migration is required that would break rollback to the previous version
|
||||
|
||||
### REQ-008: Documentation Accuracy
|
||||
|
||||
**Category:** Maintainability
|
||||
|
||||
**Description:** All documentation referencing path hash format must be updated to reflect the variable-length nature of multibyte path hashes.
|
||||
|
||||
**Acceptance Criteria:**
|
||||
|
||||
- [ ] `SCHEMAS.md` path hash descriptions updated from "2-character" to "variable-length hex string"
|
||||
- [ ] Code docstrings and field descriptions in models/schemas updated
|
||||
- [ ] Example payloads in documentation include at least one multibyte path hash example
|
||||
|
||||
## Technical Constraints and Assumptions
|
||||
|
||||
### Constraints
|
||||
|
||||
- Python 3.13+ (specified by project)
|
||||
- meshcore_py >= 2.3.0 (already set in `pyproject.toml`)
|
||||
- SQLite with JSON column for path hash storage (existing schema)
|
||||
- No breaking changes to the REST API response format
|
||||
|
||||
### Assumptions
|
||||
|
||||
- The meshcore_py library handles protocol-level backwards compatibility for multibyte path hashes, so MeshCore Hub only needs to ensure its data pipeline accepts variable-length strings
|
||||
- Path hashes are always valid hex strings (even number of characters)
|
||||
- The JSON column type in SQLite/SQLAlchemy does not impose length restrictions on individual array element strings
|
||||
- The `pyproject.toml` dependency has already been bumped to `meshcore>=2.3.0`
|
||||
|
||||
## Scope
|
||||
|
||||
### In Scope
|
||||
|
||||
- Updating Pydantic schema descriptions and validation for variable-length path hashes
|
||||
- Updating collector handlers and normalizer for multibyte path hashes
|
||||
- Verifying database storage compatibility (no migration expected)
|
||||
- Verifying API response compatibility
|
||||
- Updating web dashboard path hash display if applicable
|
||||
- Updating `SCHEMAS.md` and code documentation
|
||||
- Adding/updating tests for multibyte path hashes
|
||||
- Confirming meshcore_py library handles backwards compatibility
|
||||
|
||||
### Out of Scope
|
||||
|
||||
- MeshCore firmware changes or device-side configuration
|
||||
- Adding UI controls for selecting single-byte vs. multibyte mode
|
||||
- Performance optimization of path hash processing
|
||||
- Changes to MQTT topic structure or message format
|
||||
- LetsMesh ingest protocol changes (beyond accepting multibyte values that LetsMesh already provides)
|
||||
|
||||
## Suggested Tech Stack
|
||||
|
||||
| Layer | Technology | Rationale |
|
||||
|-------|-----------|-----------|
|
||||
| MeshCore bindings | meshcore_py >= 2.3.0 | Specified by prompt; provides multibyte path hash support |
|
||||
| Validation | Pydantic v2 | Existing stack — schema descriptions updated |
|
||||
| Database | SQLAlchemy 2.0 + SQLite JSON | Existing stack — no migration needed |
|
||||
| API | FastAPI | Existing stack — no changes to framework |
|
||||
| Testing | pytest + pytest-asyncio | Existing stack — new test cases for multibyte |
|
||||
17
.plans/2026/03/17/01-multibyte-support/prompt.md
Normal file
17
.plans/2026/03/17/01-multibyte-support/prompt.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Phase: 01-multibyte-support
|
||||
|
||||
## Overview
|
||||
|
||||
The latest MeshCore firmware (v1.14) has introduced support for multibyte path hashes. The latest version of the MeshCore Python bindings (meshcore_py) has been updated to use this. This allows longer repeater IDs per hop, but reduces the maximum allowed hops. Nodes running older firmware only support 1-byte path hashes and will not receive messages if other nodes use multibyte path hashes.
|
||||
|
||||
## Goals
|
||||
|
||||
* Update Receiver/Sender component to use latest version of MeshCore Python bindings that support multibyte path hash handling.
|
||||
|
||||
## Requirements
|
||||
|
||||
* Must remain backwards compatible with previous version. Confirm whether this is handled by the Python library.
|
||||
|
||||
## References
|
||||
|
||||
* https://github.com/meshcore-dev/meshcore_py/releases/tag/v2.3.0
|
||||
@@ -0,0 +1,19 @@
|
||||
# Code review round 001
|
||||
# Phase: .plans/2026/03/17/01-multibyte-support
|
||||
# Scope: full
|
||||
# Generated by: /jp-codereview
|
||||
|
||||
issues: []
|
||||
|
||||
summary:
|
||||
total_issues: 0
|
||||
critical: 0
|
||||
major: 0
|
||||
minor: 0
|
||||
by_category:
|
||||
integration: 0
|
||||
architecture: 0
|
||||
security: 0
|
||||
duplication: 0
|
||||
error-handling: 0
|
||||
style: 0
|
||||
57
.plans/2026/03/17/01-multibyte-support/reviews/prd.md
Normal file
57
.plans/2026/03/17/01-multibyte-support/reviews/prd.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# PRD Review
|
||||
|
||||
> Phase: `.plans/2026/03/17/01-multibyte-support`
|
||||
> PRD: `.plans/2026/03/17/01-multibyte-support/prd.md`
|
||||
> Prompt: `.plans/2026/03/17/01-multibyte-support/prompt.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The PRD comprehensively addresses the narrow scope of the original prompt. All prompt items are covered by specific requirements with testable acceptance criteria. The PRD appropriately expands the prompt's Receiver/Sender focus to cover the full data pipeline (collector, schemas, database, API, web), which is necessary for end-to-end multibyte support. No contradictions, feasibility concerns, or scope inconsistencies were found.
|
||||
|
||||
## Coverage Assessment
|
||||
|
||||
| Prompt Item | PRD Section | Covered? | Notes |
|
||||
|---|---|---|---|
|
||||
| Update Receiver/Sender to use latest meshcore_py with multibyte support | REQ-006 | Yes | Covered by library compatibility verification; receiver/sender work with updated bindings |
|
||||
| Must remain backwards compatible with previous version | REQ-007 | Yes | Explicit non-functional requirement with 3 testable acceptance criteria |
|
||||
| Confirm whether backwards compat is handled by the Python library | REQ-006 | Yes | First AC specifically calls for confirming library-level protocol compatibility |
|
||||
| Reference to meshcore_py v2.3.0 release | Constraints, Tech Stack | Yes | Noted in constraints and suggested tech stack table |
|
||||
|
||||
**Coverage summary:** 4 of 4 prompt items fully covered, 0 partially covered, 0 not covered.
|
||||
|
||||
## Requirement Evaluation
|
||||
|
||||
All requirements passed evaluation. Minor observations:
|
||||
|
||||
### REQ-006: Verify meshcore_py Library Compatibility
|
||||
|
||||
- **Implementability:** Pass
|
||||
- **Testability:** Pass -- though the first AC ("confirmed to handle...at the protocol level") is a verification/research task rather than an automated test, this is appropriate given the prompt explicitly asks to confirm library behavior
|
||||
- **Completeness:** Pass
|
||||
- **Consistency:** Pass
|
||||
|
||||
## Structural Issues
|
||||
|
||||
### Contradictions
|
||||
|
||||
None found.
|
||||
|
||||
### Ambiguities
|
||||
|
||||
None found. The PRD is appropriately specific for the scope of work.
|
||||
|
||||
### Missing Edge Cases
|
||||
|
||||
None significant. The PRD covers the key edge case of mixed-length path hash arrays from heterogeneous firmware networks (REQ-001 AC3).
|
||||
|
||||
### Feasibility Concerns
|
||||
|
||||
None. The changes are primarily documentation/description updates and verification tasks. The JSON column type inherently supports variable-length strings, and the meshcore_py dependency is already bumped.
|
||||
|
||||
### Scope Inconsistencies
|
||||
|
||||
None. The PRD's scope appropriately extends beyond the prompt's Receiver/Sender focus to cover downstream components (collector, API, web) that also handle path hashes. This is a necessary expansion, not scope creep.
|
||||
|
||||
## Action Items
|
||||
|
||||
No action items -- verdict is PASS.
|
||||
89
.plans/2026/03/17/01-multibyte-support/reviews/tasks.md
Normal file
89
.plans/2026/03/17/01-multibyte-support/reviews/tasks.md
Normal file
@@ -0,0 +1,89 @@
|
||||
# Task Review
|
||||
|
||||
> Phase: `.plans/2026/03/17/01-multibyte-support`
|
||||
> Tasks: `.plans/2026/03/17/01-multibyte-support/tasks.yaml`
|
||||
> PRD: `.plans/2026/03/17/01-multibyte-support/prd.md`
|
||||
|
||||
## Verdict: PASS
|
||||
|
||||
The task list is structurally sound, correctly ordered, and fully covers all 8 PRD requirements. The dependency graph is a valid DAG with no cycles or invalid references. No ordering issues were found — no task references files that should be produced by a task outside its dependency chain. All tasks have valid roles, complexity values, and complete fields. The task breakdown is appropriate for the narrow scope of this phase.
|
||||
|
||||
## Dependency Validation
|
||||
|
||||
### Reference Validity
|
||||
|
||||
All dependency references are valid. Every task ID in every `dependencies` list corresponds to an existing task in the inventory.
|
||||
|
||||
### DAG Validation
|
||||
|
||||
The dependency graph is a valid DAG with no cycles. Maximum dependency depth is 1 (two test tasks depend on one implementation task each).
|
||||
|
||||
### Orphan Tasks
|
||||
|
||||
The following tasks are never referenced as dependencies by other tasks:
|
||||
|
||||
- **TASK-001** (Verify meshcore_py compatibility) — terminal verification task, expected
|
||||
- **TASK-004** (Update SCHEMAS.md) — terminal documentation task, expected
|
||||
- **TASK-005** (Tests for normalizer) — terminal test task, expected
|
||||
- **TASK-006** (Tests for DB round-trip) — terminal test task, expected
|
||||
- **TASK-007** (Tests for API responses) — terminal test task, expected
|
||||
- **TASK-008** (Verify web dashboard) — terminal verification task, expected
|
||||
|
||||
All orphan tasks are leaf nodes (tests, docs, or verification tasks). No missing integration points.
|
||||
|
||||
## Ordering Check
|
||||
|
||||
No ordering issues detected. No task modifies a file that is also modified by another task outside its dependency chain. The `files_affected` sets across all tasks are disjoint except where proper dependency relationships exist.
|
||||
|
||||
## Coverage Check
|
||||
|
||||
### Uncovered Requirements
|
||||
|
||||
All PRD requirements are covered.
|
||||
|
||||
### Phantom References
|
||||
|
||||
No phantom references detected. Every requirement ID referenced in tasks exists in the PRD.
|
||||
|
||||
**Coverage summary:** 8 of 8 PRD requirements covered by tasks.
|
||||
|
||||
| Requirement | Covered By |
|
||||
|---|---|
|
||||
| REQ-001 | TASK-002, TASK-005 |
|
||||
| REQ-002 | TASK-003 |
|
||||
| REQ-003 | TASK-006 |
|
||||
| REQ-004 | TASK-007 |
|
||||
| REQ-005 | TASK-008 |
|
||||
| REQ-006 | TASK-001 |
|
||||
| REQ-007 | TASK-005, TASK-006, TASK-007 |
|
||||
| REQ-008 | TASK-004 |
|
||||
|
||||
## Scope Check
|
||||
|
||||
### Tasks Too Large
|
||||
|
||||
No tasks flagged as too large. All tasks are `small` complexity except TASK-005 (`medium`), which is appropriately scoped for a test suite covering 7 unit test scenarios plus an integration test.
|
||||
|
||||
### Tasks Too Vague
|
||||
|
||||
No tasks flagged as too vague. All tasks have detailed descriptions (well over 50 characters), multiple testable acceptance criteria, and specific file paths.
|
||||
|
||||
### Missing Test Tasks
|
||||
|
||||
- **TASK-001** (Verify meshcore_py compatibility) — no associated test task. This is a research/verification task that does not produce source code, so a test task is not applicable. (Warning only)
|
||||
- **TASK-004** (Update SCHEMAS.md) — no associated test task. This is a documentation-only task. (Warning only)
|
||||
- **TASK-008** (Verify web dashboard) — no associated test task. This is a verification task that may result in no code changes. (Warning only)
|
||||
|
||||
All implementation tasks that modify source code (TASK-002, TASK-003) have corresponding test tasks (TASK-005, TASK-006, TASK-007).
|
||||
|
||||
### Field Validation
|
||||
|
||||
All tasks have valid fields:
|
||||
- All `suggested_role` values are valid (`python`, `docs`, `frontend`)
|
||||
- All `estimated_complexity` values are valid (`small`, `medium`)
|
||||
- All tasks have at least one entry in `requirements`, `acceptance_criteria`, and `files_affected`
|
||||
- All task IDs follow the `TASK-NNN` format with sequential numbering
|
||||
|
||||
## Action Items
|
||||
|
||||
No action items — verdict is PASS.
|
||||
18
.plans/2026/03/17/01-multibyte-support/state.yaml
Normal file
18
.plans/2026/03/17/01-multibyte-support/state.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
status: completed
|
||||
phase_path: .plans/2026/03/17/01-multibyte-support
|
||||
branch: feature/multibyte-support
|
||||
current_phase: completed
|
||||
current_task: null
|
||||
fix_round: 0
|
||||
last_review_round: 1
|
||||
review_loop_exit_reason: success
|
||||
quality_gate: pass
|
||||
tasks:
|
||||
TASK-001: completed
|
||||
TASK-002: completed
|
||||
TASK-003: completed
|
||||
TASK-004: completed
|
||||
TASK-005: completed
|
||||
TASK-006: completed
|
||||
TASK-007: completed
|
||||
TASK-008: completed
|
||||
102
.plans/2026/03/17/01-multibyte-support/summary.md
Normal file
102
.plans/2026/03/17/01-multibyte-support/summary.md
Normal file
@@ -0,0 +1,102 @@
|
||||
# Phase Summary
|
||||
|
||||
> Phase: `.plans/2026/03/17/01-multibyte-support`
|
||||
> Generated by: `/jp-summary`
|
||||
|
||||
## Project Overview
|
||||
|
||||
MeshCore Hub was updated to support multibyte path hashes introduced in MeshCore firmware v1.14 and meshcore_py v2.3.0. Path hashes — node identifiers embedded in trace and route data — were previously fixed at 1 byte (2 hex characters) per hop but can now be multiple bytes. The update maintains backwards compatibility with nodes running older single-byte firmware.
|
||||
|
||||
### Goals
|
||||
|
||||
- Support variable-length (multibyte) path hashes throughout the data pipeline: interface → MQTT → collector → database → API → web dashboard.
|
||||
- Maintain backwards compatibility so single-byte path hashes from older firmware nodes continue to work without modification.
|
||||
- Update documentation and schemas to accurately describe the new variable-length path hash format.
|
||||
|
||||
## Task Execution
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total tasks | 8 |
|
||||
| Completed | 8 |
|
||||
| Failed | 0 |
|
||||
| Blocked | 0 |
|
||||
| Skipped | 0 |
|
||||
|
||||
### Task Details
|
||||
|
||||
| ID | Title | Role | Complexity | Status |
|
||||
|---|---|---|---|---|
|
||||
| TASK-001 | Verify meshcore_py v2.3.0+ backwards compatibility | python | small | completed |
|
||||
| TASK-002 | Update _normalize_hash_list to accept variable-length hex strings | python | small | completed |
|
||||
| TASK-003 | Update Pydantic schema descriptions for path_hashes fields | python | small | completed |
|
||||
| TASK-004 | Update SCHEMAS.md documentation for multibyte path hashes | docs | small | completed |
|
||||
| TASK-005 | Write tests for multibyte path hash normalizer | python | medium | completed |
|
||||
| TASK-006 | Write tests for database round-trip of multibyte path hashes | python | small | completed |
|
||||
| TASK-007 | Write tests for API trace path responses with multibyte hashes | python | small | completed |
|
||||
| TASK-008 | Verify web dashboard trace path display handles variable-length hashes | frontend | small | completed |
|
||||
|
||||
### Requirement Coverage
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total PRD requirements | 8 |
|
||||
| Requirements covered by completed tasks | 8 |
|
||||
| Requirements with incomplete coverage | 0 |
|
||||
|
||||
## Files Created and Modified
|
||||
|
||||
### Created
|
||||
|
||||
- `tests/test_collector/test_letsmesh_normalizer.py`
|
||||
|
||||
### Modified
|
||||
|
||||
- `pyproject.toml`
|
||||
- `SCHEMAS.md`
|
||||
- `src/meshcore_hub/collector/letsmesh_normalizer.py`
|
||||
- `src/meshcore_hub/common/schemas/events.py`
|
||||
- `src/meshcore_hub/common/schemas/messages.py`
|
||||
- `src/meshcore_hub/common/models/trace_path.py`
|
||||
- `tests/test_collector/test_subscriber.py`
|
||||
- `tests/test_common/test_models.py`
|
||||
- `tests/test_api/test_trace_paths.py`
|
||||
|
||||
## Review Rounds
|
||||
|
||||
### Overview
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| Total review rounds | 1 |
|
||||
| Total issues found | 0 |
|
||||
| Issues fixed | 0 |
|
||||
| Issues deferred | 0 |
|
||||
| Issues remaining | 0 |
|
||||
| Regressions introduced | 0 |
|
||||
|
||||
### Round Details
|
||||
|
||||
#### Round 1 (scope: full)
|
||||
|
||||
- **Issues found:** 0 (0 CRITICAL, 0 MAJOR, 0 MINOR)
|
||||
- **Exit reason:** success (clean review, no fix rounds needed)
|
||||
|
||||
## Known Issues and Deferred Items
|
||||
|
||||
No known issues.
|
||||
|
||||
## Decisions
|
||||
|
||||
- **meshcore_py handles backwards compatibility transparently** -- Research (TASK-001) confirmed that meshcore_py v2.3.0 handles multibyte path hashes at the protocol level via self-describing wire format. No compatibility logic needed in MeshCore Hub's interface layer.
|
||||
- **No database migration required** -- The existing JSON column type on `trace_paths.path_hashes` stores variable-length string arrays natively. Round-trip tests confirmed no data loss.
|
||||
- **No web dashboard changes needed** -- The SPA has no trace path rendering page. Path hashes are only served via the REST API which uses `list[str]` with no length constraints.
|
||||
- **Normalizer validation approach** -- Changed from exact length check (`len == 2`) to even-length minimum-2 check (`len >= 2 and len % 2 == 0`), preserving existing hex validation and uppercase normalization.
|
||||
|
||||
## Suggested Next Steps
|
||||
|
||||
1. Push the branch and create a pull request for review.
|
||||
2. Perform manual integration testing with a MeshCore device running firmware v1.14+ to verify multibyte path hashes flow end-to-end.
|
||||
3. Verify that mixed-firmware networks (some nodes v1.14+, some older) produce correct mixed-length path hash arrays in the database.
|
||||
274
.plans/2026/03/17/01-multibyte-support/tasks.yaml
Normal file
274
.plans/2026/03/17/01-multibyte-support/tasks.yaml
Normal file
@@ -0,0 +1,274 @@
|
||||
# Task list generated from PRD: .plans/2026/03/17/01-multibyte-support/prd.md
|
||||
# Generated by: /jp-task-list
|
||||
|
||||
tasks:
|
||||
- id: "TASK-001"
|
||||
title: "Verify meshcore_py v2.3.0+ backwards compatibility"
|
||||
description: |
|
||||
Research and confirm that meshcore_py v2.3.0+ handles backwards compatibility
|
||||
with single-byte firmware nodes at the protocol level. Check the meshcore_py
|
||||
v2.3.0 release notes and source code to determine whether the library
|
||||
transparently handles mixed single-byte and multibyte path hashes, or whether
|
||||
MeshCore Hub needs to implement any compatibility logic.
|
||||
|
||||
The pyproject.toml dependency is already set to meshcore>=2.3.0. Verify the
|
||||
interface receiver (src/meshcore_hub/interface/receiver.py) and sender
|
||||
(src/meshcore_hub/interface/sender.py) components work with the updated library
|
||||
without code changes, or document any API changes that require updates.
|
||||
|
||||
Document findings as a comment block at the top of the PR description or in
|
||||
the phase changelog.
|
||||
requirements:
|
||||
- "REQ-006"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "meshcore_py v2.3.0+ backwards compatibility behaviour is documented"
|
||||
- "Any required interface code changes are identified (or confirmed unnecessary)"
|
||||
- "pyproject.toml dependency version is confirmed correct at >=2.3.0"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "pyproject.toml"
|
||||
|
||||
- id: "TASK-002"
|
||||
title: "Update _normalize_hash_list to accept variable-length hex strings"
|
||||
description: |
|
||||
The LetsMesh normalizer method `_normalize_hash_list` in
|
||||
src/meshcore_hub/collector/letsmesh_normalizer.py (line ~724) currently rejects
|
||||
any path hash string that is not exactly 2 characters long:
|
||||
|
||||
if len(token) != 2:
|
||||
continue
|
||||
|
||||
Update this method to accept variable-length hex strings (any even-length hex
|
||||
string of 2+ characters). The validation should:
|
||||
- Accept strings of length 2, 4, 6, etc. (even-length, minimum 2)
|
||||
- Reject odd-length strings and empty strings
|
||||
- Continue to validate that all characters are valid hexadecimal (0-9, A-F)
|
||||
- Continue to uppercase-normalize the hex strings
|
||||
|
||||
Also update the method's docstring from "Normalize a list of one-byte hash
|
||||
strings" to reflect variable-length support.
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "_normalize_hash_list accepts 2-character hex strings (legacy single-byte)"
|
||||
- "_normalize_hash_list accepts 4+ character hex strings (multibyte)"
|
||||
- "_normalize_hash_list rejects odd-length strings"
|
||||
- "_normalize_hash_list rejects non-hex characters"
|
||||
- "_normalize_hash_list uppercases all hex strings"
|
||||
- "Method docstring updated to describe variable-length support"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/collector/letsmesh_normalizer.py"
|
||||
|
||||
- id: "TASK-003"
|
||||
done: true
|
||||
title: "Update Pydantic schema descriptions for path_hashes fields"
|
||||
description: |
|
||||
Update the `path_hashes` field description in Pydantic schemas to reflect
|
||||
variable-length hex strings instead of fixed 2-character strings.
|
||||
|
||||
Files and fields to update:
|
||||
|
||||
1. src/meshcore_hub/common/schemas/events.py - TraceDataEvent.path_hashes
|
||||
(line ~134): Change description from "Array of 2-character node hash
|
||||
identifiers" to "Array of hex-encoded node hash identifiers (variable
|
||||
length, e.g. '4a' for single-byte or 'b3fa' for multibyte)"
|
||||
|
||||
2. src/meshcore_hub/common/schemas/messages.py - MessageEventBase.path_hashes
|
||||
or TracePathRead.path_hashes (line ~157): Update description similarly
|
||||
if it references fixed-length hashes.
|
||||
|
||||
3. src/meshcore_hub/common/models/trace_path.py - TracePath.path_hashes
|
||||
docstring (line ~23): Change "JSON array of node hash identifiers" to
|
||||
"JSON array of hex-encoded node hash identifiers (variable length)"
|
||||
|
||||
Ensure no Pydantic validators or Field constraints reject strings longer
|
||||
than 2 characters. The current schemas use Optional[list[str]] with no
|
||||
per-element length validation, so no validator changes should be needed.
|
||||
requirements:
|
||||
- "REQ-002"
|
||||
dependencies: []
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "TraceDataEvent.path_hashes description reflects variable-length hex strings"
|
||||
- "TracePathRead.path_hashes description reflects variable-length hex strings"
|
||||
- "TracePath model docstring updated for variable-length path hashes"
|
||||
- "No Pydantic validation rejects path hash strings longer than 2 characters"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/common/schemas/events.py"
|
||||
- "src/meshcore_hub/common/schemas/messages.py"
|
||||
- "src/meshcore_hub/common/models/trace_path.py"
|
||||
|
||||
- id: "TASK-004"
|
||||
title: "Update SCHEMAS.md documentation for multibyte path hashes"
|
||||
description: |
|
||||
Update SCHEMAS.md to reflect the new variable-length path hash format
|
||||
introduced in MeshCore firmware v1.14.
|
||||
|
||||
Changes needed:
|
||||
|
||||
1. Line ~228: Change "Array of 2-character node hash identifiers (ordered
|
||||
by hops)" to "Array of hex-encoded node hash identifiers, variable length
|
||||
(e.g. '4a' for single-byte, 'b3fa' for multibyte), ordered by hops"
|
||||
|
||||
2. Line ~239: Update the example path_hashes array to include at least one
|
||||
multibyte hash, e.g.:
|
||||
"path_hashes": ["4a", "b3fa", "02"]
|
||||
This demonstrates mixed single-byte and multibyte hashes in the same trace.
|
||||
|
||||
3. Add a brief note explaining that firmware v1.14+ supports multibyte path
|
||||
hashes and that older nodes use single-byte (2-character) hashes, so
|
||||
mixed-length arrays are expected in heterogeneous networks.
|
||||
requirements:
|
||||
- "REQ-008"
|
||||
dependencies: []
|
||||
suggested_role: "docs"
|
||||
acceptance_criteria:
|
||||
- "path_hashes field description updated from '2-character' to 'variable-length hex'"
|
||||
- "Example payload includes at least one multibyte path hash"
|
||||
- "Note about firmware version compatibility is present"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "SCHEMAS.md"
|
||||
|
||||
- id: "TASK-005"
|
||||
done: true
|
||||
title: "Write tests for multibyte path hash normalizer"
|
||||
description: |
|
||||
Add tests for the updated _normalize_hash_list method in the LetsMesh
|
||||
normalizer to verify it handles variable-length hex strings correctly.
|
||||
|
||||
Add test cases in tests/test_collector/ (either in an existing normalizer
|
||||
test file or a new test_letsmesh_normalizer.py if one doesn't exist):
|
||||
|
||||
1. Single-byte (2-char) hashes: ["4a", "b3", "fa"] -> accepted, uppercased
|
||||
2. Multibyte (4-char) hashes: ["4a2b", "b3fa"] -> accepted, uppercased
|
||||
3. Mixed-length hashes: ["4a", "b3fa", "02"] -> all accepted
|
||||
4. Odd-length strings: ["4a", "b3f", "02"] -> "b3f" filtered out
|
||||
5. Invalid hex characters: ["4a", "zz", "02"] -> "zz" filtered out
|
||||
6. Empty list: [] -> returns None
|
||||
7. Non-string items: [42, "4a"] -> 42 filtered out
|
||||
|
||||
Also add/update integration-level tests in tests/test_collector/test_subscriber.py
|
||||
to verify that multibyte path hashes flow through the full collector pipeline
|
||||
(subscriber -> handler -> database) correctly. The existing test cases at
|
||||
lines ~607 and ~662 use 2-character hashes; add a parallel test case with
|
||||
multibyte hashes.
|
||||
requirements:
|
||||
- "REQ-001"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-002"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Unit tests for _normalize_hash_list cover all 7 scenarios listed"
|
||||
- "Integration test verifies multibyte path hashes stored correctly in database"
|
||||
- "All existing 2-character path hash tests continue to pass"
|
||||
- "All new tests pass"
|
||||
estimated_complexity: "medium"
|
||||
files_affected:
|
||||
- "tests/test_collector/test_letsmesh_normalizer.py"
|
||||
- "tests/test_collector/test_subscriber.py"
|
||||
|
||||
- id: "TASK-006"
|
||||
title: "Write tests for database round-trip of multibyte path hashes"
|
||||
description: |
|
||||
Verify that the SQLAlchemy JSON column on the TracePath model correctly
|
||||
stores and retrieves variable-length path hash arrays without data loss
|
||||
or truncation.
|
||||
|
||||
Add test cases in tests/test_common/test_models.py (where existing
|
||||
TracePath tests are at line ~129):
|
||||
|
||||
1. Store and retrieve a TracePath with multibyte path_hashes:
|
||||
["4a2b", "b3fa", "02cd"] -> verify round-trip equality
|
||||
2. Store and retrieve a TracePath with mixed-length path_hashes:
|
||||
["4a", "b3fa", "02"] -> verify round-trip equality
|
||||
3. Verify existing test with 2-character hashes still passes
|
||||
|
||||
These tests confirm REQ-003 (no migration needed) and contribute to
|
||||
REQ-007 (backwards compatibility).
|
||||
requirements:
|
||||
- "REQ-003"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test verifies multibyte path_hashes round-trip through JSON column correctly"
|
||||
- "Test verifies mixed-length path_hashes round-trip correctly"
|
||||
- "Existing 2-character path hash test continues to pass"
|
||||
- "No Alembic migration is created or required"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_common/test_models.py"
|
||||
|
||||
- id: "TASK-007"
|
||||
title: "Write tests for API trace path responses with multibyte hashes"
|
||||
description: |
|
||||
Add test cases in tests/test_api/test_trace_paths.py to verify that the
|
||||
trace paths API returns multibyte path hashes faithfully.
|
||||
|
||||
The existing test fixtures in tests/test_api/conftest.py create
|
||||
sample_trace_path objects with path_hashes like ["abc123", "def456",
|
||||
"ghi789"] (line ~275). Note these are already 6-character strings, so
|
||||
the API serialization likely already works. Add explicit test cases:
|
||||
|
||||
1. Create a trace path with multibyte path_hashes (e.g. ["4a2b", "b3fa"])
|
||||
via the fixture, then GET /trace-paths and verify the response contains
|
||||
the exact same array.
|
||||
2. Create a trace path with mixed-length path_hashes (e.g. ["4a", "b3fa",
|
||||
"02"]), then GET /trace-paths/{id} and verify the response.
|
||||
3. Verify existing API tests with current path_hashes continue to pass.
|
||||
|
||||
These tests confirm REQ-004.
|
||||
requirements:
|
||||
- "REQ-004"
|
||||
- "REQ-007"
|
||||
dependencies:
|
||||
- "TASK-003"
|
||||
suggested_role: "python"
|
||||
acceptance_criteria:
|
||||
- "Test verifies GET /trace-paths returns multibyte path hashes correctly"
|
||||
- "Test verifies GET /trace-paths/{id} returns mixed-length path hashes correctly"
|
||||
- "Existing API trace path tests continue to pass"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "tests/test_api/test_trace_paths.py"
|
||||
- "tests/test_api/conftest.py"
|
||||
|
||||
- id: "TASK-008"
|
||||
done: true
|
||||
title: "Verify web dashboard trace path display handles variable-length hashes"
|
||||
description: |
|
||||
Verify that the web dashboard does not have any hardcoded assumptions about
|
||||
2-character path hash strings. A grep of src/meshcore_hub/web/static/js/spa/
|
||||
for "path_hash" and "trace" shows no direct references to path hashes in the
|
||||
SPA JavaScript code, meaning path hashes are likely rendered generically
|
||||
through the API data display.
|
||||
|
||||
Confirm this by:
|
||||
1. Checking all web template and JavaScript files that render trace path data
|
||||
2. Verifying no CSS or JS applies fixed-width formatting to path hash elements
|
||||
3. If any fixed-width or truncation logic exists, update it to handle
|
||||
variable-length strings
|
||||
|
||||
If no web code references path hashes directly (as initial grep suggests),
|
||||
document that the web dashboard requires no changes for multibyte support.
|
||||
This satisfies REQ-005.
|
||||
requirements:
|
||||
- "REQ-005"
|
||||
dependencies: []
|
||||
suggested_role: "frontend"
|
||||
acceptance_criteria:
|
||||
- "Web dashboard trace/path display verified to handle variable-length hashes"
|
||||
- "No fixed-width formatting assumes 2-character hash strings"
|
||||
- "Any necessary changes applied, or no-change finding documented"
|
||||
estimated_complexity: "small"
|
||||
files_affected:
|
||||
- "src/meshcore_hub/web/static/js/spa/pages/trace-paths.js"
|
||||
@@ -1,3 +1,6 @@
|
||||
default_language_version:
|
||||
python: python3
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
@@ -14,7 +17,6 @@ repos:
|
||||
rev: 24.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
language_version: python3.13
|
||||
args: ["--line-length=88"]
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
|
||||
@@ -1 +1 @@
|
||||
3.13
|
||||
3.14
|
||||
|
||||
226
AGENTS.md
226
AGENTS.md
@@ -12,8 +12,15 @@ This document provides context and guidelines for AI coding assistants working o
|
||||
- `source .venv/bin/activate`
|
||||
* You MUST install all project dependencies using the `pip install -e ".[dev]"` command
|
||||
* You MUST install `pre-commit` for quality checks
|
||||
* You MUST keep project documentation in sync with behavior/config/schema changes made in code (at minimum update relevant sections in `README.md`, `SCHEMAS.md`, `PLAN.md`, and/or `TASKS.md` when applicable)
|
||||
* Before committing:
|
||||
- Run tests with `pytest` to ensure recent changes haven't broken anything
|
||||
- Run **targeted tests** for the components you changed, not the full suite:
|
||||
- `pytest tests/test_web/` for web-only changes (templates, static JS, web routes)
|
||||
- `pytest tests/test_api/` for API changes
|
||||
- `pytest tests/test_collector/` for collector changes
|
||||
- `pytest tests/test_interface/` for interface/sender/receiver changes
|
||||
- `pytest tests/test_common/` for common models/schemas/config changes
|
||||
- Only run the full `pytest` if changes span multiple components
|
||||
- Run `pre-commit run --all-files` to perform all quality checks
|
||||
|
||||
## Project Overview
|
||||
@@ -46,7 +53,8 @@ MeshCore Hub is a Python 3.13+ monorepo for managing and orchestrating MeshCore
|
||||
| REST API | FastAPI |
|
||||
| MQTT Client | paho-mqtt |
|
||||
| MeshCore Interface | meshcore |
|
||||
| Templates | Jinja2 |
|
||||
| Templates | Jinja2 (server), lit-html (SPA) |
|
||||
| Frontend | ES Modules SPA with client-side routing |
|
||||
| CSS Framework | Tailwind CSS + DaisyUI |
|
||||
| Testing | pytest, pytest-asyncio |
|
||||
| Formatting | black |
|
||||
@@ -274,17 +282,26 @@ meshcore-hub/
|
||||
│ │ ├── app.py # FastAPI app
|
||||
│ │ ├── auth.py # Authentication
|
||||
│ │ ├── dependencies.py
|
||||
│ │ ├── metrics.py # Prometheus metrics endpoint
|
||||
│ │ └── routes/ # API routes
|
||||
│ │ ├── members.py # Member CRUD endpoints
|
||||
│ │ └── ...
|
||||
│ └── web/
|
||||
│ ├── cli.py
|
||||
│ ├── app.py # FastAPI app
|
||||
│ ├── routes/ # Page routes
|
||||
│ │ ├── members.py # Members page
|
||||
│ │ └── ...
|
||||
│ ├── templates/ # Jinja2 templates
|
||||
│ └── static/ # CSS, JS
|
||||
│ ├── pages.py # Custom markdown page loader
|
||||
│ ├── templates/ # Jinja2 templates (spa.html shell)
|
||||
│ └── static/
|
||||
│ ├── css/app.css # Custom styles
|
||||
│ └── js/spa/ # SPA frontend (ES modules)
|
||||
│ ├── app.js # Entry point, route registration
|
||||
│ ├── router.js # Client-side History API router
|
||||
│ ├── api.js # API fetch helper
|
||||
│ ├── components.js # Shared UI components (lit-html)
|
||||
│ ├── icons.js # SVG icon functions (lit-html)
|
||||
│ └── pages/ # Page modules (lazy-loaded)
|
||||
│ ├── home.js, dashboard.js, nodes.js, ...
|
||||
│ └── admin/ # Admin page modules
|
||||
├── tests/
|
||||
│ ├── conftest.py
|
||||
│ ├── test_common/
|
||||
@@ -296,11 +313,19 @@ meshcore-hub/
|
||||
│ ├── env.py
|
||||
│ └── versions/
|
||||
├── etc/
|
||||
│ └── mosquitto.conf # MQTT broker configuration
|
||||
│ ├── mosquitto.conf # MQTT broker configuration
|
||||
│ ├── prometheus/ # Prometheus configuration
|
||||
│ │ ├── prometheus.yml # Scrape and alerting config
|
||||
│ │ └── alerts.yml # Alert rules
|
||||
│ └── alertmanager/ # Alertmanager configuration
|
||||
│ └── alertmanager.yml # Routing and receiver config
|
||||
├── example/
|
||||
│ └── seed/ # Example seed data files
|
||||
│ ├── node_tags.yaml # Example node tags
|
||||
│ └── members.yaml # Example network members
|
||||
│ ├── seed/ # Example seed data files
|
||||
│ │ ├── node_tags.yaml # Example node tags
|
||||
│ │ └── members.yaml # Example network members
|
||||
│ └── content/ # Example custom content
|
||||
│ ├── pages/ # Example custom pages
|
||||
│ └── media/ # Example media files
|
||||
├── seed/ # Seed data directory (SEED_HOME)
|
||||
│ ├── node_tags.yaml # Node tags for import
|
||||
│ └── members.yaml # Network members for import
|
||||
@@ -341,6 +366,25 @@ Examples:
|
||||
- JSON columns for flexible data (path_hashes, parsed_data, etc.)
|
||||
- Foreign keys reference nodes by UUID, not public_key
|
||||
|
||||
## Standard Node Tags
|
||||
|
||||
Node tags are flexible key-value pairs that allow custom metadata to be attached to nodes. While tags are completely optional and freeform, the following standard tag keys are recommended for consistent use across the web dashboard:
|
||||
|
||||
| Tag Key | Description | Usage |
|
||||
|---------|-------------|-------|
|
||||
| `name` | Node display name | Used as the primary display name throughout the UI (overrides the advertised name) |
|
||||
| `description` | Short description | Displayed as supplementary text under the node name |
|
||||
| `member_id` | Member identifier reference | Links the node to a network member (matches `member_id` in Members table) |
|
||||
| `lat` | GPS latitude override | Overrides node-reported latitude for map display |
|
||||
| `lon` | GPS longitude override | Overrides node-reported longitude for map display |
|
||||
| `elevation` | GPS elevation override | Overrides node-reported elevation |
|
||||
| `role` | Node role/purpose | Used for website presentation and filtering (e.g., "gateway", "repeater", "sensor") |
|
||||
|
||||
**Important Notes:**
|
||||
- All tags are optional - nodes can function without any tags
|
||||
- Tag keys are case-sensitive
|
||||
- The `member_id` tag should reference a valid `member_id` from the Members table
|
||||
|
||||
## Testing Guidelines
|
||||
|
||||
### Unit Tests
|
||||
@@ -417,13 +461,121 @@ async def client(db_session):
|
||||
5. Add Alembic migration if schema changed
|
||||
6. Add tests in `tests/test_collector/`
|
||||
|
||||
### Adding a New SPA Page
|
||||
|
||||
The web dashboard is a Single Page Application. Pages are ES modules loaded by the client-side router.
|
||||
|
||||
1. Create a page module in `web/static/js/spa/pages/` (e.g., `my-page.js`)
|
||||
2. Export an `async function render(container, params, router)` that renders into `container` using `litRender(html\`...\`, container)`
|
||||
3. Register the route in `web/static/js/spa/app.js` with `router.addRoute('/my-page', pageHandler(pages.myPage))`
|
||||
4. Add the page title to `updatePageTitle()` in `app.js`
|
||||
5. Add a nav link in `web/templates/spa.html` (both mobile and desktop menus)
|
||||
|
||||
**Key patterns:**
|
||||
- Import `html`, `litRender`, `nothing` from `../components.js` (re-exports lit-html)
|
||||
- Use `apiGet()` from `../api.js` for API calls
|
||||
- For list pages with filters, use the `renderPage()` pattern: render the page header immediately, then re-render with the filter form + results after fetch (keeps the form out of the shell to avoid layout shift from data-dependent filter selects)
|
||||
- Old page content stays visible until data is ready (navbar spinner indicates loading)
|
||||
- Use `pageColors` from `components.js` for section-specific colors (reads CSS custom properties from `app.css`)
|
||||
- Return a cleanup function if the page creates resources (e.g., Leaflet maps, Chart.js instances)
|
||||
|
||||
### Internationalization (i18n)
|
||||
|
||||
The web dashboard supports internationalization via JSON translation files. The default language is English.
|
||||
|
||||
**Translation files location:** `src/meshcore_hub/web/static/locales/`
|
||||
|
||||
**Key files:**
|
||||
- `en.json` - English translations (reference implementation)
|
||||
- `languages.md` - Comprehensive translation reference guide for translators
|
||||
|
||||
**Using translations in JavaScript:**
|
||||
|
||||
Import the `t()` function from `components.js`:
|
||||
|
||||
```javascript
|
||||
import { t } from '../components.js';
|
||||
|
||||
// Simple translation
|
||||
const label = t('common.save'); // "Save"
|
||||
|
||||
// Translation with variable interpolation
|
||||
const title = t('common.add_entity', { entity: t('entities.node') }); // "Add Node"
|
||||
|
||||
// Composed patterns for consistency
|
||||
const emptyMsg = t('common.no_entity_found', { entity: t('entities.nodes').toLowerCase() }); // "No nodes found"
|
||||
```
|
||||
|
||||
**Translation architecture:**
|
||||
|
||||
1. **Entity-based composition:** Core entity names (`entities.*`) are referenced by composite patterns for consistency
|
||||
2. **Reusable patterns:** Common UI patterns (`common.*`) use `{{variable}}` interpolation for dynamic content
|
||||
3. **Separation of concerns:**
|
||||
- Keys without `_label` suffix = table headers (title case, no colon)
|
||||
- Keys with `_label` suffix = inline labels (sentence case, with colon)
|
||||
|
||||
**When adding/modifying translations:**
|
||||
|
||||
1. **Add new keys** to `en.json` following existing patterns:
|
||||
- Use composition when possible (reference `entities.*` in `common.*` patterns)
|
||||
- Group related keys by section (e.g., `admin_members.*`, `admin_node_tags.*`)
|
||||
- Use `{{variable}}` syntax for dynamic content
|
||||
|
||||
2. **Update `languages.md`** with:
|
||||
- Key name, English value, and usage context
|
||||
- Variable descriptions if using interpolation
|
||||
- Notes about HTML content or special formatting
|
||||
|
||||
3. **Add tests** in `tests/test_common/test_i18n.py`:
|
||||
- Test new interpolation patterns
|
||||
- Test required sections if adding new top-level sections
|
||||
- Test composed patterns with entity references
|
||||
|
||||
4. **Run i18n tests:**
|
||||
```bash
|
||||
pytest tests/test_common/test_i18n.py -v
|
||||
```
|
||||
|
||||
**Best practices:**
|
||||
|
||||
- **Avoid duplication:** Use `common.*` patterns instead of duplicating similar strings
|
||||
- **Compose with entities:** Reference `entities.*` keys in patterns rather than hardcoding entity names
|
||||
- **Preserve variables:** Keep `{{variable}}` placeholders unchanged when translating
|
||||
- **Test composition:** Verify patterns work with all entity types (singular/plural, lowercase/uppercase)
|
||||
- **Document context:** Always update `languages.md` so translators understand usage
|
||||
|
||||
**Example - adding a new entity and patterns:**
|
||||
|
||||
```javascript
|
||||
// 1. Add entity to en.json
|
||||
"entities": {
|
||||
"sensor": "Sensor"
|
||||
}
|
||||
|
||||
// 2. Use with existing common patterns
|
||||
t('common.add_entity', { entity: t('entities.sensor') }) // "Add Sensor"
|
||||
t('common.no_entity_found', { entity: t('entities.sensors').toLowerCase() }) // "No sensors found"
|
||||
|
||||
// 3. Update languages.md with context
|
||||
// 4. Add test to test_i18n.py
|
||||
```
|
||||
|
||||
**Translation loading:**
|
||||
|
||||
The i18n system (`src/meshcore_hub/common/i18n.py`) loads translations on startup:
|
||||
- Defaults to English (`en`)
|
||||
- Falls back to English for missing keys
|
||||
- Returns the key itself if translation not found
|
||||
|
||||
For full translation guidelines, see `src/meshcore_hub/web/static/locales/languages.md`.
|
||||
|
||||
### Adding a New Database Model
|
||||
|
||||
1. Create model in `common/models/`
|
||||
2. Export in `common/models/__init__.py`
|
||||
3. Create Alembic migration: `alembic revision --autogenerate -m "description"`
|
||||
3. Create Alembic migration: `meshcore-hub db revision --autogenerate -m "description"`
|
||||
4. Review and adjust migration file
|
||||
5. Test migration: `alembic upgrade head`
|
||||
5. Test migration: `meshcore-hub db upgrade`
|
||||
|
||||
### Running the Development Environment
|
||||
|
||||
@@ -445,7 +597,7 @@ pytest
|
||||
# Run specific component
|
||||
meshcore-hub api --reload
|
||||
meshcore-hub collector
|
||||
meshcore-hub interface --mode receiver --mock
|
||||
meshcore-hub interface receiver --mock
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
@@ -455,9 +607,18 @@ See [PLAN.md](PLAN.md#configuration-environment-variables) for complete list.
|
||||
Key variables:
|
||||
- `DATA_HOME` - Base directory for runtime data (default: `./data`)
|
||||
- `SEED_HOME` - Directory containing seed data files (default: `./seed`)
|
||||
- `CONTENT_HOME` - Directory containing custom content (pages, media) (default: `./content`)
|
||||
- `MQTT_HOST`, `MQTT_PORT`, `MQTT_PREFIX` - MQTT broker connection
|
||||
- `MQTT_TLS` - Enable TLS/SSL for MQTT (default: `false`)
|
||||
- `API_READ_KEY`, `API_ADMIN_KEY` - API authentication keys
|
||||
- `WEB_ADMIN_ENABLED` - Enable admin interface at /a/ (default: `false`, requires auth proxy)
|
||||
- `WEB_TRUSTED_PROXY_HOSTS` - Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled.
|
||||
- `WEB_THEME` - Default theme for the web dashboard (default: `dark`, options: `dark`, `light`). Users can override via the theme toggle in the navbar, which persists their preference in browser localStorage.
|
||||
- `WEB_AUTO_REFRESH_SECONDS` - Auto-refresh interval in seconds for list pages (default: `30`, `0` to disable)
|
||||
- `TZ` - Timezone for web dashboard date/time display (default: `UTC`, e.g., `America/New_York`, `Europe/London`)
|
||||
- `FEATURE_DASHBOARD`, `FEATURE_NODES`, `FEATURE_ADVERTISEMENTS`, `FEATURE_MESSAGES`, `FEATURE_MAP`, `FEATURE_MEMBERS`, `FEATURE_PAGES` - Feature flags to enable/disable specific web dashboard pages (default: all `true`). Dependencies: Dashboard auto-disables when all of Nodes/Advertisements/Messages are disabled. Map auto-disables when Nodes is disabled.
|
||||
- `METRICS_ENABLED` - Enable Prometheus metrics endpoint at /metrics (default: `true`)
|
||||
- `METRICS_CACHE_TTL` - Seconds to cache metrics output (default: `60`)
|
||||
- `LOG_LEVEL` - Logging verbosity
|
||||
|
||||
The database defaults to `sqlite:///{DATA_HOME}/collector/meshcore.db` and does not typically need to be configured.
|
||||
@@ -471,6 +632,32 @@ ${SEED_HOME}/
|
||||
└── members.yaml # Network members list
|
||||
```
|
||||
|
||||
**Custom Content (`CONTENT_HOME`)** - Contains custom pages and media for the web dashboard:
|
||||
```
|
||||
${CONTENT_HOME}/
|
||||
├── pages/ # Custom markdown pages
|
||||
│ ├── about.md # Example: About page (/pages/about)
|
||||
│ ├── faq.md # Example: FAQ page (/pages/faq)
|
||||
│ └── getting-started.md # Example: Getting Started (/pages/getting-started)
|
||||
└── media/ # Custom media files
|
||||
└── images/
|
||||
├── logo.svg # Full-color custom logo (default)
|
||||
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
|
||||
```
|
||||
|
||||
Pages use YAML frontmatter for metadata:
|
||||
```markdown
|
||||
---
|
||||
title: About Us # Browser tab title and nav link (not rendered on page)
|
||||
slug: about # URL path (default: filename without .md)
|
||||
menu_order: 10 # Nav sort order (default: 100, lower = earlier)
|
||||
---
|
||||
|
||||
# About Our Network
|
||||
|
||||
Markdown content here (include your own heading)...
|
||||
```
|
||||
|
||||
**Runtime Data (`DATA_HOME`)** - Contains runtime data (gitignored):
|
||||
```
|
||||
${DATA_HOME}/
|
||||
@@ -486,7 +673,7 @@ The database can be seeded with node tags and network members from YAML files in
|
||||
- `node_tags.yaml` - Node tag definitions (keyed by public_key)
|
||||
- `members.yaml` - Network member definitions
|
||||
|
||||
Seeding is a separate process from the collector and must be run explicitly:
|
||||
**Important:** Seeding is NOT automatic and must be run explicitly. This prevents seed files from overwriting user changes made via the admin UI.
|
||||
|
||||
```bash
|
||||
# Native CLI
|
||||
@@ -496,6 +683,8 @@ meshcore-hub collector seed
|
||||
docker compose --profile seed up
|
||||
```
|
||||
|
||||
**Note:** Once the admin UI is enabled (`WEB_ADMIN_ENABLED=true`), tags should be managed through the web interface rather than seed files.
|
||||
|
||||
### Webhook Configuration
|
||||
|
||||
The collector supports forwarding events to external HTTP endpoints:
|
||||
@@ -651,9 +840,10 @@ await mc.start_auto_message_fetching()
|
||||
|
||||
On startup, the receiver performs these initialization steps:
|
||||
1. Set device clock to current Unix timestamp
|
||||
2. Send a local (non-flood) advertisement
|
||||
3. Start automatic message fetching
|
||||
4. Sync the device's contact database
|
||||
2. Optionally set the device name (if `MESHCORE_DEVICE_NAME` is configured)
|
||||
3. Send a flood advertisement (broadcasts device name to the mesh)
|
||||
4. Start automatic message fetching
|
||||
5. Sync the device's contact database
|
||||
|
||||
### Contact Sync Behavior
|
||||
|
||||
|
||||
21
Dockerfile
21
Dockerfile
@@ -4,7 +4,7 @@
|
||||
# =============================================================================
|
||||
# Stage 1: Builder - Install dependencies and build package
|
||||
# =============================================================================
|
||||
FROM python:3.13-slim AS builder
|
||||
FROM python:3.14-slim AS builder
|
||||
|
||||
# Set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
@@ -39,7 +39,7 @@ RUN sed -i "s|__version__ = \"dev\"|__version__ = \"${BUILD_VERSION}\"|" src/mes
|
||||
# =============================================================================
|
||||
# Stage 2: Runtime - Final production image
|
||||
# =============================================================================
|
||||
FROM python:3.13-slim AS runtime
|
||||
FROM python:3.14-slim AS runtime
|
||||
|
||||
# Labels
|
||||
LABEL org.opencontainers.image.title="MeshCore Hub" \
|
||||
@@ -65,9 +65,26 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
# For serial port access
|
||||
udev \
|
||||
# LetsMesh decoder runtime
|
||||
nodejs \
|
||||
npm \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mkdir -p /data
|
||||
|
||||
# Install meshcore-decoder CLI.
|
||||
RUN mkdir -p /opt/letsmesh-decoder \
|
||||
&& cd /opt/letsmesh-decoder \
|
||||
&& npm init -y >/dev/null 2>&1 \
|
||||
&& npm install --omit=dev @michaelhart/meshcore-decoder@0.2.7 patch-package
|
||||
|
||||
# Apply maintained meshcore-decoder compatibility patch.
|
||||
COPY patches/@michaelhart+meshcore-decoder+0.2.7.patch /opt/letsmesh-decoder/patches/@michaelhart+meshcore-decoder+0.2.7.patch
|
||||
RUN cd /opt/letsmesh-decoder \
|
||||
&& npx patch-package --error-on-fail \
|
||||
&& npm uninstall patch-package \
|
||||
&& npm prune --omit=dev
|
||||
RUN ln -s /opt/letsmesh-decoder/node_modules/.bin/meshcore-decoder /usr/local/bin/meshcore-decoder
|
||||
|
||||
# Copy virtual environment from builder
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
14
PLAN.md
14
PLAN.md
@@ -489,6 +489,16 @@ ${DATA_HOME}/
|
||||
|----------|---------|-------------|
|
||||
| DATABASE_URL | sqlite:///{DATA_HOME}/collector/meshcore.db | SQLAlchemy URL |
|
||||
| TAGS_FILE | {DATA_HOME}/collector/tags.json | Path to tags JSON file |
|
||||
| COLLECTOR_INGEST_MODE | native | Ingest mode (`native` or `letsmesh_upload`) |
|
||||
| COLLECTOR_LETSMESH_DECODER_ENABLED | true | Enable external packet decoding in LetsMesh mode |
|
||||
|
||||
LetsMesh compatibility parity note:
|
||||
- `status` feed packets are stored as informational `letsmesh_status` events and do not create advertisement rows.
|
||||
- Advertisement rows in LetsMesh mode are created from decoded payload type `4` only.
|
||||
- Decoded payload type `11` is normalized to native `contact` updates.
|
||||
- Decoded payload type `9` is normalized to native `trace_data`.
|
||||
- Decoded payload type `8` is normalized to informational `path_updated`.
|
||||
- Decoded payload type `1` can map to native response-style events when decrypted structured content is available.
|
||||
|
||||
### API
|
||||
| Variable | Default | Description |
|
||||
@@ -506,6 +516,10 @@ ${DATA_HOME}/
|
||||
| WEB_PORT | 8080 | Web bind port |
|
||||
| API_BASE_URL | http://localhost:8000 | API endpoint |
|
||||
| API_KEY | | API key for queries |
|
||||
| WEB_TRUSTED_PROXY_HOSTS | * | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. |
|
||||
| WEB_LOCALE | en | UI translation locale |
|
||||
| WEB_DATETIME_LOCALE | en-US | Date formatting locale for UI timestamps |
|
||||
| TZ | UTC | Timezone used for UI timestamp rendering |
|
||||
| NETWORK_DOMAIN | | Network domain |
|
||||
| NETWORK_NAME | MeshCore Network | Network name |
|
||||
| NETWORK_CITY | | City location |
|
||||
|
||||
432
README.md
432
README.md
@@ -1,9 +1,19 @@
|
||||
# MeshCore Hub
|
||||
|
||||
Python 3.11+ platform for managing and orchestrating MeshCore mesh networks.
|
||||
[](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/ci.yml)
|
||||
[](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/docker.yml)
|
||||
[](https://codecov.io/github/ipnet-mesh/meshcore-hub)
|
||||
[](https://www.buymeacoffee.com/jinglemansweep)
|
||||
|
||||
Python 3.13+ platform for managing and orchestrating MeshCore mesh networks.
|
||||
|
||||

|
||||
|
||||
> [!IMPORTANT]
|
||||
> **Help Translate MeshCore Hub** 🌍
|
||||
>
|
||||
> We need volunteers to translate the web dashboard! Currently only English is available. Check out the [Translation Guide](src/meshcore_hub/web/static/locales/languages.md) to contribute a language pack. Partial translations welcome!
|
||||
|
||||
## Overview
|
||||
|
||||
MeshCore Hub provides a complete solution for monitoring, collecting, and interacting with MeshCore mesh networks. It consists of multiple components that work together:
|
||||
@@ -13,7 +23,7 @@ MeshCore Hub provides a complete solution for monitoring, collecting, and intera
|
||||
| **Interface** | Connects to MeshCore companion nodes via Serial/USB, bridges events to/from MQTT |
|
||||
| **Collector** | Subscribes to MQTT events and persists them to a database |
|
||||
| **API** | REST API for querying data and sending commands to the network |
|
||||
| **Web Dashboard** | User-friendly web interface for visualizing network status |
|
||||
| **Web Dashboard** | Single Page Application (SPA) for visualizing network status |
|
||||
|
||||
## Architecture
|
||||
|
||||
@@ -66,6 +76,7 @@ flowchart LR
|
||||
- **Command Dispatch**: Send messages and advertisements via the API
|
||||
- **Node Tagging**: Add custom metadata to nodes for organization
|
||||
- **Web Dashboard**: Visualize network status, node locations, and message history
|
||||
- **Internationalization**: Full i18n support with composable translation patterns
|
||||
- **Docker Ready**: Single image with all components, easy deployment
|
||||
|
||||
## Getting Started
|
||||
@@ -80,9 +91,13 @@ The quickest way to get started is running the entire stack on a single machine
|
||||
|
||||
**Steps:**
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/ipnet-mesh/meshcore-hub.git
|
||||
# Create a directory, download the Docker Compose file and
|
||||
# example environment configuration file
|
||||
|
||||
mkdir meshcore-hub
|
||||
cd meshcore-hub
|
||||
wget https://raw.githubusercontent.com/ipnet-mesh/meshcore-hub/refs/heads/main/docker-compose.yml
|
||||
wget https://raw.githubusercontent.com/ipnet-mesh/meshcore-hub/refs/heads/main/.env.example
|
||||
|
||||
# Copy and configure environment
|
||||
cp .env.example .env
|
||||
@@ -156,33 +171,26 @@ This architecture allows:
|
||||
- Community members to contribute coverage with minimal setup
|
||||
- The central server to be hosted anywhere with internet access
|
||||
|
||||
## Quick Start
|
||||
## Deployment
|
||||
|
||||
### Using Docker Compose (Recommended)
|
||||
### Docker Compose Profiles
|
||||
|
||||
Docker Compose uses **profiles** to select which services to run:
|
||||
|
||||
| Profile | Services | Use Case |
|
||||
|---------|----------|----------|
|
||||
| `core` | collector, api, web | Central server infrastructure |
|
||||
| `core` | db-migrate, collector, api, web | Central server infrastructure |
|
||||
| `receiver` | interface-receiver | Receiver node (events to MQTT) |
|
||||
| `sender` | interface-sender | Sender node (MQTT to device) |
|
||||
| `mqtt` | mosquitto broker | Local MQTT broker (optional) |
|
||||
| `mock` | interface-mock-receiver | Testing without hardware |
|
||||
| `migrate` | db-migrate | One-time database migration |
|
||||
| `seed` | seed | One-time seed data import |
|
||||
| `metrics` | prometheus, alertmanager | Prometheus metrics and alerting |
|
||||
|
||||
**Note:** Most deployments connect to an external MQTT broker. Add `--profile mqtt` only if you need a local broker.
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/ipnet-mesh/meshcore-hub.git
|
||||
cd meshcore-hub
|
||||
|
||||
# Copy and configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env with your settings (API keys, serial port, network info)
|
||||
|
||||
# Create database schema
|
||||
docker compose --profile migrate run --rm db-migrate
|
||||
|
||||
@@ -205,7 +213,7 @@ docker compose logs -f
|
||||
docker compose down
|
||||
```
|
||||
|
||||
#### Serial Device Access
|
||||
### Serial Device Access
|
||||
|
||||
For production with real MeshCore devices, ensure the serial port is accessible:
|
||||
|
||||
@@ -221,13 +229,25 @@ SERIAL_PORT=/dev/ttyUSB0
|
||||
SERIAL_PORT_SENDER=/dev/ttyUSB1 # If using separate sender device
|
||||
```
|
||||
|
||||
**Tip:** If USB devices reconnect as different numeric IDs (e.g., `/dev/ttyUSB0` becomes `/dev/ttyUSB1`), use the stable `/dev/serial/by-id/` path instead:
|
||||
|
||||
```bash
|
||||
# List available devices by ID
|
||||
ls -la /dev/serial/by-id/
|
||||
|
||||
# Example output:
|
||||
# usb-Silicon_Labs_CP2102N_USB_to_UART_Bridge_abc123-if00-port0 -> ../../ttyUSB0
|
||||
|
||||
# Configure using the stable ID
|
||||
SERIAL_PORT=/dev/serial/by-id/usb-Silicon_Labs_CP2102N_USB_to_UART_Bridge_abc123-if00-port0
|
||||
```
|
||||
|
||||
### Manual Installation
|
||||
|
||||
```bash
|
||||
# Create virtual environment
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate # Linux/macOS
|
||||
# .venv\Scripts\activate # Windows
|
||||
source .venv/bin/activate
|
||||
|
||||
# Install the package
|
||||
pip install -e ".[dev]"
|
||||
@@ -236,63 +256,12 @@ pip install -e ".[dev]"
|
||||
meshcore-hub db upgrade
|
||||
|
||||
# Start components (in separate terminals)
|
||||
meshcore-hub interface --mode receiver --port /dev/ttyUSB0
|
||||
meshcore-hub interface receiver --port /dev/ttyUSB0
|
||||
meshcore-hub collector
|
||||
meshcore-hub api
|
||||
meshcore-hub web
|
||||
```
|
||||
|
||||
## Updating an Existing Installation
|
||||
|
||||
To update MeshCore Hub to the latest version:
|
||||
|
||||
```bash
|
||||
# Navigate to your installation directory
|
||||
cd meshcore-hub
|
||||
|
||||
# Pull the latest code
|
||||
git pull
|
||||
|
||||
# Pull latest Docker images
|
||||
docker compose --profile all pull
|
||||
|
||||
# Recreate and restart services
|
||||
# For receiver/sender only installs:
|
||||
docker compose --profile receiver up -d --force-recreate
|
||||
|
||||
# For core services with MQTT:
|
||||
docker compose --profile mqtt --profile core up -d --force-recreate
|
||||
|
||||
# For core services without local MQTT:
|
||||
docker compose --profile core up -d --force-recreate
|
||||
|
||||
# For complete stack (all services):
|
||||
docker compose --profile mqtt --profile core --profile receiver up -d --force-recreate
|
||||
|
||||
# View logs to verify update
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
**Note:** Database migrations run automatically on collector startup, so no manual migration step is needed when using Docker.
|
||||
|
||||
For manual installations:
|
||||
|
||||
```bash
|
||||
# Pull latest code
|
||||
git pull
|
||||
|
||||
# Activate virtual environment
|
||||
source .venv/bin/activate
|
||||
|
||||
# Update dependencies
|
||||
pip install -e ".[dev]"
|
||||
|
||||
# Run database migrations
|
||||
meshcore-hub db upgrade
|
||||
|
||||
# Restart your services
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
All components are configured via environment variables. Create a `.env` file or export variables:
|
||||
@@ -310,6 +279,8 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
| `MQTT_PASSWORD` | *(none)* | MQTT password (optional) |
|
||||
| `MQTT_PREFIX` | `meshcore` | Topic prefix for all MQTT messages |
|
||||
| `MQTT_TLS` | `false` | Enable TLS/SSL for MQTT connection |
|
||||
| `MQTT_TRANSPORT` | `tcp` | MQTT transport (`tcp` or `websockets`) |
|
||||
| `MQTT_WS_PATH` | `/mqtt` | MQTT WebSocket path (used when `MQTT_TRANSPORT=websockets`) |
|
||||
|
||||
### Interface Settings
|
||||
|
||||
@@ -318,12 +289,50 @@ All components are configured via environment variables. Create a `.env` file or
|
||||
| `SERIAL_PORT` | `/dev/ttyUSB0` | Serial port for MeshCore device |
|
||||
| `SERIAL_BAUD` | `115200` | Serial baud rate |
|
||||
| `MESHCORE_DEVICE_NAME` | *(none)* | Device/node name set on startup (broadcast in advertisements) |
|
||||
| `NODE_ADDRESS` | *(none)* | Override for device public key (64-char hex string) |
|
||||
| `NODE_ADDRESS_SENDER` | *(none)* | Override for sender device public key |
|
||||
| `CONTACT_CLEANUP_ENABLED` | `true` | Enable automatic removal of stale contacts from companion node |
|
||||
| `CONTACT_CLEANUP_DAYS` | `7` | Remove contacts not advertised for this many days |
|
||||
|
||||
### Collector Settings
|
||||
|
||||
The database is stored in `{DATA_HOME}/collector/meshcore.db` by default.
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `COLLECTOR_INGEST_MODE` | `native` | Ingest mode (`native` or `letsmesh_upload`) |
|
||||
| `COLLECTOR_LETSMESH_DECODER_ENABLED` | `true` | Enable external LetsMesh packet decoding |
|
||||
| `COLLECTOR_LETSMESH_DECODER_COMMAND` | `meshcore-decoder` | Decoder CLI command |
|
||||
| `COLLECTOR_LETSMESH_DECODER_KEYS` | *(none)* | Additional decoder channel keys (`label=hex`, `label:hex`, or `hex`) |
|
||||
| `COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS` | `2.0` | Timeout per decoder invocation |
|
||||
|
||||
#### Webhook Configuration
|
||||
#### LetsMesh Upload Compatibility Mode
|
||||
|
||||
When `COLLECTOR_INGEST_MODE=letsmesh_upload`, the collector subscribes to:
|
||||
|
||||
- `<prefix>/+/packets`
|
||||
- `<prefix>/+/status`
|
||||
- `<prefix>/+/internal`
|
||||
|
||||
Normalization behavior:
|
||||
|
||||
- `status` packets are stored as informational `letsmesh_status` events and are not mapped to `advertisement` rows.
|
||||
- Decoder payload type `4` is mapped to `advertisement` when node identity metadata is present.
|
||||
- Decoder payload type `11` (control discover response) is mapped to `contact`.
|
||||
- Decoder payload type `9` is mapped to `trace_data`.
|
||||
- Decoder payload type `8` is mapped to informational `path_updated` events.
|
||||
- Decoder payload type `1` can map to native response events (`telemetry_response`, `battery`, `path_updated`, `status_response`) when decrypted structured content is available.
|
||||
- `packet_type=5` packets are mapped to `channel_msg_recv`.
|
||||
- `packet_type=1`, `2`, and `7` packets are mapped to `contact_msg_recv` when decryptable text is available.
|
||||
- For channel packets, if a channel key is available, a channel label is attached (for example `Public` or `#test`) for UI display.
|
||||
- In the messages feed and dashboard channel sections, known channel indexes are preferred for labels (`17 -> Public`, `217 -> #test`) to avoid stale channel-name mismatches.
|
||||
- Additional channel names are loaded from `COLLECTOR_LETSMESH_DECODER_KEYS` when entries are provided as `label=hex` (for example `bot=<key>`).
|
||||
- Decoder-advertisement packets with location metadata update node GPS (`lat/lon`) for map display.
|
||||
- This keeps advertisement listings closer to native mode behavior (node advert traffic only, not observer status telemetry).
|
||||
- Packets without decryptable message text are kept as informational `letsmesh_packet` events and are not shown in the messages feed; when decode succeeds the decoded JSON is attached to those packet log events.
|
||||
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message` before storage; receiver/observer names are never used as sender fallback.
|
||||
- The collector keeps built-in keys for `Public` and `#test`, and merges any additional keys from `COLLECTOR_LETSMESH_DECODER_KEYS`.
|
||||
- Docker runtime installs `@michaelhart/meshcore-decoder@0.2.7` and applies `patches/@michaelhart+meshcore-decoder+0.2.7.patch` via `patch-package` for Node compatibility.
|
||||
|
||||
### Webhooks
|
||||
|
||||
The collector can forward certain events to external HTTP endpoints:
|
||||
|
||||
@@ -334,7 +343,9 @@ The collector can forward certain events to external HTTP endpoints:
|
||||
| `WEBHOOK_MESSAGE_URL` | *(none)* | Webhook URL for all message events |
|
||||
| `WEBHOOK_MESSAGE_SECRET` | *(none)* | Secret for message webhook |
|
||||
| `WEBHOOK_CHANNEL_MESSAGE_URL` | *(none)* | Override URL for channel messages only |
|
||||
| `WEBHOOK_CHANNEL_MESSAGE_SECRET` | *(none)* | Secret for channel message webhook |
|
||||
| `WEBHOOK_DIRECT_MESSAGE_URL` | *(none)* | Override URL for direct messages only |
|
||||
| `WEBHOOK_DIRECT_MESSAGE_SECRET` | *(none)* | Secret for direct message webhook |
|
||||
| `WEBHOOK_TIMEOUT` | `10.0` | Request timeout in seconds |
|
||||
| `WEBHOOK_MAX_RETRIES` | `3` | Max retry attempts on failure |
|
||||
| `WEBHOOK_RETRY_BACKOFF` | `2.0` | Exponential backoff multiplier |
|
||||
@@ -348,7 +359,7 @@ Webhook payload format:
|
||||
}
|
||||
```
|
||||
|
||||
#### Data Retention
|
||||
### Data Retention
|
||||
|
||||
The collector automatically cleans up old event data and inactive nodes:
|
||||
|
||||
@@ -368,6 +379,8 @@ The collector automatically cleans up old event data and inactive nodes:
|
||||
| `API_PORT` | `8000` | API port |
|
||||
| `API_READ_KEY` | *(none)* | Read-only API key |
|
||||
| `API_ADMIN_KEY` | *(none)* | Admin API key (required for commands) |
|
||||
| `METRICS_ENABLED` | `true` | Enable Prometheus metrics endpoint at `/metrics` |
|
||||
| `METRICS_CACHE_TTL` | `60` | Seconds to cache metrics output (reduces database load) |
|
||||
|
||||
### Web Dashboard Settings
|
||||
|
||||
@@ -376,6 +389,15 @@ The collector automatically cleans up old event data and inactive nodes:
|
||||
| `WEB_HOST` | `0.0.0.0` | Web server bind address |
|
||||
| `WEB_PORT` | `8080` | Web server port |
|
||||
| `API_BASE_URL` | `http://localhost:8000` | API endpoint URL |
|
||||
| `API_KEY` | *(none)* | API key for web dashboard queries (optional) |
|
||||
| `WEB_THEME` | `dark` | Default theme (`dark` or `light`). Users can override via theme toggle in navbar. |
|
||||
| `WEB_LOCALE` | `en` | Locale/language for the web dashboard (e.g., `en`, `es`, `fr`) |
|
||||
| `WEB_DATETIME_LOCALE` | `en-US` | Locale used for date formatting in the web dashboard (e.g., `en-US` for MM/DD/YYYY, `en-GB` for DD/MM/YYYY). |
|
||||
| `WEB_AUTO_REFRESH_SECONDS` | `30` | Auto-refresh interval in seconds for list pages (0 to disable) |
|
||||
| `WEB_ADMIN_ENABLED` | `false` | Enable admin interface at /a/ (requires auth proxy: `X-Forwarded-User`/`X-Auth-Request-User` or forwarded `Authorization: Basic ...`) |
|
||||
| `WEB_TRUSTED_PROXY_HOSTS` | `*` | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled. |
|
||||
| `TZ` | `UTC` | Timezone for displaying dates/times (e.g., `America/New_York`, `Europe/London`) |
|
||||
| `NETWORK_DOMAIN` | *(none)* | Network domain name (optional) |
|
||||
| `NETWORK_NAME` | `MeshCore Network` | Display name for the network |
|
||||
| `NETWORK_CITY` | *(none)* | City where network is located |
|
||||
| `NETWORK_COUNTRY` | *(none)* | Country code (ISO 3166-1 alpha-2) |
|
||||
@@ -384,51 +406,148 @@ The collector automatically cleans up old event data and inactive nodes:
|
||||
| `NETWORK_CONTACT_EMAIL` | *(none)* | Contact email address |
|
||||
| `NETWORK_CONTACT_DISCORD` | *(none)* | Discord server link |
|
||||
| `NETWORK_CONTACT_GITHUB` | *(none)* | GitHub repository URL |
|
||||
| `NETWORK_CONTACT_YOUTUBE` | *(none)* | YouTube channel URL |
|
||||
| `CONTENT_HOME` | `./content` | Directory containing custom content (pages/, media/) |
|
||||
|
||||
## CLI Reference
|
||||
Timezone handling note:
|
||||
- API timestamps that omit an explicit timezone suffix are treated as UTC before rendering in the configured `TZ`.
|
||||
|
||||
#### Nginx Proxy Manager (NPM) Admin Setup
|
||||
|
||||
Use two hostnames so the public map/site stays open while admin stays protected:
|
||||
|
||||
1. Public host: no Access List (normal users).
|
||||
2. Admin host: Access List enabled (operators only).
|
||||
|
||||
Both proxy hosts should forward to the same web container:
|
||||
- Scheme: `http`
|
||||
- Forward Hostname/IP: your MeshCore Hub host
|
||||
- Forward Port: `18080` (or your mapped web port)
|
||||
- Websockets Support: `ON`
|
||||
- Block Common Exploits: `ON`
|
||||
|
||||
Important:
|
||||
- Do not host this app under a subpath (for example `/meshcore`); proxy it at `/`.
|
||||
- `WEB_ADMIN_ENABLED` must be `true`.
|
||||
|
||||
In NPM, for the **admin host**, paste this in the `Advanced` field:
|
||||
|
||||
```nginx
|
||||
# Forward authenticated identity for MeshCore Hub admin checks
|
||||
proxy_set_header Authorization $http_authorization;
|
||||
proxy_set_header X-Forwarded-User $remote_user;
|
||||
proxy_set_header X-Auth-Request-User $remote_user;
|
||||
proxy_set_header X-Forwarded-Email "";
|
||||
proxy_set_header X-Forwarded-Groups "";
|
||||
```
|
||||
|
||||
Then attach your NPM Access List (Basic auth users) to that admin host.
|
||||
|
||||
Verify auth forwarding:
|
||||
|
||||
```bash
|
||||
# Show help
|
||||
meshcore-hub --help
|
||||
curl -s -u 'admin:password' "https://admin.example.com/config.js?t=$(date +%s)" \
|
||||
| grep -o '"is_authenticated":[^,]*'
|
||||
```
|
||||
|
||||
# Interface component
|
||||
meshcore-hub interface --mode receiver --port /dev/ttyUSB0
|
||||
meshcore-hub interface --mode receiver --device-name "Gateway Node" # Set device name
|
||||
meshcore-hub interface --mode sender --mock # Use mock device
|
||||
Expected:
|
||||
|
||||
# Collector component
|
||||
meshcore-hub collector # Run collector
|
||||
meshcore-hub collector seed # Import all seed data from SEED_HOME
|
||||
meshcore-hub collector import-tags # Import node tags from SEED_HOME/node_tags.yaml
|
||||
meshcore-hub collector import-tags /path/to/file.yaml # Import from specific file
|
||||
meshcore-hub collector import-members # Import members from SEED_HOME/members.yaml
|
||||
meshcore-hub collector import-members /path/to/file.yaml # Import from specific file
|
||||
```text
|
||||
"is_authenticated": true
|
||||
```
|
||||
|
||||
# API component
|
||||
meshcore-hub api --host 0.0.0.0 --port 8000
|
||||
If it still shows `false`, check:
|
||||
1. You are using the admin hostname, not the public hostname.
|
||||
2. The Access List is attached to that admin host.
|
||||
3. The `Advanced` block above is present exactly.
|
||||
4. `WEB_ADMIN_ENABLED=true` is loaded in the running web container.
|
||||
|
||||
# Web dashboard
|
||||
meshcore-hub web --port 8080 --network-name "My Network"
|
||||
#### Feature Flags
|
||||
|
||||
# Database management
|
||||
meshcore-hub db upgrade # Run migrations
|
||||
meshcore-hub db downgrade # Rollback one migration
|
||||
meshcore-hub db current # Show current revision
|
||||
Control which pages are visible in the web dashboard. Disabled features are fully hidden: removed from navigation, return 404 on their routes, and excluded from sitemap/robots.txt.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `FEATURE_DASHBOARD` | `true` | Enable the `/dashboard` page |
|
||||
| `FEATURE_NODES` | `true` | Enable the `/nodes` pages (list, detail, short links) |
|
||||
| `FEATURE_ADVERTISEMENTS` | `true` | Enable the `/advertisements` page |
|
||||
| `FEATURE_MESSAGES` | `true` | Enable the `/messages` page |
|
||||
| `FEATURE_MAP` | `true` | Enable the `/map` page and `/map/data` endpoint |
|
||||
| `FEATURE_MEMBERS` | `true` | Enable the `/members` page |
|
||||
| `FEATURE_PAGES` | `true` | Enable custom markdown pages |
|
||||
|
||||
**Dependencies:** Dashboard auto-disables when all of Nodes/Advertisements/Messages are disabled. Map auto-disables when Nodes is disabled.
|
||||
|
||||
### Custom Content
|
||||
|
||||
The web dashboard supports custom content including markdown pages and media files. Content is organized in subdirectories:
|
||||
|
||||
Custom logo options:
|
||||
- `logo.svg` — full-color logo, displayed as-is in both themes (no automatic darkening)
|
||||
- `logo-invert.svg` — monochrome/two-tone logo, automatically darkened in light mode for visibility
|
||||
```
|
||||
content/
|
||||
├── pages/ # Custom markdown pages
|
||||
│ └── about.md
|
||||
└── media/ # Custom media files
|
||||
└── images/
|
||||
├── logo.svg # Full-color custom logo (default)
|
||||
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
|
||||
```
|
||||
|
||||
**Setup:**
|
||||
```bash
|
||||
# Create content directory structure
|
||||
mkdir -p content/pages content/media
|
||||
|
||||
# Create a custom page
|
||||
cat > content/pages/about.md << 'EOF'
|
||||
---
|
||||
title: About Us
|
||||
slug: about
|
||||
menu_order: 10
|
||||
---
|
||||
|
||||
# About Our Network
|
||||
|
||||
Welcome to our MeshCore mesh network!
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Get a compatible LoRa device
|
||||
2. Flash MeshCore firmware
|
||||
3. Configure your radio settings
|
||||
EOF
|
||||
```
|
||||
|
||||
**Frontmatter fields:**
|
||||
| Field | Default | Description |
|
||||
|-------|---------|-------------|
|
||||
| `title` | Filename titlecased | Browser tab title and navigation link text (not rendered on page) |
|
||||
| `slug` | Filename without `.md` | URL path (e.g., `about` → `/pages/about`) |
|
||||
| `menu_order` | `100` | Sort order in navigation (lower = earlier) |
|
||||
|
||||
The markdown content is rendered as-is, so include your own `# Heading` if desired.
|
||||
|
||||
Pages automatically appear in the navigation menu and sitemap. With Docker, mount the content directory:
|
||||
|
||||
```yaml
|
||||
# docker-compose.yml (already configured)
|
||||
volumes:
|
||||
- ${CONTENT_HOME:-./content}:/content:ro
|
||||
environment:
|
||||
- CONTENT_HOME=/content
|
||||
```
|
||||
|
||||
## Seed Data
|
||||
|
||||
The database can be seeded with node tags and network members from YAML files in the `SEED_HOME` directory (default: `./seed`).
|
||||
|
||||
### Running the Seed Process
|
||||
#### Running the Seed Process
|
||||
|
||||
Seeding is a separate process and must be run explicitly:
|
||||
|
||||
```bash
|
||||
# Native CLI
|
||||
meshcore-hub collector seed
|
||||
|
||||
# With Docker Compose
|
||||
docker compose --profile seed up
|
||||
```
|
||||
|
||||
@@ -436,7 +555,7 @@ This imports data from the following files (if they exist):
|
||||
- `{SEED_HOME}/node_tags.yaml` - Node tag definitions
|
||||
- `{SEED_HOME}/members.yaml` - Network member definitions
|
||||
|
||||
### Directory Structure
|
||||
#### Directory Structure
|
||||
|
||||
```
|
||||
seed/ # SEED_HOME (seed data files)
|
||||
@@ -450,60 +569,45 @@ data/ # DATA_HOME (runtime data)
|
||||
|
||||
Example seed files are provided in `example/seed/`.
|
||||
|
||||
## Node Tags
|
||||
### Node Tags
|
||||
|
||||
Node tags allow you to attach custom metadata to nodes (e.g., location, role, owner). Tags are stored in the database and returned with node data via the API.
|
||||
|
||||
### Node Tags YAML Format
|
||||
#### Node Tags YAML Format
|
||||
|
||||
Tags are keyed by public key in YAML format:
|
||||
|
||||
```yaml
|
||||
# Each key is a 64-character hex public key
|
||||
0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef:
|
||||
friendly_name: Gateway Node
|
||||
name: Gateway Node
|
||||
description: Main network gateway
|
||||
role: gateway
|
||||
lat: 37.7749
|
||||
lon: -122.4194
|
||||
is_online: true
|
||||
member_id: alice
|
||||
|
||||
fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210:
|
||||
friendly_name: Oakland Repeater
|
||||
altitude: 150
|
||||
location:
|
||||
value: "37.8044,-122.2712"
|
||||
type: coordinate
|
||||
name: Oakland Repeater
|
||||
elevation: 150
|
||||
```
|
||||
|
||||
Tag values can be:
|
||||
- **YAML primitives** (auto-detected type): strings, numbers, booleans
|
||||
- **Explicit type** (for special types like coordinate):
|
||||
- **Explicit type** (when you need to force a specific type):
|
||||
```yaml
|
||||
location:
|
||||
value: "37.7749,-122.4194"
|
||||
type: coordinate
|
||||
altitude:
|
||||
value: "150"
|
||||
type: number
|
||||
```
|
||||
|
||||
Supported types: `string`, `number`, `boolean`, `coordinate`
|
||||
Supported types: `string`, `number`, `boolean`
|
||||
|
||||
### Import Tags Manually
|
||||
|
||||
```bash
|
||||
# Import from default location ({SEED_HOME}/node_tags.yaml)
|
||||
meshcore-hub collector import-tags
|
||||
|
||||
# Import from specific file
|
||||
meshcore-hub collector import-tags /path/to/node_tags.yaml
|
||||
|
||||
# Skip tags for nodes that don't exist
|
||||
meshcore-hub collector import-tags --no-create-nodes
|
||||
```
|
||||
|
||||
## Network Members
|
||||
### Network Members
|
||||
|
||||
Network members represent the people operating nodes in your network. Members can optionally be linked to nodes via their public key.
|
||||
|
||||
### Members YAML Format
|
||||
#### Members YAML Format
|
||||
|
||||
```yaml
|
||||
- member_id: walshie86
|
||||
@@ -528,44 +632,6 @@ Network members represent the people operating nodes in your network. Members ca
|
||||
| `contact` | No | Contact information |
|
||||
| `public_key` | No | Associated node public key (64-char hex) |
|
||||
|
||||
### Import Members Manually
|
||||
|
||||
```bash
|
||||
# Import from default location ({SEED_HOME}/members.yaml)
|
||||
meshcore-hub collector import-members
|
||||
|
||||
# Import from specific file
|
||||
meshcore-hub collector import-members /path/to/members.yaml
|
||||
```
|
||||
|
||||
### Managing Tags via API
|
||||
|
||||
Tags can also be managed via the REST API:
|
||||
|
||||
```bash
|
||||
# List tags for a node
|
||||
curl http://localhost:8000/api/v1/nodes/{public_key}/tags
|
||||
|
||||
# Create a tag (requires admin key)
|
||||
curl -X POST \
|
||||
-H "Authorization: Bearer <API_ADMIN_KEY>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"key": "location", "value": "Building A"}' \
|
||||
http://localhost:8000/api/v1/nodes/{public_key}/tags
|
||||
|
||||
# Update a tag
|
||||
curl -X PUT \
|
||||
-H "Authorization: Bearer <API_ADMIN_KEY>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"value": "Building B"}' \
|
||||
http://localhost:8000/api/v1/nodes/{public_key}/tags/location
|
||||
|
||||
# Delete a tag
|
||||
curl -X DELETE \
|
||||
-H "Authorization: Bearer <API_ADMIN_KEY>" \
|
||||
http://localhost:8000/api/v1/nodes/{public_key}/tags/location
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
When running, the API provides interactive documentation at:
|
||||
@@ -578,6 +644,7 @@ Health check endpoints are also available:
|
||||
|
||||
- **Health**: http://localhost:8000/health
|
||||
- **Ready**: http://localhost:8000/health/ready (includes database check)
|
||||
- **Metrics**: http://localhost:8000/metrics (Prometheus format)
|
||||
|
||||
### Authentication
|
||||
|
||||
@@ -601,15 +668,21 @@ curl -X POST \
|
||||
|--------|----------|-------------|
|
||||
| GET | `/api/v1/nodes` | List all known nodes |
|
||||
| GET | `/api/v1/nodes/{public_key}` | Get node details |
|
||||
| GET | `/api/v1/nodes/prefix/{prefix}` | Get node by public key prefix |
|
||||
| GET | `/api/v1/nodes/{public_key}/tags` | Get node tags |
|
||||
| POST | `/api/v1/nodes/{public_key}/tags` | Create node tag |
|
||||
| GET | `/api/v1/messages` | List messages with filters |
|
||||
| GET | `/api/v1/advertisements` | List advertisements |
|
||||
| GET | `/api/v1/telemetry` | List telemetry data |
|
||||
| GET | `/api/v1/trace-paths` | List trace paths |
|
||||
| GET | `/api/v1/members` | List network members |
|
||||
| POST | `/api/v1/commands/send-message` | Send direct message |
|
||||
| POST | `/api/v1/commands/send-channel-message` | Send channel message |
|
||||
| POST | `/api/v1/commands/send-advertisement` | Send advertisement |
|
||||
| GET | `/api/v1/dashboard/stats` | Get network statistics |
|
||||
| GET | `/api/v1/dashboard/activity` | Get daily advertisement activity |
|
||||
| GET | `/api/v1/dashboard/message-activity` | Get daily message activity |
|
||||
| GET | `/api/v1/dashboard/node-count` | Get cumulative node count history |
|
||||
|
||||
## Development
|
||||
|
||||
@@ -673,14 +746,27 @@ meshcore-hub/
|
||||
│ ├── collector/ # MQTT event collector
|
||||
│ ├── api/ # REST API
|
||||
│ └── web/ # Web dashboard
|
||||
│ ├── templates/ # Jinja2 templates (SPA shell)
|
||||
│ └── static/
|
||||
│ ├── js/spa/ # SPA frontend (ES modules, lit-html)
|
||||
│ └── locales/ # Translation files (en.json, languages.md)
|
||||
├── tests/ # Test suite
|
||||
├── alembic/ # Database migrations
|
||||
├── etc/ # Configuration files (mosquitto.conf)
|
||||
├── example/ # Example files for testing
|
||||
│ └── seed/ # Example seed data files
|
||||
│ ├── node_tags.yaml # Example node tags
|
||||
│ └── members.yaml # Example network members
|
||||
├── etc/ # Configuration files (MQTT, Prometheus, Alertmanager)
|
||||
├── example/ # Example files for reference
|
||||
│ ├── seed/ # Example seed data files
|
||||
│ │ ├── node_tags.yaml # Example node tags
|
||||
│ │ └── members.yaml # Example network members
|
||||
│ └── content/ # Example custom content
|
||||
│ ├── pages/ # Example custom pages
|
||||
│ │ └── join.md # Example join page
|
||||
│ └── media/ # Example media files
|
||||
│ └── images/ # Custom images
|
||||
├── seed/ # Seed data directory (SEED_HOME, copy from example/seed/)
|
||||
├── content/ # Custom content directory (CONTENT_HOME, optional)
|
||||
│ ├── pages/ # Custom markdown pages
|
||||
│ └── media/ # Custom media files
|
||||
│ └── images/ # Custom images (logo.svg/png/jpg/jpeg/webp replace default logo)
|
||||
├── data/ # Runtime data directory (DATA_HOME, created at runtime)
|
||||
├── Dockerfile # Docker build configuration
|
||||
├── docker-compose.yml # Docker Compose services
|
||||
|
||||
47
SCHEMAS.md
47
SCHEMAS.md
@@ -45,15 +45,19 @@ Node advertisements announcing presence and metadata.
|
||||
"public_key": "string (64 hex chars)",
|
||||
"name": "string (optional)",
|
||||
"adv_type": "string (optional)",
|
||||
"flags": "integer (optional)"
|
||||
"flags": "integer (optional)",
|
||||
"lat": "number (optional)",
|
||||
"lon": "number (optional)"
|
||||
}
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `public_key`: Node's full 64-character hexadecimal public key (required)
|
||||
- `name`: Node name/alias (e.g., "Gateway-01", "Alice")
|
||||
- `adv_type`: Node type - one of: `"chat"`, `"repeater"`, `"room"`, `"none"`
|
||||
- `adv_type`: Node type - common values: `"chat"`, `"repeater"`, `"room"`, `"companion"` (other values may appear from upstream feeds and are normalized by the collector when possible)
|
||||
- `flags`: Node capability/status flags (bitmask)
|
||||
- `lat`: GPS latitude when provided by decoder metadata
|
||||
- `lon`: GPS longitude when provided by decoder metadata
|
||||
|
||||
**Example**:
|
||||
```json
|
||||
@@ -61,7 +65,9 @@ Node advertisements announcing presence and metadata.
|
||||
"public_key": "4767c2897c256df8d85a5fa090574284bfd15b92d47359741b0abd5098ed30c4",
|
||||
"name": "Gateway-01",
|
||||
"adv_type": "repeater",
|
||||
"flags": 218
|
||||
"flags": 218,
|
||||
"lat": 42.470001,
|
||||
"lon": -71.330001
|
||||
}
|
||||
```
|
||||
|
||||
@@ -90,7 +96,7 @@ Direct/private messages between two nodes.
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key (or source hash prefix in compatibility ingest modes)
|
||||
- `path_len`: Number of hops message traveled
|
||||
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
|
||||
- `signature`: Message signature (8 hex chars) when `txt_type=2`
|
||||
@@ -128,7 +134,9 @@ Group/broadcast messages on specific channels.
|
||||
**Payload Schema**:
|
||||
```json
|
||||
{
|
||||
"channel_idx": "integer",
|
||||
"channel_idx": "integer (optional)",
|
||||
"channel_name": "string (optional)",
|
||||
"pubkey_prefix": "string (12 chars, optional)",
|
||||
"path_len": "integer (optional)",
|
||||
"txt_type": "integer (optional)",
|
||||
"signature": "string (optional)",
|
||||
@@ -139,7 +147,9 @@ Group/broadcast messages on specific channels.
|
||||
```
|
||||
|
||||
**Field Descriptions**:
|
||||
- `channel_idx`: Channel number (0-255)
|
||||
- `channel_idx`: Channel number (0-255) when available
|
||||
- `channel_name`: Channel display label (e.g., `"Public"`, `"#test"`) when available
|
||||
- `pubkey_prefix`: First 12 characters of sender's public key when available
|
||||
- `path_len`: Number of hops message traveled
|
||||
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
|
||||
- `signature`: Message signature (8 hex chars) when `txt_type=2`
|
||||
@@ -166,6 +176,25 @@ Group/broadcast messages on specific channels.
|
||||
- Send only text: `$.data.text`
|
||||
- Send channel + text: `$.data.[channel_idx,text]`
|
||||
|
||||
**Compatibility ingest note**:
|
||||
- In LetsMesh upload compatibility mode, packet type `5` is normalized to `CHANNEL_MSG_RECV` and packet types `1`, `2`, and `7` are normalized to `CONTACT_MSG_RECV` when decryptable text is available.
|
||||
- LetsMesh packets without decryptable message text are treated as informational `letsmesh_packet` events instead of message events.
|
||||
- For UI labels, known channel indexes are mapped (`17 -> Public`, `217 -> #test`) and preferred over ambiguous/stale channel-name hints.
|
||||
- Additional channel labels can be provided through `COLLECTOR_LETSMESH_DECODER_KEYS` using `label=hex` entries.
|
||||
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message`; sender identity remains unknown when only hash/prefix metadata is available.
|
||||
|
||||
**Compatibility ingest note (advertisements)**:
|
||||
- In LetsMesh upload compatibility mode, `status` feed payloads are persisted as informational `letsmesh_status` events and are not normalized to `ADVERTISEMENT`.
|
||||
- In LetsMesh upload compatibility mode, decoded payload type `4` is normalized to `ADVERTISEMENT` when node identity metadata is present.
|
||||
- Payload type `4` location metadata (`appData.location.latitude/longitude`) is mapped to node `lat/lon` for map rendering.
|
||||
- This keeps advertisement persistence aligned with native mode expectations (advertisement traffic only).
|
||||
|
||||
**Compatibility ingest note (non-message structured events)**:
|
||||
- Decoded payload type `9` is normalized to `TRACE_DATA` (`traceTag`, flags, auth, path hashes, and SNR values).
|
||||
- Decoded payload type `11` (`Control/NodeDiscoverResp`) is normalized to `contact` events for node upsert parity.
|
||||
- Decoded payload type `8` is normalized to informational `PATH_UPDATED` events (`hop_count` + path hashes).
|
||||
- Decoded payload type `1` can be normalized to `TELEMETRY_RESPONSE`, `BATTERY`, `PATH_UPDATED`, or `STATUS_RESPONSE` when decrypted response content is structured and parseable.
|
||||
|
||||
---
|
||||
|
||||
## Persisted Events (Non-Webhook)
|
||||
@@ -196,7 +225,7 @@ Network trace path results showing route and signal strength.
|
||||
- `path_len`: Length of the path
|
||||
- `flags`: Trace flags/options
|
||||
- `auth`: Authentication/validation data
|
||||
- `path_hashes`: Array of 2-character node hash identifiers (ordered by hops)
|
||||
- `path_hashes`: Array of hex-encoded node hash identifiers, variable length (e.g., `"4a"` for single-byte, `"b3fa"` for multibyte), ordered by hops
|
||||
- `snr_values`: Array of SNR values corresponding to each hop
|
||||
- `hop_count`: Total number of hops
|
||||
|
||||
@@ -207,12 +236,14 @@ Network trace path results showing route and signal strength.
|
||||
"path_len": 3,
|
||||
"flags": 0,
|
||||
"auth": 1,
|
||||
"path_hashes": ["4a", "b3", "fa"],
|
||||
"path_hashes": ["4a", "b3fa", "02"],
|
||||
"snr_values": [25.3, 18.7, 12.4],
|
||||
"hop_count": 3
|
||||
}
|
||||
```
|
||||
|
||||
**Note**: MeshCore firmware v1.14+ supports multibyte path hashes. Older nodes use single-byte (2-character) hashes. Mixed-length hash arrays are expected in heterogeneous networks where nodes run different firmware versions.
|
||||
|
||||
**Webhook Trigger**: No
|
||||
**REST API**: `GET /api/v1/trace-paths`
|
||||
|
||||
|
||||
3
TASKS.md
3
TASKS.md
@@ -753,6 +753,9 @@ This document tracks implementation progress for the MeshCore Hub project. Each
|
||||
### Decisions Made
|
||||
*(Record architectural decisions and answers to clarifying questions here)*
|
||||
|
||||
- [x] LetsMesh/native advertisement parity: in `letsmesh_upload` mode, observer `status` feed stays informational (`letsmesh_status`) and does not populate `advertisements`.
|
||||
- [x] LetsMesh advertisement persistence source: decoded packet payload type `4` maps to `advertisement`; payload type `11` maps to `contact` parity updates.
|
||||
- [x] LetsMesh native-event parity extensions: payload type `9` maps to `trace_data`, payload type `8` maps to informational `path_updated`, and payload type `1` can map to response-style native events when decryptable structured content exists.
|
||||
- [ ] Q1 (MQTT Broker):
|
||||
- [ ] Q2 (Database):
|
||||
- [ ] Q3 (Web Dashboard Separation):
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
"""add lat lon columns to nodes
|
||||
|
||||
Revision ID: 4e2e787a1660
|
||||
Revises: aa1162502616
|
||||
Create Date: 2026-01-09 20:04:04.273741+00:00
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "4e2e787a1660"
|
||||
down_revision: Union[str, None] = "aa1162502616"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("nodes", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("lat", sa.Float(), nullable=True))
|
||||
batch_op.add_column(sa.Column("lon", sa.Float(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("nodes", schema=None) as batch_op:
|
||||
batch_op.drop_column("lon")
|
||||
batch_op.drop_column("lat")
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@@ -14,7 +14,7 @@ services:
|
||||
- "${MQTT_EXTERNAL_PORT:-1883}:1883"
|
||||
- "${MQTT_WS_PORT:-9001}:9001"
|
||||
volumes:
|
||||
- ./etc/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro
|
||||
# - ./etc/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro
|
||||
- mosquitto_data:/mosquitto/data
|
||||
- mosquitto_log:/mosquitto/log
|
||||
healthcheck:
|
||||
@@ -48,6 +48,8 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- SERIAL_PORT=${SERIAL_PORT:-/dev/ttyUSB0}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-}
|
||||
@@ -83,6 +85,8 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- SERIAL_PORT=${SERIAL_PORT_SENDER:-/dev/ttyUSB1}
|
||||
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
|
||||
- NODE_ADDRESS=${NODE_ADDRESS_SENDER:-}
|
||||
@@ -115,6 +119,8 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- MOCK_DEVICE=true
|
||||
- NODE_ADDRESS=${NODE_ADDRESS:-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef}
|
||||
command: ["interface", "receiver", "--mock"]
|
||||
@@ -139,10 +145,10 @@ services:
|
||||
- core
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
seed:
|
||||
db-migrate:
|
||||
condition: service_completed_successfully
|
||||
volumes:
|
||||
- ${DATA_HOME:-./data}:/data
|
||||
- hub_data:/data
|
||||
- ${SEED_HOME:-./seed}:/seed
|
||||
environment:
|
||||
- LOG_LEVEL=${LOG_LEVEL:-INFO}
|
||||
@@ -152,10 +158,15 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- COLLECTOR_INGEST_MODE=${COLLECTOR_INGEST_MODE:-native}
|
||||
- COLLECTOR_LETSMESH_DECODER_ENABLED=${COLLECTOR_LETSMESH_DECODER_ENABLED:-true}
|
||||
- COLLECTOR_LETSMESH_DECODER_COMMAND=${COLLECTOR_LETSMESH_DECODER_COMMAND:-meshcore-decoder}
|
||||
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
|
||||
- COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=${COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS:-2.0}
|
||||
- DATA_HOME=/data
|
||||
- SEED_HOME=/seed
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
- DATABASE_URL=
|
||||
# Webhook configuration
|
||||
- WEBHOOK_ADVERTISEMENT_URL=${WEBHOOK_ADVERTISEMENT_URL:-}
|
||||
- WEBHOOK_ADVERTISEMENT_SECRET=${WEBHOOK_ADVERTISEMENT_SECRET:-}
|
||||
@@ -196,15 +207,14 @@ services:
|
||||
- core
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
seed:
|
||||
db-migrate:
|
||||
condition: service_completed_successfully
|
||||
collector:
|
||||
condition: service_started
|
||||
ports:
|
||||
- "${API_PORT:-8000}:8000"
|
||||
volumes:
|
||||
# Mount data directory (uses collector/meshcore.db)
|
||||
- ${DATA_HOME:-./data}:/data
|
||||
- hub_data:/data
|
||||
environment:
|
||||
- LOG_LEVEL=${LOG_LEVEL:-INFO}
|
||||
- MQTT_HOST=${MQTT_HOST:-mqtt}
|
||||
@@ -213,13 +223,15 @@ services:
|
||||
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
|
||||
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
|
||||
- MQTT_TLS=${MQTT_TLS:-false}
|
||||
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
|
||||
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
|
||||
- DATA_HOME=/data
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
- DATABASE_URL=
|
||||
- API_HOST=0.0.0.0
|
||||
- API_PORT=8000
|
||||
- API_READ_KEY=${API_READ_KEY:-}
|
||||
- API_ADMIN_KEY=${API_ADMIN_KEY:-}
|
||||
- METRICS_ENABLED=${METRICS_ENABLED:-true}
|
||||
- METRICS_CACHE_TTL=${METRICS_CACHE_TTL:-60}
|
||||
command: ["api"]
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
|
||||
@@ -246,12 +258,20 @@ services:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "${WEB_PORT:-8080}:8080"
|
||||
volumes:
|
||||
- ${CONTENT_HOME:-./content}:/content:ro
|
||||
environment:
|
||||
- LOG_LEVEL=${LOG_LEVEL:-INFO}
|
||||
- API_BASE_URL=http://api:8000
|
||||
- API_KEY=${API_READ_KEY:-}
|
||||
# Use ADMIN key to allow write operations from admin interface
|
||||
# Falls back to READ key if ADMIN key is not set
|
||||
- API_KEY=${API_ADMIN_KEY:-${API_READ_KEY:-}}
|
||||
- WEB_HOST=0.0.0.0
|
||||
- WEB_PORT=8080
|
||||
- WEB_THEME=${WEB_THEME:-dark}
|
||||
- WEB_LOCALE=${WEB_LOCALE:-en}
|
||||
- WEB_DATETIME_LOCALE=${WEB_DATETIME_LOCALE:-en-US}
|
||||
- WEB_ADMIN_ENABLED=${WEB_ADMIN_ENABLED:-false}
|
||||
- NETWORK_NAME=${NETWORK_NAME:-MeshCore Network}
|
||||
- NETWORK_CITY=${NETWORK_CITY:-}
|
||||
- NETWORK_COUNTRY=${NETWORK_COUNTRY:-}
|
||||
@@ -259,7 +279,19 @@ services:
|
||||
- NETWORK_CONTACT_EMAIL=${NETWORK_CONTACT_EMAIL:-}
|
||||
- NETWORK_CONTACT_DISCORD=${NETWORK_CONTACT_DISCORD:-}
|
||||
- NETWORK_CONTACT_GITHUB=${NETWORK_CONTACT_GITHUB:-}
|
||||
- NETWORK_CONTACT_YOUTUBE=${NETWORK_CONTACT_YOUTUBE:-}
|
||||
- NETWORK_WELCOME_TEXT=${NETWORK_WELCOME_TEXT:-}
|
||||
- CONTENT_HOME=/content
|
||||
- TZ=${TZ:-UTC}
|
||||
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
|
||||
# Feature flags (set to false to disable specific pages)
|
||||
- FEATURE_DASHBOARD=${FEATURE_DASHBOARD:-true}
|
||||
- FEATURE_NODES=${FEATURE_NODES:-true}
|
||||
- FEATURE_ADVERTISEMENTS=${FEATURE_ADVERTISEMENTS:-true}
|
||||
- FEATURE_MESSAGES=${FEATURE_MESSAGES:-true}
|
||||
- FEATURE_MAP=${FEATURE_MAP:-true}
|
||||
- FEATURE_MEMBERS=${FEATURE_MEMBERS:-true}
|
||||
- FEATURE_PAGES=${FEATURE_PAGES:-true}
|
||||
command: ["web"]
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8080/health')"]
|
||||
@@ -283,16 +315,16 @@ services:
|
||||
- migrate
|
||||
restart: "no"
|
||||
volumes:
|
||||
# Mount data directory (uses collector/meshcore.db)
|
||||
- ${DATA_HOME:-./data}:/data
|
||||
- hub_data:/data
|
||||
environment:
|
||||
- DATA_HOME=/data
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
- DATABASE_URL=
|
||||
command: ["db", "upgrade"]
|
||||
|
||||
# ==========================================================================
|
||||
# Seed Data - Import node_tags.json and members.json from SEED_HOME
|
||||
# Seed Data - Import node_tags.yaml and members.yaml from SEED_HOME
|
||||
# NOTE: This is NOT run automatically. Use --profile seed to run explicitly.
|
||||
# Since tags are now managed via the admin UI, automatic seeding would
|
||||
# overwrite user changes.
|
||||
# ==========================================================================
|
||||
seed:
|
||||
image: ghcr.io/ipnet-mesh/meshcore-hub:${IMAGE_VERSION:-latest}
|
||||
@@ -301,32 +333,71 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
container_name: meshcore-seed
|
||||
profiles:
|
||||
- all
|
||||
- core
|
||||
- seed
|
||||
restart: "no"
|
||||
depends_on:
|
||||
db-migrate:
|
||||
condition: service_completed_successfully
|
||||
volumes:
|
||||
# Mount data directory for database (read-write)
|
||||
- ${DATA_HOME:-./data}:/data
|
||||
# Mount seed directory for seed files (read-only)
|
||||
- hub_data:/data
|
||||
- ${SEED_HOME:-./seed}:/seed:ro
|
||||
environment:
|
||||
- DATA_HOME=/data
|
||||
- SEED_HOME=/seed
|
||||
- LOG_LEVEL=${LOG_LEVEL:-INFO}
|
||||
# Explicitly unset to use DATA_HOME-based default path
|
||||
- DATABASE_URL=
|
||||
# Imports both node_tags.json and members.json if they exist
|
||||
# Imports both node_tags.yaml and members.yaml if they exist
|
||||
command: ["collector", "seed"]
|
||||
|
||||
# ==========================================================================
|
||||
# Prometheus - Metrics collection and monitoring (optional, use --profile metrics)
|
||||
# ==========================================================================
|
||||
prometheus:
|
||||
image: prom/prometheus:latest
|
||||
container_name: meshcore-prometheus
|
||||
profiles:
|
||||
- all
|
||||
- metrics
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
api:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "${PROMETHEUS_PORT:-9090}:9090"
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
- '--storage.tsdb.retention.time=30d'
|
||||
volumes:
|
||||
- ./etc/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
|
||||
- ./etc/prometheus/alerts.yml:/etc/prometheus/alerts.yml:ro
|
||||
- prometheus_data:/prometheus
|
||||
|
||||
# ==========================================================================
|
||||
# Alertmanager - Alert routing and notifications (optional, use --profile metrics)
|
||||
# ==========================================================================
|
||||
alertmanager:
|
||||
image: prom/alertmanager:latest
|
||||
container_name: meshcore-alertmanager
|
||||
profiles:
|
||||
- all
|
||||
- metrics
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "${ALERTMANAGER_PORT:-9093}:9093"
|
||||
volumes:
|
||||
- ./etc/alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro
|
||||
- alertmanager_data:/alertmanager
|
||||
command:
|
||||
- '--config.file=/etc/alertmanager/alertmanager.yml'
|
||||
- '--storage.path=/alertmanager'
|
||||
|
||||
# ==========================================================================
|
||||
# Volumes
|
||||
# ==========================================================================
|
||||
volumes:
|
||||
hub_data:
|
||||
name: meshcore_hub_data
|
||||
mosquitto_data:
|
||||
name: meshcore_mosquitto_data
|
||||
mosquitto_log:
|
||||
name: meshcore_mosquitto_log
|
||||
prometheus_data:
|
||||
name: meshcore_prometheus_data
|
||||
alertmanager_data:
|
||||
name: meshcore_alertmanager_data
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 124 KiB After Width: | Height: | Size: 238 KiB |
35
etc/alertmanager/alertmanager.yml
Normal file
35
etc/alertmanager/alertmanager.yml
Normal file
@@ -0,0 +1,35 @@
|
||||
# Alertmanager configuration for MeshCore Hub
|
||||
#
|
||||
# Default configuration routes all alerts to a "blackhole" receiver
|
||||
# (logs only, no external notifications).
|
||||
#
|
||||
# To receive notifications, configure a receiver below.
|
||||
# See: https://prometheus.io/docs/alerting/latest/configuration/
|
||||
#
|
||||
# Examples:
|
||||
#
|
||||
# Email:
|
||||
# receivers:
|
||||
# - name: 'email'
|
||||
# email_configs:
|
||||
# - to: 'admin@example.com'
|
||||
# from: 'alertmanager@example.com'
|
||||
# smarthost: 'smtp.example.com:587'
|
||||
# auth_username: 'alertmanager@example.com'
|
||||
# auth_password: 'password'
|
||||
#
|
||||
# Webhook (e.g. Slack incoming webhook, ntfy, Gotify):
|
||||
# receivers:
|
||||
# - name: 'webhook'
|
||||
# webhook_configs:
|
||||
# - url: 'https://example.com/webhook'
|
||||
|
||||
route:
|
||||
receiver: 'default'
|
||||
group_by: ['alertname']
|
||||
group_wait: 30s
|
||||
group_interval: 5m
|
||||
repeat_interval: 4h
|
||||
|
||||
receivers:
|
||||
- name: 'default'
|
||||
16
etc/prometheus/alerts.yml
Normal file
16
etc/prometheus/alerts.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
# Prometheus alert rules for MeshCore Hub
|
||||
#
|
||||
# These rules are evaluated by Prometheus and fired alerts are sent
|
||||
# to Alertmanager for routing and notification.
|
||||
|
||||
groups:
|
||||
- name: meshcore
|
||||
rules:
|
||||
- alert: NodeNotSeen
|
||||
expr: time() - meshcore_node_last_seen_timestamp_seconds{role="infra"} > 48 * 3600
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
annotations:
|
||||
summary: "Node {{ $labels.node_name }} ({{ $labels.role }}) not seen for 48+ hours"
|
||||
description: "Node {{ $labels.public_key }} ({{ $labels.adv_type }}, role={{ $labels.role }}) last seen {{ $value | humanizeDuration }} ago."
|
||||
29
etc/prometheus/prometheus.yml
Normal file
29
etc/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,29 @@
|
||||
# Prometheus scrape configuration for MeshCore Hub
|
||||
#
|
||||
# This file is used when running Prometheus via Docker Compose:
|
||||
# docker compose --profile core --profile metrics up -d
|
||||
#
|
||||
# The scrape interval matches the default metrics cache TTL (60s)
|
||||
# to avoid unnecessary database queries.
|
||||
|
||||
global:
|
||||
scrape_interval: 60s
|
||||
evaluation_interval: 60s
|
||||
|
||||
alerting:
|
||||
alertmanagers:
|
||||
- static_configs:
|
||||
- targets: ['alertmanager:9093']
|
||||
|
||||
rule_files:
|
||||
- 'alerts.yml'
|
||||
|
||||
scrape_configs:
|
||||
- job_name: 'meshcore-hub'
|
||||
metrics_path: '/metrics'
|
||||
# Uncomment basic_auth if API_READ_KEY is configured
|
||||
# basic_auth:
|
||||
# username: 'metrics'
|
||||
# password: '<API_READ_KEY>'
|
||||
static_configs:
|
||||
- targets: ['api:8000']
|
||||
61
example/content/media/images/logo-invert.svg
Normal file
61
example/content/media/images/logo-invert.svg
Normal file
@@ -0,0 +1,61 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
viewBox="0 0 115 100"
|
||||
width="115"
|
||||
height="100"
|
||||
version="1.1"
|
||||
id="svg4"
|
||||
sodipodi:docname="logo-dark.svg"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs4" />
|
||||
<sodipodi:namedview
|
||||
id="namedview4"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1" />
|
||||
<!-- I letter - muted -->
|
||||
<rect
|
||||
x="0"
|
||||
y="0"
|
||||
width="25"
|
||||
height="100"
|
||||
rx="2"
|
||||
fill="#ffffff"
|
||||
opacity="0.5"
|
||||
id="rect1" />
|
||||
<!-- P vertical stem -->
|
||||
<rect
|
||||
x="35"
|
||||
y="0"
|
||||
width="25"
|
||||
height="100"
|
||||
rx="2"
|
||||
fill="#ffffff"
|
||||
id="rect2" />
|
||||
<!-- WiFi arcs: center at mid-stem (90, 60), sweeping from right up to top -->
|
||||
<g
|
||||
fill="none"
|
||||
stroke="#ffffff"
|
||||
stroke-width="10"
|
||||
stroke-linecap="round"
|
||||
id="g4"
|
||||
transform="translate(-30,-10)">
|
||||
<path
|
||||
d="M 110,65 A 20,20 0 0 0 90,45"
|
||||
id="path2" />
|
||||
<path
|
||||
d="M 125,65 A 35,35 0 0 0 90,30"
|
||||
id="path3" />
|
||||
<path
|
||||
d="M 140,65 A 50,50 0 0 0 90,15"
|
||||
id="path4" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.4 KiB |
87
example/content/pages/join.md
Normal file
87
example/content/pages/join.md
Normal file
@@ -0,0 +1,87 @@
|
||||
---
|
||||
title: Join
|
||||
slug: join
|
||||
menu_order: 10
|
||||
---
|
||||
|
||||
# Getting Started with MeshCore
|
||||
|
||||
MeshCore is an open-source off-grid LoRa mesh networking platform. This guide will help you get connected to the network.
|
||||
|
||||
For detailed documentation, see the [MeshCore FAQ](https://github.com/meshcore-dev/MeshCore/blob/main/docs/faq.md).
|
||||
|
||||
## Node Types
|
||||
|
||||
MeshCore devices operate in different modes:
|
||||
|
||||
| Mode | Description |
|
||||
|------|-------------|
|
||||
| **Companion** | Connects to your phone via Bluetooth. Use this for messaging and interacting with the network. |
|
||||
| **Repeater** | Standalone node that extends network coverage. Place these in elevated locations for best results. |
|
||||
| **Room Server** | Hosts chat rooms that persist messages for offline users. |
|
||||
|
||||
Most users start with a **Companion** node paired to their phone.
|
||||
|
||||
## Frequency Regulations
|
||||
|
||||
MeshCore uses LoRa radio, which operates on unlicensed ISM bands. You **must** use the correct frequency for your region:
|
||||
|
||||
| Region | Frequency | Notes |
|
||||
|--------|-----------|-------|
|
||||
| Europe (EU) | 868 MHz | EU868 band |
|
||||
| United Kingdom | 868 MHz | Same as EU |
|
||||
| North America | 915 MHz | US915 band |
|
||||
| Australia | 915 MHz | AU915 band |
|
||||
|
||||
Using the wrong frequency is illegal and may cause interference. Check your local regulations.
|
||||
|
||||
## Compatible Hardware
|
||||
|
||||
MeshCore runs on inexpensive low-power LoRa devices. Popular options include:
|
||||
|
||||
### Recommended Devices
|
||||
|
||||
| Device | Manufacturer | Features |
|
||||
|--------|--------------|----------|
|
||||
| [Heltec V3](https://heltec.org/project/wifi-lora-32-v3/) | Heltec | Budget-friendly, OLED display |
|
||||
| [T114](https://heltec.org/project/mesh-node-t114/) | Heltec | Compact, GPS, colour display |
|
||||
| [T1000-E](https://www.seeedstudio.com/SenseCAP-Card-Tracker-T1000-E-for-Meshtastic-p-5913.html) | Seeed Studio | Credit-card sized, GPS, weatherproof |
|
||||
| [T-Deck Plus](https://www.lilygo.cc/products/t-deck-plus) | LilyGO | Built-in keyboard, touchscreen, GPS |
|
||||
|
||||
Ensure you purchase the correct frequency variant (868MHz for EU/UK, 915MHz for US/AU).
|
||||
|
||||
### Where to Buy
|
||||
|
||||
- **Heltec**: [Official Store](https://heltec.org/) or AliExpress
|
||||
- **LilyGO**: [Official Store](https://lilygo.cc/) or AliExpress
|
||||
- **Seeed Studio**: [Official Store](https://www.seeedstudio.com/)
|
||||
- **Amazon**: Search for device name + "LoRa 868" (or 915 for US)
|
||||
|
||||
## Mobile Apps
|
||||
|
||||
Connect to your Companion node using the official MeshCore apps:
|
||||
|
||||
| Platform | App | Link |
|
||||
|----------|-----|------|
|
||||
| Android | MeshCore | [Google Play](https://play.google.com/store/apps/details?id=com.liamcottle.meshcore.android) |
|
||||
| iOS | MeshCore | [App Store](https://apps.apple.com/us/app/meshcore/id6742354151) |
|
||||
|
||||
The app connects via Bluetooth to your Companion node, allowing you to send messages, view the network, and configure your device.
|
||||
|
||||
## Flashing Firmware
|
||||
|
||||
1. Use the [MeshCore Web Flasher](https://flasher.meshcore.co.uk/) for easy browser-based flashing
|
||||
2. Select your device type and region (frequency)
|
||||
3. Connect via USB and flash
|
||||
|
||||
## Next Steps
|
||||
|
||||
Once your device is flashed and paired:
|
||||
|
||||
1. Open the MeshCore app on your phone
|
||||
2. Enable Bluetooth and pair with your device
|
||||
3. Set your node name in the app settings
|
||||
4. Configure your radio settings/profile for your region
|
||||
4. You should start seeing other nodes on the network
|
||||
|
||||
Welcome to the mesh!
|
||||
@@ -7,12 +7,12 @@
|
||||
# elevation: 150 # number
|
||||
# is_online: true # boolean
|
||||
#
|
||||
# - Explicit type (for special types like coordinate):
|
||||
# location:
|
||||
# value: "37.7749,-122.4194"
|
||||
# type: coordinate
|
||||
# - Explicit type (when you need to force a specific type):
|
||||
# altitude:
|
||||
# value: "150"
|
||||
# type: number
|
||||
#
|
||||
# Supported types: string, number, boolean, coordinate
|
||||
# Supported types: string, number, boolean
|
||||
|
||||
0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef:
|
||||
friendly_name: Gateway Node
|
||||
|
||||
58
patches/@michaelhart+meshcore-decoder+0.2.7.patch
Normal file
58
patches/@michaelhart+meshcore-decoder+0.2.7.patch
Normal file
@@ -0,0 +1,58 @@
|
||||
diff --git a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
index d33ffd6..8d040d0 100644
|
||||
--- a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
+++ b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
|
||||
@@ -36,7 +36,27 @@ var __importStar = (this && this.__importStar) || (function () {
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Ed25519SignatureVerifier = void 0;
|
||||
-const ed25519 = __importStar(require("@noble/ed25519"));
|
||||
+let _ed25519 = null;
|
||||
+async function getEd25519() {
|
||||
+ if (_ed25519) {
|
||||
+ return _ed25519;
|
||||
+ }
|
||||
+ const mod = await import("@noble/ed25519");
|
||||
+ _ed25519 = mod.default ? mod.default : mod;
|
||||
+ try {
|
||||
+ _ed25519.etc.sha512Async = sha512Hash;
|
||||
+ }
|
||||
+ catch (error) {
|
||||
+ console.debug("Could not set async SHA-512:", error);
|
||||
+ }
|
||||
+ try {
|
||||
+ _ed25519.etc.sha512Sync = sha512HashSync;
|
||||
+ }
|
||||
+ catch (error) {
|
||||
+ console.debug("Could not set up synchronous SHA-512:", error);
|
||||
+ }
|
||||
+ return _ed25519;
|
||||
+}
|
||||
const hex_1 = require("../utils/hex");
|
||||
const orlp_ed25519_wasm_1 = require("./orlp-ed25519-wasm");
|
||||
// Cross-platform SHA-512 implementation
|
||||
@@ -90,16 +110,6 @@ function sha512HashSync(data) {
|
||||
throw new Error('No SHA-512 implementation available for synchronous operation');
|
||||
}
|
||||
}
|
||||
-// Set up SHA-512 for @noble/ed25519
|
||||
-ed25519.etc.sha512Async = sha512Hash;
|
||||
-// Always set up sync version - @noble/ed25519 requires it
|
||||
-// It will throw in browser environments, which @noble/ed25519 can handle
|
||||
-try {
|
||||
- ed25519.etc.sha512Sync = sha512HashSync;
|
||||
-}
|
||||
-catch (error) {
|
||||
- console.debug('Could not set up synchronous SHA-512:', error);
|
||||
-}
|
||||
class Ed25519SignatureVerifier {
|
||||
/**
|
||||
* Verify an Ed25519 signature for MeshCore advertisement packets
|
||||
@@ -116,6 +126,7 @@ class Ed25519SignatureVerifier {
|
||||
// Construct the signed message according to MeshCore format
|
||||
const message = this.constructAdvertSignedMessage(publicKeyHex, timestamp, appData);
|
||||
// Verify the signature using noble-ed25519
|
||||
+ const ed25519 = await getEd25519();
|
||||
return await ed25519.verify(signature, message, publicKey);
|
||||
}
|
||||
catch (error) {
|
||||
@@ -37,8 +37,11 @@ dependencies = [
|
||||
"python-multipart>=0.0.6",
|
||||
"httpx>=0.25.0",
|
||||
"aiosqlite>=0.19.0",
|
||||
"meshcore>=2.2.0",
|
||||
"meshcore>=2.3.0",
|
||||
"pyyaml>=6.0.0",
|
||||
"python-frontmatter>=1.0.0",
|
||||
"markdown>=3.5.0",
|
||||
"prometheus-client>=0.20.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
@@ -50,6 +53,7 @@ dev = [
|
||||
"flake8>=6.1.0",
|
||||
"mypy>=1.5.0",
|
||||
"pre-commit>=3.4.0",
|
||||
"beautifulsoup4>=4.12.0",
|
||||
"types-paho-mqtt>=1.6.0",
|
||||
"types-PyYAML>=6.0.0",
|
||||
]
|
||||
@@ -111,6 +115,9 @@ module = [
|
||||
"uvicorn.*",
|
||||
"alembic.*",
|
||||
"meshcore.*",
|
||||
"frontmatter.*",
|
||||
"markdown.*",
|
||||
"prometheus_client.*",
|
||||
]
|
||||
ignore_missing_imports = true
|
||||
|
||||
|
||||
6
renovate.json
Normal file
6
renovate.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended"
|
||||
]
|
||||
}
|
||||
@@ -51,9 +51,15 @@ def create_app(
|
||||
admin_key: str | None = None,
|
||||
mqtt_host: str = "localhost",
|
||||
mqtt_port: int = 1883,
|
||||
mqtt_username: str | None = None,
|
||||
mqtt_password: str | None = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
cors_origins: list[str] | None = None,
|
||||
metrics_enabled: bool = True,
|
||||
metrics_cache_ttl: int = 60,
|
||||
) -> FastAPI:
|
||||
"""Create and configure the FastAPI application.
|
||||
|
||||
@@ -63,9 +69,15 @@ def create_app(
|
||||
admin_key: Admin API key
|
||||
mqtt_host: MQTT broker host
|
||||
mqtt_port: MQTT broker port
|
||||
mqtt_username: MQTT username
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
cors_origins: Allowed CORS origins
|
||||
metrics_enabled: Enable Prometheus metrics endpoint at /metrics
|
||||
metrics_cache_ttl: Seconds to cache metrics output
|
||||
|
||||
Returns:
|
||||
Configured FastAPI application
|
||||
@@ -86,8 +98,13 @@ def create_app(
|
||||
app.state.admin_key = admin_key
|
||||
app.state.mqtt_host = mqtt_host
|
||||
app.state.mqtt_port = mqtt_port
|
||||
app.state.mqtt_username = mqtt_username
|
||||
app.state.mqtt_password = mqtt_password
|
||||
app.state.mqtt_prefix = mqtt_prefix
|
||||
app.state.mqtt_tls = mqtt_tls
|
||||
app.state.mqtt_transport = mqtt_transport
|
||||
app.state.mqtt_ws_path = mqtt_ws_path
|
||||
app.state.metrics_cache_ttl = metrics_cache_ttl
|
||||
|
||||
# Configure CORS
|
||||
if cors_origins is None:
|
||||
@@ -106,6 +123,12 @@ def create_app(
|
||||
|
||||
app.include_router(api_router, prefix="/api/v1")
|
||||
|
||||
# Include Prometheus metrics endpoint
|
||||
if metrics_enabled:
|
||||
from meshcore_hub.api.metrics import router as metrics_router
|
||||
|
||||
app.include_router(metrics_router)
|
||||
|
||||
# Health check endpoints
|
||||
@app.get("/health", tags=["Health"])
|
||||
async def health() -> dict:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Authentication middleware for the API."""
|
||||
|
||||
import hmac
|
||||
import logging
|
||||
from typing import Annotated
|
||||
|
||||
@@ -79,7 +80,9 @@ async def require_read(
|
||||
)
|
||||
|
||||
# Check if token matches any key
|
||||
if token == read_key or token == admin_key:
|
||||
if (read_key and hmac.compare_digest(token, read_key)) or (
|
||||
admin_key and hmac.compare_digest(token, admin_key)
|
||||
):
|
||||
return token
|
||||
|
||||
raise HTTPException(
|
||||
@@ -124,7 +127,7 @@ async def require_admin(
|
||||
)
|
||||
|
||||
# Check if token matches admin key
|
||||
if token == admin_key:
|
||||
if hmac.compare_digest(token, admin_key):
|
||||
return token
|
||||
|
||||
raise HTTPException(
|
||||
|
||||
@@ -60,11 +60,25 @@ import click
|
||||
envvar="MQTT_PORT",
|
||||
help="MQTT broker port",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-username",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MQTT_USERNAME",
|
||||
help="MQTT username",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-password",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="MQTT_PASSWORD",
|
||||
help="MQTT password",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-prefix",
|
||||
type=str,
|
||||
default="meshcore",
|
||||
envvar="MQTT_TOPIC_PREFIX",
|
||||
envvar=["MQTT_PREFIX", "MQTT_TOPIC_PREFIX"],
|
||||
help="MQTT topic prefix",
|
||||
)
|
||||
@click.option(
|
||||
@@ -74,6 +88,20 @@ import click
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-transport",
|
||||
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
|
||||
default="tcp",
|
||||
envvar="MQTT_TRANSPORT",
|
||||
help="MQTT transport protocol",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-ws-path",
|
||||
type=str,
|
||||
default="/mqtt",
|
||||
envvar="MQTT_WS_PATH",
|
||||
help="MQTT WebSocket path (used when transport=websockets)",
|
||||
)
|
||||
@click.option(
|
||||
"--cors-origins",
|
||||
type=str,
|
||||
@@ -81,6 +109,19 @@ import click
|
||||
envvar="CORS_ORIGINS",
|
||||
help="Comma-separated list of allowed CORS origins",
|
||||
)
|
||||
@click.option(
|
||||
"--metrics-enabled/--no-metrics",
|
||||
default=True,
|
||||
envvar="METRICS_ENABLED",
|
||||
help="Enable Prometheus metrics endpoint at /metrics",
|
||||
)
|
||||
@click.option(
|
||||
"--metrics-cache-ttl",
|
||||
type=int,
|
||||
default=60,
|
||||
envvar="METRICS_CACHE_TTL",
|
||||
help="Seconds to cache metrics output (reduces database load)",
|
||||
)
|
||||
@click.option(
|
||||
"--reload",
|
||||
is_flag=True,
|
||||
@@ -98,9 +139,15 @@ def api(
|
||||
admin_key: str | None,
|
||||
mqtt_host: str,
|
||||
mqtt_port: int,
|
||||
mqtt_username: str | None,
|
||||
mqtt_password: str | None,
|
||||
mqtt_prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
cors_origins: str | None,
|
||||
metrics_enabled: bool,
|
||||
metrics_cache_ttl: int,
|
||||
reload: bool,
|
||||
) -> None:
|
||||
"""Run the REST API server.
|
||||
@@ -146,9 +193,12 @@ def api(
|
||||
click.echo(f"Data home: {effective_data_home}")
|
||||
click.echo(f"Database: {effective_db_url}")
|
||||
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {mqtt_prefix})")
|
||||
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
|
||||
click.echo(f"Read key configured: {read_key is not None}")
|
||||
click.echo(f"Admin key configured: {admin_key is not None}")
|
||||
click.echo(f"CORS origins: {cors_origins or 'none'}")
|
||||
click.echo(f"Metrics enabled: {metrics_enabled}")
|
||||
click.echo(f"Metrics cache TTL: {metrics_cache_ttl}s")
|
||||
click.echo(f"Reload mode: {reload}")
|
||||
click.echo("=" * 50)
|
||||
|
||||
@@ -178,9 +228,15 @@ def api(
|
||||
admin_key=admin_key,
|
||||
mqtt_host=mqtt_host,
|
||||
mqtt_port=mqtt_port,
|
||||
mqtt_username=mqtt_username,
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
cors_origins=origins_list,
|
||||
metrics_enabled=metrics_enabled,
|
||||
metrics_cache_ttl=metrics_cache_ttl,
|
||||
)
|
||||
|
||||
click.echo("\nStarting API server...")
|
||||
|
||||
@@ -56,17 +56,25 @@ def get_mqtt_client(request: Request) -> MQTTClient:
|
||||
"""
|
||||
mqtt_host = getattr(request.app.state, "mqtt_host", "localhost")
|
||||
mqtt_port = getattr(request.app.state, "mqtt_port", 1883)
|
||||
mqtt_username = getattr(request.app.state, "mqtt_username", None)
|
||||
mqtt_password = getattr(request.app.state, "mqtt_password", None)
|
||||
mqtt_prefix = getattr(request.app.state, "mqtt_prefix", "meshcore")
|
||||
mqtt_tls = getattr(request.app.state, "mqtt_tls", False)
|
||||
mqtt_transport = getattr(request.app.state, "mqtt_transport", "tcp")
|
||||
mqtt_ws_path = getattr(request.app.state, "mqtt_ws_path", "/mqtt")
|
||||
|
||||
# Use unique client ID to allow multiple API instances
|
||||
unique_id = uuid.uuid4().hex[:8]
|
||||
config = MQTTConfig(
|
||||
host=mqtt_host,
|
||||
port=mqtt_port,
|
||||
username=mqtt_username,
|
||||
password=mqtt_password,
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-api-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
transport=mqtt_transport,
|
||||
ws_path=mqtt_ws_path,
|
||||
)
|
||||
|
||||
client = MQTTClient(config)
|
||||
|
||||
334
src/meshcore_hub/api/metrics.py
Normal file
334
src/meshcore_hub/api/metrics.py
Normal file
@@ -0,0 +1,334 @@
|
||||
"""Prometheus metrics endpoint for MeshCore Hub API."""
|
||||
|
||||
import base64
|
||||
import hmac
|
||||
import logging
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Request, Response
|
||||
from fastapi.responses import PlainTextResponse
|
||||
from prometheus_client import CollectorRegistry, Gauge, generate_latest
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from meshcore_hub.common.models import (
|
||||
Advertisement,
|
||||
EventLog,
|
||||
Member,
|
||||
Message,
|
||||
Node,
|
||||
NodeTag,
|
||||
Telemetry,
|
||||
TracePath,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Module-level cache
|
||||
_cache: dict[str, Any] = {"output": b"", "expires_at": 0.0}
|
||||
|
||||
|
||||
def verify_basic_auth(request: Request) -> bool:
|
||||
"""Verify HTTP Basic Auth credentials for metrics endpoint.
|
||||
|
||||
Uses username 'metrics' and the API read key as password.
|
||||
Returns True if no read key is configured (public access).
|
||||
|
||||
Args:
|
||||
request: FastAPI request
|
||||
|
||||
Returns:
|
||||
True if authentication passes
|
||||
"""
|
||||
read_key = getattr(request.app.state, "read_key", None)
|
||||
|
||||
# No read key configured = public access
|
||||
if not read_key:
|
||||
return True
|
||||
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
if not auth_header.startswith("Basic "):
|
||||
return False
|
||||
|
||||
try:
|
||||
decoded = base64.b64decode(auth_header[6:]).decode("utf-8")
|
||||
username, password = decoded.split(":", 1)
|
||||
return hmac.compare_digest(username, "metrics") and hmac.compare_digest(
|
||||
password, read_key
|
||||
)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def collect_metrics(session: Any) -> bytes:
|
||||
"""Collect all metrics from the database and generate Prometheus output.
|
||||
|
||||
Creates a fresh CollectorRegistry per call to avoid global state issues.
|
||||
|
||||
Args:
|
||||
session: SQLAlchemy database session
|
||||
|
||||
Returns:
|
||||
Prometheus text exposition format as bytes
|
||||
"""
|
||||
from meshcore_hub import __version__
|
||||
|
||||
registry = CollectorRegistry()
|
||||
|
||||
# -- Info gauge --
|
||||
info_gauge = Gauge(
|
||||
"meshcore_info",
|
||||
"MeshCore Hub application info",
|
||||
["version"],
|
||||
registry=registry,
|
||||
)
|
||||
info_gauge.labels(version=__version__).set(1)
|
||||
|
||||
# -- Nodes total --
|
||||
nodes_total = Gauge(
|
||||
"meshcore_nodes_total",
|
||||
"Total number of nodes",
|
||||
registry=registry,
|
||||
)
|
||||
count = session.execute(select(func.count(Node.id))).scalar() or 0
|
||||
nodes_total.set(count)
|
||||
|
||||
# -- Nodes active by time window --
|
||||
nodes_active = Gauge(
|
||||
"meshcore_nodes_active",
|
||||
"Number of active nodes in time window",
|
||||
["window"],
|
||||
registry=registry,
|
||||
)
|
||||
for window, hours in [("1h", 1), ("24h", 24), ("7d", 168), ("30d", 720)]:
|
||||
cutoff = time.time() - (hours * 3600)
|
||||
from datetime import datetime, timezone
|
||||
|
||||
cutoff_dt = datetime.fromtimestamp(cutoff, tz=timezone.utc)
|
||||
count = (
|
||||
session.execute(
|
||||
select(func.count(Node.id)).where(Node.last_seen >= cutoff_dt)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
nodes_active.labels(window=window).set(count)
|
||||
|
||||
# -- Nodes by type --
|
||||
nodes_by_type = Gauge(
|
||||
"meshcore_nodes_by_type",
|
||||
"Number of nodes by advertisement type",
|
||||
["adv_type"],
|
||||
registry=registry,
|
||||
)
|
||||
type_counts = session.execute(
|
||||
select(Node.adv_type, func.count(Node.id)).group_by(Node.adv_type)
|
||||
).all()
|
||||
for adv_type, count in type_counts:
|
||||
nodes_by_type.labels(adv_type=adv_type or "unknown").set(count)
|
||||
|
||||
# -- Nodes with location --
|
||||
nodes_with_location = Gauge(
|
||||
"meshcore_nodes_with_location",
|
||||
"Number of nodes with GPS coordinates",
|
||||
registry=registry,
|
||||
)
|
||||
count = (
|
||||
session.execute(
|
||||
select(func.count(Node.id)).where(
|
||||
Node.lat.isnot(None), Node.lon.isnot(None)
|
||||
)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
nodes_with_location.set(count)
|
||||
|
||||
# -- Node last seen timestamp --
|
||||
node_last_seen = Gauge(
|
||||
"meshcore_node_last_seen_timestamp_seconds",
|
||||
"Unix timestamp of when the node was last seen",
|
||||
["public_key", "node_name", "adv_type", "role"],
|
||||
registry=registry,
|
||||
)
|
||||
role_subq = (
|
||||
select(NodeTag.node_id, NodeTag.value.label("role"))
|
||||
.where(NodeTag.key == "role")
|
||||
.subquery()
|
||||
)
|
||||
nodes_with_last_seen = session.execute(
|
||||
select(
|
||||
Node.public_key,
|
||||
Node.name,
|
||||
Node.adv_type,
|
||||
Node.last_seen,
|
||||
role_subq.c.role,
|
||||
)
|
||||
.outerjoin(role_subq, Node.id == role_subq.c.node_id)
|
||||
.where(Node.last_seen.isnot(None))
|
||||
).all()
|
||||
for public_key, name, adv_type, last_seen, role in nodes_with_last_seen:
|
||||
node_last_seen.labels(
|
||||
public_key=public_key,
|
||||
node_name=name or "",
|
||||
adv_type=adv_type or "unknown",
|
||||
role=role or "",
|
||||
).set(last_seen.timestamp())
|
||||
|
||||
# -- Messages total by type --
|
||||
messages_total = Gauge(
|
||||
"meshcore_messages_total",
|
||||
"Total number of messages by type",
|
||||
["type"],
|
||||
registry=registry,
|
||||
)
|
||||
msg_type_counts = session.execute(
|
||||
select(Message.message_type, func.count(Message.id)).group_by(
|
||||
Message.message_type
|
||||
)
|
||||
).all()
|
||||
for msg_type, count in msg_type_counts:
|
||||
messages_total.labels(type=msg_type).set(count)
|
||||
|
||||
# -- Messages received by type and window --
|
||||
messages_received = Gauge(
|
||||
"meshcore_messages_received",
|
||||
"Messages received in time window by type",
|
||||
["type", "window"],
|
||||
registry=registry,
|
||||
)
|
||||
for window, hours in [("1h", 1), ("24h", 24), ("7d", 168), ("30d", 720)]:
|
||||
cutoff = time.time() - (hours * 3600)
|
||||
cutoff_dt = datetime.fromtimestamp(cutoff, tz=timezone.utc)
|
||||
window_counts = session.execute(
|
||||
select(Message.message_type, func.count(Message.id))
|
||||
.where(Message.received_at >= cutoff_dt)
|
||||
.group_by(Message.message_type)
|
||||
).all()
|
||||
for msg_type, count in window_counts:
|
||||
messages_received.labels(type=msg_type, window=window).set(count)
|
||||
|
||||
# -- Advertisements total --
|
||||
advertisements_total = Gauge(
|
||||
"meshcore_advertisements_total",
|
||||
"Total number of advertisements",
|
||||
registry=registry,
|
||||
)
|
||||
count = session.execute(select(func.count(Advertisement.id))).scalar() or 0
|
||||
advertisements_total.set(count)
|
||||
|
||||
# -- Advertisements received by window --
|
||||
advertisements_received = Gauge(
|
||||
"meshcore_advertisements_received",
|
||||
"Advertisements received in time window",
|
||||
["window"],
|
||||
registry=registry,
|
||||
)
|
||||
for window, hours in [("1h", 1), ("24h", 24), ("7d", 168), ("30d", 720)]:
|
||||
cutoff = time.time() - (hours * 3600)
|
||||
cutoff_dt = datetime.fromtimestamp(cutoff, tz=timezone.utc)
|
||||
count = (
|
||||
session.execute(
|
||||
select(func.count(Advertisement.id)).where(
|
||||
Advertisement.received_at >= cutoff_dt
|
||||
)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
advertisements_received.labels(window=window).set(count)
|
||||
|
||||
# -- Telemetry total --
|
||||
telemetry_total = Gauge(
|
||||
"meshcore_telemetry_total",
|
||||
"Total number of telemetry records",
|
||||
registry=registry,
|
||||
)
|
||||
count = session.execute(select(func.count(Telemetry.id))).scalar() or 0
|
||||
telemetry_total.set(count)
|
||||
|
||||
# -- Trace paths total --
|
||||
trace_paths_total = Gauge(
|
||||
"meshcore_trace_paths_total",
|
||||
"Total number of trace path records",
|
||||
registry=registry,
|
||||
)
|
||||
count = session.execute(select(func.count(TracePath.id))).scalar() or 0
|
||||
trace_paths_total.set(count)
|
||||
|
||||
# -- Events by type --
|
||||
events_total = Gauge(
|
||||
"meshcore_events_total",
|
||||
"Total events by type from event log",
|
||||
["event_type"],
|
||||
registry=registry,
|
||||
)
|
||||
event_counts = session.execute(
|
||||
select(EventLog.event_type, func.count(EventLog.id)).group_by(
|
||||
EventLog.event_type
|
||||
)
|
||||
).all()
|
||||
for event_type, count in event_counts:
|
||||
events_total.labels(event_type=event_type).set(count)
|
||||
|
||||
# -- Members total --
|
||||
members_total = Gauge(
|
||||
"meshcore_members_total",
|
||||
"Total number of network members",
|
||||
registry=registry,
|
||||
)
|
||||
count = session.execute(select(func.count(Member.id))).scalar() or 0
|
||||
members_total.set(count)
|
||||
|
||||
output: bytes = generate_latest(registry)
|
||||
return output
|
||||
|
||||
|
||||
@router.get("/metrics")
|
||||
async def metrics(request: Request) -> Response:
|
||||
"""Prometheus metrics endpoint.
|
||||
|
||||
Returns metrics in Prometheus text exposition format.
|
||||
Supports HTTP Basic Auth with username 'metrics' and API read key as password.
|
||||
Results are cached with a configurable TTL to reduce database load.
|
||||
"""
|
||||
# Check authentication
|
||||
if not verify_basic_auth(request):
|
||||
return PlainTextResponse(
|
||||
"Unauthorized",
|
||||
status_code=401,
|
||||
headers={"WWW-Authenticate": 'Basic realm="metrics"'},
|
||||
)
|
||||
|
||||
# Check cache
|
||||
cache_ttl = getattr(request.app.state, "metrics_cache_ttl", 60)
|
||||
now = time.time()
|
||||
|
||||
if _cache["output"] and now < _cache["expires_at"]:
|
||||
return Response(
|
||||
content=_cache["output"],
|
||||
media_type="text/plain; version=0.0.4; charset=utf-8",
|
||||
)
|
||||
|
||||
# Collect fresh metrics
|
||||
try:
|
||||
from meshcore_hub.api.app import get_db_manager
|
||||
|
||||
db_manager = get_db_manager()
|
||||
with db_manager.session_scope() as session:
|
||||
output = collect_metrics(session)
|
||||
|
||||
# Update cache
|
||||
_cache["output"] = output
|
||||
_cache["expires_at"] = now + cache_ttl
|
||||
|
||||
return Response(
|
||||
content=output,
|
||||
media_type="text/plain; version=0.0.4; charset=utf-8",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception("Failed to collect metrics: %s", e)
|
||||
return PlainTextResponse(
|
||||
f"# Error collecting metrics: {e}\n",
|
||||
status_code=500,
|
||||
media_type="text/plain; version=0.0.4; charset=utf-8",
|
||||
)
|
||||
@@ -29,6 +29,16 @@ def _get_tag_name(node: Optional[Node]) -> Optional[str]:
|
||||
return None
|
||||
|
||||
|
||||
def _get_tag_description(node: Optional[Node]) -> Optional[str]:
|
||||
"""Extract description tag from a node's tags."""
|
||||
if not node or not node.tags:
|
||||
return None
|
||||
for tag in node.tags:
|
||||
if tag.key == "description":
|
||||
return tag.value
|
||||
return None
|
||||
|
||||
|
||||
def _fetch_receivers_for_events(
|
||||
session: DbSession,
|
||||
event_type: str,
|
||||
@@ -96,6 +106,9 @@ async def list_advertisements(
|
||||
received_by: Optional[str] = Query(
|
||||
None, description="Filter by receiver node public key"
|
||||
),
|
||||
member_id: Optional[str] = Query(
|
||||
None, description="Filter by member_id tag value of source node"
|
||||
),
|
||||
since: Optional[datetime] = Query(None, description="Start timestamp"),
|
||||
until: Optional[datetime] = Query(None, description="End timestamp"),
|
||||
limit: int = Query(50, ge=1, le=100, description="Page size"),
|
||||
@@ -143,6 +156,16 @@ async def list_advertisements(
|
||||
if received_by:
|
||||
query = query.where(ReceiverNode.public_key == received_by)
|
||||
|
||||
if member_id:
|
||||
# Filter advertisements from nodes that have a member_id tag with the specified value
|
||||
query = query.where(
|
||||
SourceNode.id.in_(
|
||||
select(NodeTag.node_id).where(
|
||||
NodeTag.key == "member_id", NodeTag.value == member_id
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if since:
|
||||
query = query.where(Advertisement.received_at >= since)
|
||||
|
||||
@@ -197,6 +220,7 @@ async def list_advertisements(
|
||||
"name": adv.name,
|
||||
"node_name": row.source_name,
|
||||
"node_tag_name": _get_tag_name(source_node),
|
||||
"node_tag_description": _get_tag_description(source_node),
|
||||
"adv_type": adv.adv_type or row.source_adv_type,
|
||||
"flags": adv.flags,
|
||||
"received_at": adv.received_at,
|
||||
@@ -279,6 +303,7 @@ async def get_advertisement(
|
||||
"name": adv.name,
|
||||
"node_name": result.source_name,
|
||||
"node_tag_name": _get_tag_name(source_node),
|
||||
"node_tag_description": _get_tag_description(source_node),
|
||||
"adv_type": adv.adv_type or result.source_adv_type,
|
||||
"flags": adv.flags,
|
||||
"received_at": adv.received_at,
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from fastapi import APIRouter
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from meshcore_hub.api.auth import RequireRead
|
||||
@@ -362,175 +361,3 @@ async def get_node_count_history(
|
||||
data.append(DailyActivityPoint(date=date_str, count=count))
|
||||
|
||||
return NodeCountHistory(days=days, data=data)
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
|
||||
async def dashboard(
|
||||
request: Request,
|
||||
session: DbSession,
|
||||
) -> HTMLResponse:
|
||||
"""Simple HTML dashboard page."""
|
||||
now = datetime.now(timezone.utc)
|
||||
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
yesterday = now - timedelta(days=1)
|
||||
|
||||
# Get stats
|
||||
total_nodes = session.execute(select(func.count()).select_from(Node)).scalar() or 0
|
||||
|
||||
active_nodes = (
|
||||
session.execute(
|
||||
select(func.count()).select_from(Node).where(Node.last_seen >= yesterday)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
|
||||
total_messages = (
|
||||
session.execute(select(func.count()).select_from(Message)).scalar() or 0
|
||||
)
|
||||
|
||||
messages_today = (
|
||||
session.execute(
|
||||
select(func.count())
|
||||
.select_from(Message)
|
||||
.where(Message.received_at >= today_start)
|
||||
).scalar()
|
||||
or 0
|
||||
)
|
||||
|
||||
# Get recent nodes
|
||||
recent_nodes = (
|
||||
session.execute(select(Node).order_by(Node.last_seen.desc()).limit(10))
|
||||
.scalars()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Get recent messages
|
||||
recent_messages = (
|
||||
session.execute(select(Message).order_by(Message.received_at.desc()).limit(10))
|
||||
.scalars()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Build HTML
|
||||
html = f"""
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>MeshCore Hub Dashboard</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta http-equiv="refresh" content="30">
|
||||
<style>
|
||||
body {{
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
margin: 0;
|
||||
padding: 20px;
|
||||
background: #f5f5f5;
|
||||
color: #333;
|
||||
}}
|
||||
h1 {{ color: #2c3e50; }}
|
||||
.container {{ max-width: 1200px; margin: 0 auto; }}
|
||||
.stats {{
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||
gap: 20px;
|
||||
margin-bottom: 30px;
|
||||
}}
|
||||
.stat-card {{
|
||||
background: white;
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
}}
|
||||
.stat-card h3 {{ margin: 0 0 10px 0; color: #666; font-size: 14px; }}
|
||||
.stat-card .value {{ font-size: 32px; font-weight: bold; color: #2c3e50; }}
|
||||
.section {{
|
||||
background: white;
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
||||
margin-bottom: 20px;
|
||||
}}
|
||||
table {{ width: 100%; border-collapse: collapse; }}
|
||||
th, td {{ padding: 10px; text-align: left; border-bottom: 1px solid #eee; }}
|
||||
th {{ background: #f8f9fa; font-weight: 600; }}
|
||||
.text-muted {{ color: #666; }}
|
||||
.truncate {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>MeshCore Hub Dashboard</h1>
|
||||
<p class="text-muted">Last updated: {now.strftime('%Y-%m-%d %H:%M:%S UTC')}</p>
|
||||
|
||||
<div class="stats">
|
||||
<div class="stat-card">
|
||||
<h3>Total Nodes</h3>
|
||||
<div class="value">{total_nodes}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Active Nodes (24h)</h3>
|
||||
<div class="value">{active_nodes}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Total Messages</h3>
|
||||
<div class="value">{total_messages}</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<h3>Messages Today</h3>
|
||||
<div class="value">{messages_today}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="section">
|
||||
<h2>Recent Nodes</h2>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Public Key</th>
|
||||
<th>Type</th>
|
||||
<th>Last Seen</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{"".join(f'''
|
||||
<tr>
|
||||
<td>{n.name or '-'}</td>
|
||||
<td class="truncate">{n.public_key[:16]}...</td>
|
||||
<td>{n.adv_type or '-'}</td>
|
||||
<td>{n.last_seen.strftime('%Y-%m-%d %H:%M') if n.last_seen else '-'}</td>
|
||||
</tr>
|
||||
''' for n in recent_nodes)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="section">
|
||||
<h2>Recent Messages</h2>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Type</th>
|
||||
<th>From/Channel</th>
|
||||
<th>Text</th>
|
||||
<th>Received</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{"".join(f'''
|
||||
<tr>
|
||||
<td>{m.message_type}</td>
|
||||
<td>{m.pubkey_prefix or f'Ch {m.channel_idx}' or '-'}</td>
|
||||
<td class="truncate">{m.text[:50]}{'...' if len(m.text) > 50 else ''}</td>
|
||||
<td>{m.received_at.strftime('%Y-%m-%d %H:%M') if m.received_at else '-'}</td>
|
||||
</tr>
|
||||
''' for m in recent_messages)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
return HTMLResponse(content=html)
|
||||
|
||||
@@ -6,7 +6,13 @@ from sqlalchemy import select
|
||||
from meshcore_hub.api.auth import RequireAdmin, RequireRead
|
||||
from meshcore_hub.api.dependencies import DbSession
|
||||
from meshcore_hub.common.models import Node, NodeTag
|
||||
from meshcore_hub.common.schemas.nodes import NodeTagCreate, NodeTagRead, NodeTagUpdate
|
||||
from meshcore_hub.common.schemas.nodes import (
|
||||
NodeTagCreate,
|
||||
NodeTagMove,
|
||||
NodeTagRead,
|
||||
NodeTagsCopyResult,
|
||||
NodeTagUpdate,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -130,6 +136,131 @@ async def update_node_tag(
|
||||
return NodeTagRead.model_validate(node_tag)
|
||||
|
||||
|
||||
@router.put("/nodes/{public_key}/tags/{key}/move", response_model=NodeTagRead)
|
||||
async def move_node_tag(
|
||||
_: RequireAdmin,
|
||||
session: DbSession,
|
||||
public_key: str,
|
||||
key: str,
|
||||
data: NodeTagMove,
|
||||
) -> NodeTagRead:
|
||||
"""Move a node tag to a different node."""
|
||||
# Check if source and destination are the same
|
||||
if public_key == data.new_public_key:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Source and destination nodes are the same",
|
||||
)
|
||||
|
||||
# Find source node
|
||||
source_query = select(Node).where(Node.public_key == public_key)
|
||||
source_node = session.execute(source_query).scalar_one_or_none()
|
||||
|
||||
if not source_node:
|
||||
raise HTTPException(status_code=404, detail="Source node not found")
|
||||
|
||||
# Find tag
|
||||
tag_query = select(NodeTag).where(
|
||||
(NodeTag.node_id == source_node.id) & (NodeTag.key == key)
|
||||
)
|
||||
node_tag = session.execute(tag_query).scalar_one_or_none()
|
||||
|
||||
if not node_tag:
|
||||
raise HTTPException(status_code=404, detail="Tag not found")
|
||||
|
||||
# Find destination node
|
||||
dest_query = select(Node).where(Node.public_key == data.new_public_key)
|
||||
dest_node = session.execute(dest_query).scalar_one_or_none()
|
||||
|
||||
if not dest_node:
|
||||
raise HTTPException(status_code=404, detail="Destination node not found")
|
||||
|
||||
# Check if tag already exists on destination node
|
||||
conflict_query = select(NodeTag).where(
|
||||
(NodeTag.node_id == dest_node.id) & (NodeTag.key == key)
|
||||
)
|
||||
conflict = session.execute(conflict_query).scalar_one_or_none()
|
||||
|
||||
if conflict:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"Tag '{key}' already exists on destination node",
|
||||
)
|
||||
|
||||
# Move tag to destination node
|
||||
node_tag.node_id = dest_node.id
|
||||
session.commit()
|
||||
session.refresh(node_tag)
|
||||
|
||||
return NodeTagRead.model_validate(node_tag)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/nodes/{public_key}/tags/copy-to/{dest_public_key}",
|
||||
response_model=NodeTagsCopyResult,
|
||||
)
|
||||
async def copy_all_tags(
|
||||
_: RequireAdmin,
|
||||
session: DbSession,
|
||||
public_key: str,
|
||||
dest_public_key: str,
|
||||
) -> NodeTagsCopyResult:
|
||||
"""Copy all tags from one node to another.
|
||||
|
||||
Tags that already exist on the destination node are skipped.
|
||||
"""
|
||||
# Check if source and destination are the same
|
||||
if public_key == dest_public_key:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Source and destination nodes are the same",
|
||||
)
|
||||
|
||||
# Find source node
|
||||
source_query = select(Node).where(Node.public_key == public_key)
|
||||
source_node = session.execute(source_query).scalar_one_or_none()
|
||||
|
||||
if not source_node:
|
||||
raise HTTPException(status_code=404, detail="Source node not found")
|
||||
|
||||
# Find destination node
|
||||
dest_query = select(Node).where(Node.public_key == dest_public_key)
|
||||
dest_node = session.execute(dest_query).scalar_one_or_none()
|
||||
|
||||
if not dest_node:
|
||||
raise HTTPException(status_code=404, detail="Destination node not found")
|
||||
|
||||
# Get existing tags on destination node
|
||||
existing_query = select(NodeTag.key).where(NodeTag.node_id == dest_node.id)
|
||||
existing_keys = set(session.execute(existing_query).scalars().all())
|
||||
|
||||
# Copy tags
|
||||
copied = 0
|
||||
skipped_keys = []
|
||||
|
||||
for tag in source_node.tags:
|
||||
if tag.key in existing_keys:
|
||||
skipped_keys.append(tag.key)
|
||||
continue
|
||||
|
||||
new_tag = NodeTag(
|
||||
node_id=dest_node.id,
|
||||
key=tag.key,
|
||||
value=tag.value,
|
||||
value_type=tag.value_type,
|
||||
)
|
||||
session.add(new_tag)
|
||||
copied += 1
|
||||
|
||||
session.commit()
|
||||
|
||||
return NodeTagsCopyResult(
|
||||
copied=copied,
|
||||
skipped=len(skipped_keys),
|
||||
skipped_keys=skipped_keys,
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/nodes/{public_key}/tags/{key}", status_code=204)
|
||||
async def delete_node_tag(
|
||||
_: RequireAdmin,
|
||||
@@ -156,3 +287,27 @@ async def delete_node_tag(
|
||||
|
||||
session.delete(node_tag)
|
||||
session.commit()
|
||||
|
||||
|
||||
@router.delete("/nodes/{public_key}/tags")
|
||||
async def delete_all_node_tags(
|
||||
_: RequireAdmin,
|
||||
session: DbSession,
|
||||
public_key: str,
|
||||
) -> dict:
|
||||
"""Delete all tags for a node."""
|
||||
# Find node
|
||||
node_query = select(Node).where(Node.public_key == public_key)
|
||||
node = session.execute(node_query).scalar_one_or_none()
|
||||
|
||||
if not node:
|
||||
raise HTTPException(status_code=404, detail="Node not found")
|
||||
|
||||
# Count and delete all tags
|
||||
count = len(node.tags)
|
||||
for tag in node.tags:
|
||||
session.delete(tag)
|
||||
|
||||
session.commit()
|
||||
|
||||
return {"deleted": count}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from fastapi import APIRouter, HTTPException, Path, Query
|
||||
from sqlalchemy import func, or_, select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
@@ -22,6 +22,8 @@ async def list_nodes(
|
||||
None, description="Search in name tag, node name, or public key"
|
||||
),
|
||||
adv_type: Optional[str] = Query(None, description="Filter by advertisement type"),
|
||||
member_id: Optional[str] = Query(None, description="Filter by member_id tag value"),
|
||||
role: Optional[str] = Query(None, description="Filter by role tag value"),
|
||||
limit: int = Query(50, ge=1, le=500, description="Page size"),
|
||||
offset: int = Query(0, ge=0, description="Page offset"),
|
||||
) -> NodeList:
|
||||
@@ -46,7 +48,59 @@ async def list_nodes(
|
||||
)
|
||||
|
||||
if adv_type:
|
||||
query = query.where(Node.adv_type == adv_type)
|
||||
normalized_adv_type = adv_type.strip().lower()
|
||||
if normalized_adv_type == "repeater":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "repeater",
|
||||
Node.adv_type.ilike("%repeater%"),
|
||||
Node.adv_type.ilike("%relay%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "companion":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "companion",
|
||||
Node.adv_type.ilike("%companion%"),
|
||||
Node.adv_type.ilike("%observer%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "room":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "room",
|
||||
Node.adv_type.ilike("%room%"),
|
||||
)
|
||||
)
|
||||
elif normalized_adv_type == "chat":
|
||||
query = query.where(
|
||||
or_(
|
||||
Node.adv_type == "chat",
|
||||
Node.adv_type.ilike("%chat%"),
|
||||
)
|
||||
)
|
||||
else:
|
||||
query = query.where(Node.adv_type == adv_type)
|
||||
|
||||
if member_id:
|
||||
# Filter nodes that have a member_id tag with the specified value
|
||||
query = query.where(
|
||||
Node.id.in_(
|
||||
select(NodeTag.node_id).where(
|
||||
NodeTag.key == "member_id", NodeTag.value == member_id
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if role:
|
||||
# Filter nodes that have a role tag with the specified value
|
||||
query = query.where(
|
||||
Node.id.in_(
|
||||
select(NodeTag.node_id).where(
|
||||
NodeTag.key == "role", NodeTag.value == role
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Get total count
|
||||
count_query = select(func.count()).select_from(query.subquery())
|
||||
@@ -66,14 +120,43 @@ async def list_nodes(
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{public_key}", response_model=NodeRead)
|
||||
async def get_node(
|
||||
@router.get("/prefix/{prefix}", response_model=NodeRead)
|
||||
async def get_node_by_prefix(
|
||||
_: RequireRead,
|
||||
session: DbSession,
|
||||
public_key: str,
|
||||
prefix: str = Path(description="Public key prefix to search for"),
|
||||
) -> NodeRead:
|
||||
"""Get a single node by public key."""
|
||||
query = select(Node).where(Node.public_key == public_key)
|
||||
"""Get a single node by public key prefix.
|
||||
|
||||
Returns the first node (alphabetically by public_key) that matches the prefix.
|
||||
"""
|
||||
query = (
|
||||
select(Node)
|
||||
.options(selectinload(Node.tags))
|
||||
.where(Node.public_key.startswith(prefix))
|
||||
.order_by(Node.public_key)
|
||||
.limit(1)
|
||||
)
|
||||
node = session.execute(query).scalar_one_or_none()
|
||||
|
||||
if not node:
|
||||
raise HTTPException(status_code=404, detail="Node not found")
|
||||
|
||||
return NodeRead.model_validate(node)
|
||||
|
||||
|
||||
@router.get("/{public_key}", response_model=NodeRead)
|
||||
async def get_node(
|
||||
_: RequireRead,
|
||||
session: DbSession,
|
||||
public_key: str = Path(description="Full 64-character public key"),
|
||||
) -> NodeRead:
|
||||
"""Get a single node by exact public key match."""
|
||||
query = (
|
||||
select(Node)
|
||||
.options(selectinload(Node.tags))
|
||||
.where(Node.public_key == public_key)
|
||||
)
|
||||
node = session.execute(query).scalar_one_or_none()
|
||||
|
||||
if not node:
|
||||
|
||||
@@ -54,6 +54,31 @@ if TYPE_CHECKING:
|
||||
envvar="MQTT_TLS",
|
||||
help="Enable TLS/SSL for MQTT connection",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-transport",
|
||||
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
|
||||
default="tcp",
|
||||
envvar="MQTT_TRANSPORT",
|
||||
help="MQTT transport protocol",
|
||||
)
|
||||
@click.option(
|
||||
"--mqtt-ws-path",
|
||||
type=str,
|
||||
default="/mqtt",
|
||||
envvar="MQTT_WS_PATH",
|
||||
help="MQTT WebSocket path (used when transport=websockets)",
|
||||
)
|
||||
@click.option(
|
||||
"--ingest-mode",
|
||||
"collector_ingest_mode",
|
||||
type=click.Choice(["native", "letsmesh_upload"], case_sensitive=False),
|
||||
default="native",
|
||||
envvar="COLLECTOR_INGEST_MODE",
|
||||
help=(
|
||||
"Collector ingest mode: native MeshCore events or LetsMesh upload "
|
||||
"(packets/status/internal)"
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--data-home",
|
||||
type=str,
|
||||
@@ -90,6 +115,9 @@ def collector(
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
collector_ingest_mode: str,
|
||||
data_home: str | None,
|
||||
seed_home: str | None,
|
||||
database_url: str | None,
|
||||
@@ -134,6 +162,9 @@ def collector(
|
||||
ctx.obj["mqtt_password"] = mqtt_password
|
||||
ctx.obj["prefix"] = prefix
|
||||
ctx.obj["mqtt_tls"] = mqtt_tls
|
||||
ctx.obj["mqtt_transport"] = mqtt_transport
|
||||
ctx.obj["mqtt_ws_path"] = mqtt_ws_path
|
||||
ctx.obj["collector_ingest_mode"] = collector_ingest_mode
|
||||
ctx.obj["data_home"] = data_home or settings.data_home
|
||||
ctx.obj["seed_home"] = settings.effective_seed_home
|
||||
ctx.obj["database_url"] = effective_db_url
|
||||
@@ -149,6 +180,9 @@ def collector(
|
||||
mqtt_password=mqtt_password,
|
||||
prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=collector_ingest_mode,
|
||||
database_url=effective_db_url,
|
||||
log_level=log_level,
|
||||
data_home=data_home or settings.data_home,
|
||||
@@ -163,6 +197,9 @@ def _run_collector_service(
|
||||
mqtt_password: str | None,
|
||||
prefix: str,
|
||||
mqtt_tls: bool,
|
||||
mqtt_transport: str,
|
||||
mqtt_ws_path: str,
|
||||
ingest_mode: str,
|
||||
database_url: str,
|
||||
log_level: str,
|
||||
data_home: str,
|
||||
@@ -191,6 +228,8 @@ def _run_collector_service(
|
||||
click.echo(f"Data home: {data_home}")
|
||||
click.echo(f"Seed home: {seed_home}")
|
||||
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {prefix})")
|
||||
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
|
||||
click.echo(f"Ingest mode: {ingest_mode}")
|
||||
click.echo(f"Database: {database_url}")
|
||||
|
||||
# Load webhook configuration from settings
|
||||
@@ -198,6 +237,7 @@ def _run_collector_service(
|
||||
WebhookDispatcher,
|
||||
create_webhooks_from_settings,
|
||||
)
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.common.config import get_collector_settings
|
||||
|
||||
settings = get_collector_settings()
|
||||
@@ -234,6 +274,24 @@ def _run_collector_service(
|
||||
if settings.data_retention_enabled or settings.node_cleanup_enabled:
|
||||
click.echo(f" Interval: {settings.data_retention_interval_hours} hours")
|
||||
|
||||
if ingest_mode.lower() == "letsmesh_upload":
|
||||
click.echo("")
|
||||
click.echo("LetsMesh decode configuration:")
|
||||
if settings.collector_letsmesh_decoder_enabled:
|
||||
builtin_keys = len(LetsMeshPacketDecoder.BUILTIN_CHANNEL_KEYS)
|
||||
env_keys = len(settings.collector_letsmesh_decoder_keys_list)
|
||||
click.echo(
|
||||
" Decoder: Enabled " f"({settings.collector_letsmesh_decoder_command})"
|
||||
)
|
||||
click.echo(f" Built-in keys: {builtin_keys}")
|
||||
click.echo(" Additional keys from .env: " f"{env_keys} configured")
|
||||
click.echo(
|
||||
" Timeout: "
|
||||
f"{settings.collector_letsmesh_decoder_timeout_seconds:.2f}s"
|
||||
)
|
||||
else:
|
||||
click.echo(" Decoder: Disabled")
|
||||
|
||||
click.echo("")
|
||||
click.echo("Starting MQTT subscriber...")
|
||||
run_collector(
|
||||
@@ -243,6 +301,9 @@ def _run_collector_service(
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=ingest_mode,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=settings.data_retention_enabled,
|
||||
@@ -250,6 +311,12 @@ def _run_collector_service(
|
||||
cleanup_interval_hours=settings.data_retention_interval_hours,
|
||||
node_cleanup_enabled=settings.node_cleanup_enabled,
|
||||
node_cleanup_days=settings.node_cleanup_days,
|
||||
letsmesh_decoder_enabled=settings.collector_letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=settings.collector_letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=settings.collector_letsmesh_decoder_keys_list,
|
||||
letsmesh_decoder_timeout_seconds=(
|
||||
settings.collector_letsmesh_decoder_timeout_seconds
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -267,6 +334,9 @@ def run_cmd(ctx: click.Context) -> None:
|
||||
mqtt_password=ctx.obj["mqtt_password"],
|
||||
prefix=ctx.obj["prefix"],
|
||||
mqtt_tls=ctx.obj["mqtt_tls"],
|
||||
mqtt_transport=ctx.obj["mqtt_transport"],
|
||||
mqtt_ws_path=ctx.obj["mqtt_ws_path"],
|
||||
ingest_mode=ctx.obj["collector_ingest_mode"],
|
||||
database_url=ctx.obj["database_url"],
|
||||
log_level=ctx.obj["log_level"],
|
||||
data_home=ctx.obj["data_home"],
|
||||
@@ -433,12 +503,12 @@ def import_tags_cmd(
|
||||
\b
|
||||
0123456789abcdef...:
|
||||
friendly_name: My Node
|
||||
location:
|
||||
value: "52.0,1.0"
|
||||
type: coordinate
|
||||
altitude:
|
||||
value: "150"
|
||||
type: number
|
||||
active:
|
||||
value: "true"
|
||||
type: boolean
|
||||
|
||||
Shorthand is also supported (string values with default type):
|
||||
|
||||
@@ -447,7 +517,7 @@ def import_tags_cmd(
|
||||
friendly_name: My Node
|
||||
role: gateway
|
||||
|
||||
Supported types: string, number, boolean, coordinate
|
||||
Supported types: string, number, boolean
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@@ -14,6 +14,20 @@ from meshcore_hub.common.models import Advertisement, Node, add_event_receiver
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _coerce_float(value: Any) -> float | None:
|
||||
"""Convert int/float/string values to float when possible."""
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, (int, float)):
|
||||
return float(value)
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
return float(value.strip())
|
||||
except ValueError:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def handle_advertisement(
|
||||
public_key: str,
|
||||
event_type: str,
|
||||
@@ -40,6 +54,22 @@ def handle_advertisement(
|
||||
name = payload.get("name")
|
||||
adv_type = payload.get("adv_type")
|
||||
flags = payload.get("flags")
|
||||
lat = payload.get("lat")
|
||||
lon = payload.get("lon")
|
||||
|
||||
if lat is None:
|
||||
lat = payload.get("adv_lat")
|
||||
if lon is None:
|
||||
lon = payload.get("adv_lon")
|
||||
|
||||
location = payload.get("location")
|
||||
if isinstance(location, dict):
|
||||
if lat is None:
|
||||
lat = location.get("latitude")
|
||||
if lon is None:
|
||||
lon = location.get("longitude")
|
||||
lat = _coerce_float(lat)
|
||||
lon = _coerce_float(lon)
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Compute event hash for deduplication (30-second time bucket)
|
||||
@@ -79,6 +109,10 @@ def handle_advertisement(
|
||||
node_query = select(Node).where(Node.public_key == adv_public_key)
|
||||
node = session.execute(node_query).scalar_one_or_none()
|
||||
if node:
|
||||
if lat is not None:
|
||||
node.lat = lat
|
||||
if lon is not None:
|
||||
node.lon = lon
|
||||
node.last_seen = now
|
||||
|
||||
# Add this receiver to the junction table
|
||||
@@ -110,6 +144,10 @@ def handle_advertisement(
|
||||
node.adv_type = adv_type
|
||||
if flags is not None:
|
||||
node.flags = flags
|
||||
if lat is not None:
|
||||
node.lat = lat
|
||||
if lon is not None:
|
||||
node.lon = lon
|
||||
node.last_seen = now
|
||||
else:
|
||||
# Create new node
|
||||
@@ -120,6 +158,8 @@ def handle_advertisement(
|
||||
flags=flags,
|
||||
first_seen=now,
|
||||
last_seen=now,
|
||||
lat=lat,
|
||||
lon=lon,
|
||||
)
|
||||
session.add(node)
|
||||
session.flush()
|
||||
|
||||
@@ -47,6 +47,10 @@ def handle_contact(
|
||||
# Device uses 'adv_name' for the advertised name
|
||||
name = payload.get("adv_name") or payload.get("name")
|
||||
|
||||
# GPS coordinates (optional)
|
||||
lat = payload.get("adv_lat")
|
||||
lon = payload.get("adv_lon")
|
||||
|
||||
logger.info(f"Processing contact: {contact_key[:12]}... adv_name={name}")
|
||||
|
||||
# Device uses numeric 'type' field, convert to string
|
||||
@@ -73,6 +77,11 @@ def handle_contact(
|
||||
node.name = name
|
||||
if node_type and not node.adv_type:
|
||||
node.adv_type = node_type
|
||||
# Update GPS coordinates if provided
|
||||
if lat is not None:
|
||||
node.lat = lat
|
||||
if lon is not None:
|
||||
node.lon = lon
|
||||
# Do NOT update last_seen for contact sync - only advertisement events
|
||||
# should update last_seen since that's when the node was actually seen
|
||||
else:
|
||||
@@ -84,6 +93,8 @@ def handle_contact(
|
||||
adv_type=node_type,
|
||||
first_seen=now,
|
||||
last_seen=None, # Will be set when we receive an advertisement
|
||||
lat=lat,
|
||||
lon=lon,
|
||||
)
|
||||
session.add(node)
|
||||
logger.info(f"Created node from contact: {contact_key[:12]}... ({name})")
|
||||
|
||||
@@ -70,7 +70,7 @@ def _handle_message(
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Extract fields based on message type
|
||||
pubkey_prefix = payload.get("pubkey_prefix") if message_type == "contact" else None
|
||||
pubkey_prefix = payload.get("pubkey_prefix")
|
||||
channel_idx = payload.get("channel_idx") if message_type == "channel" else None
|
||||
path_len = payload.get("path_len")
|
||||
txt_type = payload.get("txt_type")
|
||||
|
||||
275
src/meshcore_hub/collector/letsmesh_decoder.py
Normal file
275
src/meshcore_hub/collector/letsmesh_decoder.py
Normal file
@@ -0,0 +1,275 @@
|
||||
"""LetsMesh packet decoder integration.
|
||||
|
||||
Provides an optional bridge to the external `meshcore-decoder` CLI so the
|
||||
collector can turn LetsMesh upload `raw` packet hex into decoded message data.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import shlex
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
from typing import Any, NamedTuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LetsMeshPacketDecoder:
|
||||
"""Decode LetsMesh packet payloads with `meshcore-decoder` CLI."""
|
||||
|
||||
class ChannelKey(NamedTuple):
|
||||
"""Channel key metadata for decryption and channel labeling."""
|
||||
|
||||
label: str | None
|
||||
key_hex: str
|
||||
channel_hash: str
|
||||
|
||||
# Built-in keys required by your deployment.
|
||||
# - Public channel
|
||||
# - #test channel
|
||||
BUILTIN_CHANNEL_KEYS: tuple[tuple[str, str], ...] = (
|
||||
("Public", "8B3387E9C5CDEA6AC9E5EDBAA115CD72"),
|
||||
("test", "9CD8FCF22A47333B591D96A2B848B73F"),
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
enabled: bool = True,
|
||||
command: str = "meshcore-decoder",
|
||||
channel_keys: list[str] | None = None,
|
||||
timeout_seconds: float = 2.0,
|
||||
) -> None:
|
||||
self._enabled = enabled
|
||||
self._command_tokens = shlex.split(command.strip()) if command.strip() else []
|
||||
self._channel_key_infos = self._normalize_channel_keys(channel_keys or [])
|
||||
self._channel_keys = [info.key_hex for info in self._channel_key_infos]
|
||||
self._channel_names_by_hash = {
|
||||
info.channel_hash: info.label
|
||||
for info in self._channel_key_infos
|
||||
if info.label
|
||||
}
|
||||
self._decode_cache: dict[str, dict[str, Any] | None] = {}
|
||||
self._decode_cache_maxsize = 2048
|
||||
self._timeout_seconds = timeout_seconds
|
||||
self._checked_command = False
|
||||
self._command_available = False
|
||||
self._warned_unavailable = False
|
||||
|
||||
@classmethod
|
||||
def _normalize_channel_keys(cls, values: list[str]) -> list[ChannelKey]:
|
||||
"""Normalize key list (labels + key + channel hash, deduplicated)."""
|
||||
normalized: list[LetsMeshPacketDecoder.ChannelKey] = []
|
||||
seen_keys: set[str] = set()
|
||||
|
||||
for label, key in cls.BUILTIN_CHANNEL_KEYS:
|
||||
entry = cls._normalize_channel_entry(f"{label}={key}")
|
||||
if not entry:
|
||||
continue
|
||||
if entry.key_hex in seen_keys:
|
||||
continue
|
||||
normalized.append(entry)
|
||||
seen_keys.add(entry.key_hex)
|
||||
|
||||
for value in values:
|
||||
entry = cls._normalize_channel_entry(value)
|
||||
if not entry:
|
||||
continue
|
||||
if entry.key_hex in seen_keys:
|
||||
continue
|
||||
normalized.append(entry)
|
||||
seen_keys.add(entry.key_hex)
|
||||
|
||||
return normalized
|
||||
|
||||
@classmethod
|
||||
def _normalize_channel_entry(cls, value: str | None) -> ChannelKey | None:
|
||||
"""Normalize one key entry (`label=hex`, `label:hex`, or `hex`)."""
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
candidate = value.strip()
|
||||
if not candidate:
|
||||
return None
|
||||
|
||||
label: str | None = None
|
||||
key_candidate = candidate
|
||||
for separator in ("=", ":"):
|
||||
if separator not in candidate:
|
||||
continue
|
||||
left, right = candidate.split(separator, 1)
|
||||
right = right.strip()
|
||||
right = right.removeprefix("0x").removeprefix("0X").strip()
|
||||
if right and cls._is_hex(right):
|
||||
label = left.strip().lstrip("#")
|
||||
key_candidate = right
|
||||
break
|
||||
|
||||
key_candidate = key_candidate.strip()
|
||||
key_candidate = key_candidate.removeprefix("0x").removeprefix("0X").strip()
|
||||
if not key_candidate or not cls._is_hex(key_candidate):
|
||||
return None
|
||||
|
||||
key_hex = key_candidate.upper()
|
||||
channel_hash = cls._compute_channel_hash(key_hex)
|
||||
normalized_label = label.strip() if label and label.strip() else None
|
||||
return cls.ChannelKey(
|
||||
label=normalized_label,
|
||||
key_hex=key_hex,
|
||||
channel_hash=channel_hash,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _is_hex(value: str) -> bool:
|
||||
"""Return True if string contains only hex digits."""
|
||||
return bool(value) and all(char in string.hexdigits for char in value)
|
||||
|
||||
@staticmethod
|
||||
def _compute_channel_hash(key_hex: str) -> str:
|
||||
"""Compute channel hash (first byte of SHA-256 of channel key)."""
|
||||
return hashlib.sha256(bytes.fromhex(key_hex)).digest()[:1].hex().upper()
|
||||
|
||||
def channel_name_from_decoded(
|
||||
self,
|
||||
decoded_packet: dict[str, Any] | None,
|
||||
) -> str | None:
|
||||
"""Resolve channel label from decoded payload channel hash."""
|
||||
if not isinstance(decoded_packet, dict):
|
||||
return None
|
||||
|
||||
payload = decoded_packet.get("payload")
|
||||
if not isinstance(payload, dict):
|
||||
return None
|
||||
|
||||
decoded = payload.get("decoded")
|
||||
if not isinstance(decoded, dict):
|
||||
return None
|
||||
|
||||
channel_hash = decoded.get("channelHash")
|
||||
if not isinstance(channel_hash, str):
|
||||
return None
|
||||
|
||||
return self._channel_names_by_hash.get(channel_hash.upper())
|
||||
|
||||
def channel_labels_by_index(self) -> dict[int, str]:
|
||||
"""Return channel labels keyed by numeric channel index (0-255)."""
|
||||
labels: dict[int, str] = {}
|
||||
for info in self._channel_key_infos:
|
||||
if not info.label:
|
||||
continue
|
||||
|
||||
label = info.label.strip()
|
||||
if not label:
|
||||
continue
|
||||
|
||||
if label.lower() == "public":
|
||||
normalized_label = "Public"
|
||||
else:
|
||||
normalized_label = label if label.startswith("#") else f"#{label}"
|
||||
|
||||
channel_idx = int(info.channel_hash, 16)
|
||||
labels.setdefault(channel_idx, normalized_label)
|
||||
|
||||
return labels
|
||||
|
||||
def decode_payload(self, payload: dict[str, Any]) -> dict[str, Any] | None:
|
||||
"""Decode packet payload `raw` hex and return decoded JSON if available."""
|
||||
raw_hex = payload.get("raw")
|
||||
if not isinstance(raw_hex, str):
|
||||
return None
|
||||
clean_hex = raw_hex.strip()
|
||||
if not clean_hex:
|
||||
return None
|
||||
if not self._is_hex(clean_hex):
|
||||
logger.debug("LetsMesh decoder skipped non-hex raw payload")
|
||||
return None
|
||||
cached = self._decode_cache.get(clean_hex)
|
||||
if clean_hex in self._decode_cache:
|
||||
return cached
|
||||
|
||||
decoded = self._decode_raw(clean_hex)
|
||||
self._decode_cache[clean_hex] = decoded
|
||||
if len(self._decode_cache) > self._decode_cache_maxsize:
|
||||
# Drop oldest cached payload (insertion-order dict).
|
||||
self._decode_cache.pop(next(iter(self._decode_cache)))
|
||||
return decoded
|
||||
|
||||
def _decode_raw(self, raw_hex: str) -> dict[str, Any] | None:
|
||||
"""Decode raw packet hex with decoder CLI (cached per packet hex)."""
|
||||
if not self._enabled:
|
||||
return None
|
||||
if not self._is_command_available():
|
||||
return None
|
||||
|
||||
command = [*self._command_tokens, "decode", raw_hex, "--json"]
|
||||
if self._channel_keys:
|
||||
command.append("--key")
|
||||
command.extend(self._channel_keys)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
command,
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=self._timeout_seconds,
|
||||
)
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.debug(
|
||||
"LetsMesh decoder timed out after %.2fs",
|
||||
self._timeout_seconds,
|
||||
)
|
||||
return None
|
||||
except OSError as exc:
|
||||
logger.debug("LetsMesh decoder failed to execute: %s", exc)
|
||||
return None
|
||||
|
||||
if result.returncode != 0:
|
||||
stderr = result.stderr.strip() if result.stderr else ""
|
||||
logger.debug(
|
||||
"LetsMesh decoder exited with code %s%s",
|
||||
result.returncode,
|
||||
f": {stderr}" if stderr else "",
|
||||
)
|
||||
return None
|
||||
|
||||
output = result.stdout.strip()
|
||||
if not output:
|
||||
return None
|
||||
|
||||
try:
|
||||
decoded = json.loads(output)
|
||||
except json.JSONDecodeError:
|
||||
logger.debug("LetsMesh decoder returned non-JSON output")
|
||||
return None
|
||||
|
||||
return decoded if isinstance(decoded, dict) else None
|
||||
|
||||
def _is_command_available(self) -> bool:
|
||||
"""Check decoder command availability once."""
|
||||
if self._checked_command:
|
||||
return self._command_available
|
||||
|
||||
self._checked_command = True
|
||||
if not self._command_tokens:
|
||||
self._command_available = False
|
||||
else:
|
||||
command = self._command_tokens[0]
|
||||
if "/" in command:
|
||||
self._command_available = shutil.which(command) is not None
|
||||
else:
|
||||
self._command_available = shutil.which(command) is not None
|
||||
|
||||
if not self._command_available and not self._warned_unavailable:
|
||||
self._warned_unavailable = True
|
||||
command_text = " ".join(self._command_tokens) or "<empty>"
|
||||
logger.warning(
|
||||
"LetsMesh decoder command not found (%s). "
|
||||
"Messages will remain encrypted placeholders until decoder is installed.",
|
||||
command_text,
|
||||
)
|
||||
|
||||
return self._command_available
|
||||
1081
src/meshcore_hub/collector/letsmesh_normalizer.py
Normal file
1081
src/meshcore_hub/collector/letsmesh_normalizer.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -21,6 +21,8 @@ from typing import Any, Callable, Optional, TYPE_CHECKING
|
||||
from meshcore_hub.common.database import DatabaseManager
|
||||
from meshcore_hub.common.health import HealthReporter
|
||||
from meshcore_hub.common.mqtt import MQTTClient, MQTTConfig
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.collector.letsmesh_normalizer import LetsMeshNormalizer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from meshcore_hub.collector.webhook import WebhookDispatcher
|
||||
@@ -32,9 +34,12 @@ logger = logging.getLogger(__name__)
|
||||
EventHandler = Callable[[str, str, dict[str, Any], DatabaseManager], None]
|
||||
|
||||
|
||||
class Subscriber:
|
||||
class Subscriber(LetsMeshNormalizer):
|
||||
"""MQTT Subscriber for collecting and storing MeshCore events."""
|
||||
|
||||
INGEST_MODE_NATIVE = "native"
|
||||
INGEST_MODE_LETSMESH_UPLOAD = "letsmesh_upload"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
mqtt_client: MQTTClient,
|
||||
@@ -45,6 +50,11 @@ class Subscriber:
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
ingest_mode: str = INGEST_MODE_NATIVE,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
):
|
||||
"""Initialize subscriber.
|
||||
|
||||
@@ -57,6 +67,11 @@ class Subscriber:
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
"""
|
||||
self.mqtt = mqtt_client
|
||||
self.db = db_manager
|
||||
@@ -79,6 +94,18 @@ class Subscriber:
|
||||
self._node_cleanup_days = node_cleanup_days
|
||||
self._cleanup_thread: Optional[threading.Thread] = None
|
||||
self._last_cleanup: Optional[datetime] = None
|
||||
self._ingest_mode = ingest_mode.lower()
|
||||
if self._ingest_mode not in {
|
||||
self.INGEST_MODE_NATIVE,
|
||||
self.INGEST_MODE_LETSMESH_UPLOAD,
|
||||
}:
|
||||
raise ValueError(f"Unsupported collector ingest mode: {ingest_mode}")
|
||||
self._letsmesh_decoder = LetsMeshPacketDecoder(
|
||||
enabled=letsmesh_decoder_enabled,
|
||||
command=letsmesh_decoder_command,
|
||||
channel_keys=letsmesh_decoder_channel_keys,
|
||||
timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
@property
|
||||
def is_healthy(self) -> bool:
|
||||
@@ -125,14 +152,34 @@ class Subscriber:
|
||||
pattern: Subscription pattern
|
||||
payload: Message payload
|
||||
"""
|
||||
# Parse event from topic
|
||||
parsed = self.mqtt.topic_builder.parse_event_topic(topic)
|
||||
parsed: tuple[str, str, dict[str, Any]] | None
|
||||
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
|
||||
parsed = self._normalize_letsmesh_event(topic, payload)
|
||||
else:
|
||||
parsed_event = self.mqtt.topic_builder.parse_event_topic(topic)
|
||||
parsed = (
|
||||
(parsed_event[0], parsed_event[1], payload) if parsed_event else None
|
||||
)
|
||||
|
||||
if not parsed:
|
||||
logger.warning(f"Could not parse event topic: {topic}")
|
||||
logger.warning(
|
||||
"Could not parse topic for ingest mode %s: %s",
|
||||
self._ingest_mode,
|
||||
topic,
|
||||
)
|
||||
return
|
||||
|
||||
public_key, event_type = parsed
|
||||
logger.debug(f"Received event: {event_type} from {public_key[:12]}...")
|
||||
public_key, event_type, normalized_payload = parsed
|
||||
logger.debug("Received event: %s from %s...", event_type, public_key[:12])
|
||||
self._dispatch_event(public_key, event_type, normalized_payload)
|
||||
|
||||
def _dispatch_event(
|
||||
self,
|
||||
public_key: str,
|
||||
event_type: str,
|
||||
payload: dict[str, Any],
|
||||
) -> None:
|
||||
"""Route a normalized event to the appropriate handler."""
|
||||
|
||||
# Find and call handler
|
||||
handler = self._handlers.get(event_type)
|
||||
@@ -358,10 +405,20 @@ class Subscriber:
|
||||
logger.error(f"Failed to connect to MQTT broker: {e}")
|
||||
raise
|
||||
|
||||
# Subscribe to all event topics
|
||||
event_topic = self.mqtt.topic_builder.all_events_topic()
|
||||
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to event topic: {event_topic}")
|
||||
# Subscribe to topics based on ingest mode
|
||||
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
|
||||
letsmesh_topics = [
|
||||
f"{self.mqtt.topic_builder.prefix}/+/packets",
|
||||
f"{self.mqtt.topic_builder.prefix}/+/status",
|
||||
f"{self.mqtt.topic_builder.prefix}/+/internal",
|
||||
]
|
||||
for letsmesh_topic in letsmesh_topics:
|
||||
self.mqtt.subscribe(letsmesh_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to LetsMesh upload topic: {letsmesh_topic}")
|
||||
else:
|
||||
event_topic = self.mqtt.topic_builder.all_events_topic()
|
||||
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
|
||||
logger.info(f"Subscribed to event topic: {event_topic}")
|
||||
|
||||
self._running = True
|
||||
|
||||
@@ -429,6 +486,9 @@ def create_subscriber(
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
ingest_mode: str = "native",
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
@@ -436,6 +496,10 @@ def create_subscriber(
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
) -> Subscriber:
|
||||
"""Create a configured subscriber instance.
|
||||
|
||||
@@ -446,6 +510,9 @@ def create_subscriber(
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
@@ -453,6 +520,10 @@ def create_subscriber(
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
|
||||
Returns:
|
||||
Configured Subscriber instance
|
||||
@@ -467,6 +538,8 @@ def create_subscriber(
|
||||
prefix=mqtt_prefix,
|
||||
client_id=f"meshcore-collector-{unique_id}",
|
||||
tls=mqtt_tls,
|
||||
transport=mqtt_transport,
|
||||
ws_path=mqtt_ws_path,
|
||||
)
|
||||
mqtt_client = MQTTClient(mqtt_config)
|
||||
|
||||
@@ -483,6 +556,11 @@ def create_subscriber(
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
ingest_mode=ingest_mode,
|
||||
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
|
||||
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
# Register handlers
|
||||
@@ -500,6 +578,9 @@ def run_collector(
|
||||
mqtt_password: Optional[str] = None,
|
||||
mqtt_prefix: str = "meshcore",
|
||||
mqtt_tls: bool = False,
|
||||
mqtt_transport: str = "tcp",
|
||||
mqtt_ws_path: str = "/mqtt",
|
||||
ingest_mode: str = "native",
|
||||
database_url: str = "sqlite:///./meshcore.db",
|
||||
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
|
||||
cleanup_enabled: bool = False,
|
||||
@@ -507,6 +588,10 @@ def run_collector(
|
||||
cleanup_interval_hours: int = 24,
|
||||
node_cleanup_enabled: bool = False,
|
||||
node_cleanup_days: int = 90,
|
||||
letsmesh_decoder_enabled: bool = True,
|
||||
letsmesh_decoder_command: str = "meshcore-decoder",
|
||||
letsmesh_decoder_channel_keys: list[str] | None = None,
|
||||
letsmesh_decoder_timeout_seconds: float = 2.0,
|
||||
) -> None:
|
||||
"""Run the collector (blocking).
|
||||
|
||||
@@ -517,6 +602,9 @@ def run_collector(
|
||||
mqtt_password: MQTT password
|
||||
mqtt_prefix: MQTT topic prefix
|
||||
mqtt_tls: Enable TLS/SSL for MQTT connection
|
||||
mqtt_transport: MQTT transport protocol (tcp or websockets)
|
||||
mqtt_ws_path: WebSocket path (used when transport=websockets)
|
||||
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
|
||||
database_url: Database connection URL
|
||||
webhook_dispatcher: Optional webhook dispatcher for event forwarding
|
||||
cleanup_enabled: Enable automatic event data cleanup
|
||||
@@ -524,6 +612,10 @@ def run_collector(
|
||||
cleanup_interval_hours: Hours between cleanup runs
|
||||
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
|
||||
node_cleanup_days: Remove nodes not seen for this many days
|
||||
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
|
||||
letsmesh_decoder_command: Decoder CLI command
|
||||
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
|
||||
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
|
||||
"""
|
||||
subscriber = create_subscriber(
|
||||
mqtt_host=mqtt_host,
|
||||
@@ -532,6 +624,9 @@ def run_collector(
|
||||
mqtt_password=mqtt_password,
|
||||
mqtt_prefix=mqtt_prefix,
|
||||
mqtt_tls=mqtt_tls,
|
||||
mqtt_transport=mqtt_transport,
|
||||
mqtt_ws_path=mqtt_ws_path,
|
||||
ingest_mode=ingest_mode,
|
||||
database_url=database_url,
|
||||
webhook_dispatcher=webhook_dispatcher,
|
||||
cleanup_enabled=cleanup_enabled,
|
||||
@@ -539,6 +634,10 @@ def run_collector(
|
||||
cleanup_interval_hours=cleanup_interval_hours,
|
||||
node_cleanup_enabled=node_cleanup_enabled,
|
||||
node_cleanup_days=node_cleanup_days,
|
||||
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
|
||||
letsmesh_decoder_command=letsmesh_decoder_command,
|
||||
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
|
||||
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
|
||||
)
|
||||
|
||||
# Set up signal handlers
|
||||
|
||||
@@ -19,7 +19,7 @@ class TagValue(BaseModel):
|
||||
"""Schema for a tag value with type."""
|
||||
|
||||
value: str | None = None
|
||||
type: str = Field(default="string", pattern=r"^(string|number|boolean|coordinate)$")
|
||||
type: str = Field(default="string", pattern=r"^(string|number|boolean)$")
|
||||
|
||||
|
||||
class NodeTags(BaseModel):
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Pydantic Settings for MeshCore Hub configuration."""
|
||||
|
||||
from enum import Enum
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field, field_validator
|
||||
@@ -24,6 +25,20 @@ class InterfaceMode(str, Enum):
|
||||
SENDER = "SENDER"
|
||||
|
||||
|
||||
class MQTTTransport(str, Enum):
|
||||
"""MQTT transport type."""
|
||||
|
||||
TCP = "tcp"
|
||||
WEBSOCKETS = "websockets"
|
||||
|
||||
|
||||
class CollectorIngestMode(str, Enum):
|
||||
"""Collector MQTT ingest mode."""
|
||||
|
||||
NATIVE = "native"
|
||||
LETSMESH_UPLOAD = "letsmesh_upload"
|
||||
|
||||
|
||||
class CommonSettings(BaseSettings):
|
||||
"""Common settings shared by all components."""
|
||||
|
||||
@@ -55,6 +70,14 @@ class CommonSettings(BaseSettings):
|
||||
mqtt_tls: bool = Field(
|
||||
default=False, description="Enable TLS/SSL for MQTT connection"
|
||||
)
|
||||
mqtt_transport: MQTTTransport = Field(
|
||||
default=MQTTTransport.TCP,
|
||||
description="MQTT transport protocol (tcp or websockets)",
|
||||
)
|
||||
mqtt_ws_path: str = Field(
|
||||
default="/mqtt",
|
||||
description="WebSocket path for MQTT transport (used when MQTT_TRANSPORT=websockets)",
|
||||
)
|
||||
|
||||
|
||||
class InterfaceSettings(CommonSettings):
|
||||
@@ -162,6 +185,42 @@ class CollectorSettings(CommonSettings):
|
||||
description="Remove nodes not seen for this many days (last_seen)",
|
||||
ge=1,
|
||||
)
|
||||
collector_ingest_mode: CollectorIngestMode = Field(
|
||||
default=CollectorIngestMode.NATIVE,
|
||||
description=(
|
||||
"Collector MQTT ingest mode. "
|
||||
"'native' expects <prefix>/<pubkey>/event/<event_name>. "
|
||||
"'letsmesh_upload' expects LetsMesh observer uploads on "
|
||||
"<prefix>/<pubkey>/(packets|status|internal)."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_enabled: bool = Field(
|
||||
default=True,
|
||||
description=(
|
||||
"Enable external LetsMesh packet decoding via meshcore-decoder. "
|
||||
"Only applies when COLLECTOR_INGEST_MODE=letsmesh_upload."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_command: str = Field(
|
||||
default="meshcore-decoder",
|
||||
description=(
|
||||
"Command used to run LetsMesh packet decoder CLI "
|
||||
"(for example: meshcore-decoder, /usr/local/bin/meshcore-decoder, "
|
||||
"or 'npx meshcore-decoder')."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_keys: Optional[str] = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional channel secret keys for LetsMesh message decryption. "
|
||||
"Provide as comma/space separated hex values."
|
||||
),
|
||||
)
|
||||
collector_letsmesh_decoder_timeout_seconds: float = Field(
|
||||
default=2.0,
|
||||
description="Timeout in seconds for each decoder invocation.",
|
||||
ge=0.1,
|
||||
)
|
||||
|
||||
@property
|
||||
def collector_data_dir(self) -> str:
|
||||
@@ -201,6 +260,17 @@ class CollectorSettings(CommonSettings):
|
||||
|
||||
return str(Path(self.effective_seed_home) / "members.yaml")
|
||||
|
||||
@property
|
||||
def collector_letsmesh_decoder_keys_list(self) -> list[str]:
|
||||
"""Parse configured LetsMesh decoder keys into a normalized list."""
|
||||
if not self.collector_letsmesh_decoder_keys:
|
||||
return []
|
||||
return [
|
||||
part.strip()
|
||||
for part in re.split(r"[,\s]+", self.collector_letsmesh_decoder_keys)
|
||||
if part.strip()
|
||||
]
|
||||
|
||||
@field_validator("database_url")
|
||||
@classmethod
|
||||
def validate_database_url(cls, v: Optional[str]) -> Optional[str]:
|
||||
@@ -253,6 +323,47 @@ class WebSettings(CommonSettings):
|
||||
web_host: str = Field(default="0.0.0.0", description="Web server host")
|
||||
web_port: int = Field(default=8080, description="Web server port")
|
||||
|
||||
# Timezone for date/time display (uses standard TZ environment variable)
|
||||
tz: str = Field(default="UTC", description="Timezone for displaying dates/times")
|
||||
|
||||
# Theme (dark or light, default dark)
|
||||
web_theme: str = Field(
|
||||
default="dark",
|
||||
description="Default theme for the web dashboard (dark or light)",
|
||||
)
|
||||
|
||||
# Locale / language (default: English)
|
||||
web_locale: str = Field(
|
||||
default="en",
|
||||
description="Locale/language for the web dashboard (e.g. 'en')",
|
||||
)
|
||||
web_datetime_locale: str = Field(
|
||||
default="en-US",
|
||||
description=(
|
||||
"Locale used for date/time formatting in the web dashboard "
|
||||
"(e.g. 'en-US', 'en-GB')."
|
||||
),
|
||||
)
|
||||
|
||||
# Auto-refresh interval for list pages
|
||||
web_auto_refresh_seconds: int = Field(
|
||||
default=30,
|
||||
description="Auto-refresh interval in seconds for list pages (0 to disable)",
|
||||
ge=0,
|
||||
)
|
||||
|
||||
# Trusted proxy hosts for X-Forwarded-For header processing
|
||||
web_trusted_proxy_hosts: str = Field(
|
||||
default="*",
|
||||
description="Comma-separated list of trusted proxy hosts or '*' for all",
|
||||
)
|
||||
|
||||
# Admin interface (disabled by default for security)
|
||||
web_admin_enabled: bool = Field(
|
||||
default=False,
|
||||
description="Enable admin interface at /a/ (requires OAuth2Proxy in front)",
|
||||
)
|
||||
|
||||
# API connection
|
||||
api_base_url: str = Field(
|
||||
default="http://localhost:8000",
|
||||
@@ -285,10 +396,80 @@ class WebSettings(CommonSettings):
|
||||
network_contact_github: Optional[str] = Field(
|
||||
default=None, description="GitHub repository URL"
|
||||
)
|
||||
network_contact_youtube: Optional[str] = Field(
|
||||
default=None, description="YouTube channel URL"
|
||||
)
|
||||
network_welcome_text: Optional[str] = Field(
|
||||
default=None, description="Welcome text for homepage"
|
||||
)
|
||||
|
||||
# Feature flags (control which pages are visible in the web dashboard)
|
||||
feature_dashboard: bool = Field(
|
||||
default=True, description="Enable the /dashboard page"
|
||||
)
|
||||
feature_nodes: bool = Field(default=True, description="Enable the /nodes pages")
|
||||
feature_advertisements: bool = Field(
|
||||
default=True, description="Enable the /advertisements page"
|
||||
)
|
||||
feature_messages: bool = Field(
|
||||
default=True, description="Enable the /messages page"
|
||||
)
|
||||
feature_map: bool = Field(
|
||||
default=True, description="Enable the /map page and /map/data endpoint"
|
||||
)
|
||||
feature_members: bool = Field(default=True, description="Enable the /members page")
|
||||
feature_pages: bool = Field(
|
||||
default=True, description="Enable custom markdown pages"
|
||||
)
|
||||
|
||||
# Content directory (contains pages/ and media/ subdirectories)
|
||||
content_home: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Directory containing custom content (pages/, media/) (default: ./content)",
|
||||
)
|
||||
|
||||
@property
|
||||
def features(self) -> dict[str, bool]:
|
||||
"""Get feature flags as a dictionary.
|
||||
|
||||
Automatic dependencies:
|
||||
- Dashboard requires at least one of nodes/advertisements/messages.
|
||||
- Map requires nodes (map displays node locations).
|
||||
"""
|
||||
has_dashboard_content = (
|
||||
self.feature_nodes or self.feature_advertisements or self.feature_messages
|
||||
)
|
||||
return {
|
||||
"dashboard": self.feature_dashboard and has_dashboard_content,
|
||||
"nodes": self.feature_nodes,
|
||||
"advertisements": self.feature_advertisements,
|
||||
"messages": self.feature_messages,
|
||||
"map": self.feature_map and self.feature_nodes,
|
||||
"members": self.feature_members,
|
||||
"pages": self.feature_pages,
|
||||
}
|
||||
|
||||
@property
|
||||
def effective_content_home(self) -> str:
|
||||
"""Get the effective content home directory."""
|
||||
from pathlib import Path
|
||||
|
||||
return str(Path(self.content_home or "./content"))
|
||||
|
||||
@property
|
||||
def effective_pages_home(self) -> str:
|
||||
"""Get the effective pages directory (content_home/pages)."""
|
||||
from pathlib import Path
|
||||
|
||||
return str(Path(self.effective_content_home) / "pages")
|
||||
|
||||
@property
|
||||
def effective_media_home(self) -> str:
|
||||
"""Get the effective media directory (content_home/media)."""
|
||||
from pathlib import Path
|
||||
|
||||
return str(Path(self.effective_content_home) / "media")
|
||||
|
||||
@property
|
||||
def web_data_dir(self) -> str:
|
||||
"""Get the web data directory path."""
|
||||
|
||||
@@ -98,6 +98,15 @@ class DatabaseManager:
|
||||
echo: Enable SQL query logging
|
||||
"""
|
||||
self.database_url = database_url
|
||||
|
||||
# Ensure parent directory exists for SQLite databases
|
||||
if database_url.startswith("sqlite:///"):
|
||||
from pathlib import Path
|
||||
|
||||
# Extract path from sqlite:///path/to/db.sqlite
|
||||
db_path = Path(database_url.replace("sqlite:///", ""))
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.engine = create_database_engine(database_url, echo=echo)
|
||||
self.session_factory = create_session_factory(self.engine)
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ def compute_advertisement_hash(
|
||||
adv_type: Optional[str] = None,
|
||||
flags: Optional[int] = None,
|
||||
received_at: Optional[datetime] = None,
|
||||
bucket_seconds: int = 30,
|
||||
bucket_seconds: int = 120,
|
||||
) -> str:
|
||||
"""Compute a deterministic hash for an advertisement.
|
||||
|
||||
@@ -104,7 +104,7 @@ def compute_telemetry_hash(
|
||||
node_public_key: str,
|
||||
parsed_data: Optional[dict] = None,
|
||||
received_at: Optional[datetime] = None,
|
||||
bucket_seconds: int = 30,
|
||||
bucket_seconds: int = 120,
|
||||
) -> str:
|
||||
"""Compute a deterministic hash for a telemetry record.
|
||||
|
||||
|
||||
81
src/meshcore_hub/common/i18n.py
Normal file
81
src/meshcore_hub/common/i18n.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Lightweight i18n support for MeshCore Hub.
|
||||
|
||||
Loads JSON translation files and provides a ``t()`` lookup function
|
||||
that is shared between the Python (Jinja2) and JavaScript (SPA) sides.
|
||||
The same ``en.json`` file is served as a static asset for the client and
|
||||
read from disk for server-side template rendering.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_translations: dict[str, Any] = {}
|
||||
_locale: str = "en"
|
||||
|
||||
# Directory where locale JSON files live (web/static/locales/)
|
||||
LOCALES_DIR = Path(__file__).parent.parent / "web" / "static" / "locales"
|
||||
|
||||
|
||||
def load_locale(locale: str = "en", locales_dir: Path | None = None) -> None:
|
||||
"""Load a locale's translation file into memory.
|
||||
|
||||
Args:
|
||||
locale: Language code (e.g. ``"en"``).
|
||||
locales_dir: Override directory containing ``<locale>.json`` files.
|
||||
"""
|
||||
global _translations, _locale
|
||||
directory = locales_dir or LOCALES_DIR
|
||||
path = directory / f"{locale}.json"
|
||||
if not path.exists():
|
||||
logger.warning("Locale file not found: %s – falling back to 'en'", path)
|
||||
path = directory / "en.json"
|
||||
if path.exists():
|
||||
_translations = json.loads(path.read_text(encoding="utf-8"))
|
||||
_locale = locale
|
||||
logger.info("Loaded locale '%s' from %s", locale, path)
|
||||
else:
|
||||
logger.error("No locale files found in %s", directory)
|
||||
|
||||
|
||||
def _resolve(key: str) -> Any:
|
||||
"""Walk a dot-separated key through the nested translation dict."""
|
||||
value: Any = _translations
|
||||
for part in key.split("."):
|
||||
if isinstance(value, dict):
|
||||
value = value.get(part)
|
||||
else:
|
||||
return None
|
||||
return value
|
||||
|
||||
|
||||
def t(key: str, **kwargs: Any) -> str:
|
||||
"""Translate a key with optional interpolation.
|
||||
|
||||
Supports ``{{var}}`` placeholders in translation strings.
|
||||
|
||||
Args:
|
||||
key: Dot-separated translation key (e.g. ``"nav.home"``).
|
||||
**kwargs: Interpolation values.
|
||||
|
||||
Returns:
|
||||
Translated string, or the key itself as fallback.
|
||||
"""
|
||||
val = _resolve(key)
|
||||
|
||||
if not isinstance(val, str):
|
||||
return key
|
||||
|
||||
# Interpolation: replace {{var}} placeholders
|
||||
for k, v in kwargs.items():
|
||||
val = val.replace("{{" + k + "}}", str(v))
|
||||
|
||||
return val
|
||||
|
||||
|
||||
def get_locale() -> str:
|
||||
"""Return the currently loaded locale code."""
|
||||
return _locale
|
||||
@@ -3,7 +3,7 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import DateTime, Index, Integer, String
|
||||
from sqlalchemy import DateTime, Float, Index, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
|
||||
@@ -23,6 +23,8 @@ class Node(Base, UUIDMixin, TimestampMixin):
|
||||
flags: Capability/status flags bitmask
|
||||
first_seen: Timestamp of first advertisement
|
||||
last_seen: Timestamp of most recent activity
|
||||
lat: GPS latitude coordinate (if available)
|
||||
lon: GPS longitude coordinate (if available)
|
||||
created_at: Record creation timestamp
|
||||
updated_at: Record update timestamp
|
||||
"""
|
||||
@@ -57,6 +59,14 @@ class Node(Base, UUIDMixin, TimestampMixin):
|
||||
default=None,
|
||||
nullable=True,
|
||||
)
|
||||
lat: Mapped[Optional[float]] = mapped_column(
|
||||
Float,
|
||||
nullable=True,
|
||||
)
|
||||
lon: Mapped[Optional[float]] = mapped_column(
|
||||
Float,
|
||||
nullable=True,
|
||||
)
|
||||
|
||||
# Relationships
|
||||
tags: Mapped[list["NodeTag"]] = relationship(
|
||||
|
||||
@@ -21,7 +21,7 @@ class NodeTag(Base, UUIDMixin, TimestampMixin):
|
||||
node_id: Foreign key to nodes table
|
||||
key: Tag name/key
|
||||
value: Tag value (stored as text, can be JSON for typed values)
|
||||
value_type: Type hint (string, number, boolean, coordinate)
|
||||
value_type: Type hint (string, number, boolean)
|
||||
created_at: Record creation timestamp
|
||||
updated_at: Record update timestamp
|
||||
"""
|
||||
|
||||
@@ -20,7 +20,7 @@ class TracePath(Base, UUIDMixin, TimestampMixin):
|
||||
path_len: Path length
|
||||
flags: Trace flags
|
||||
auth: Authentication data
|
||||
path_hashes: JSON array of node hash identifiers
|
||||
path_hashes: JSON array of hex-encoded node hash identifiers (variable length)
|
||||
snr_values: JSON array of SNR values per hop
|
||||
hop_count: Total number of hops
|
||||
received_at: When received by interface
|
||||
|
||||
@@ -24,6 +24,8 @@ class MQTTConfig:
|
||||
keepalive: int = 60
|
||||
clean_session: bool = True
|
||||
tls: bool = False
|
||||
transport: str = "tcp"
|
||||
ws_path: str = "/mqtt"
|
||||
|
||||
|
||||
class TopicBuilder:
|
||||
@@ -37,6 +39,10 @@ class TopicBuilder:
|
||||
"""
|
||||
self.prefix = prefix
|
||||
|
||||
def _prefix_parts(self) -> list[str]:
|
||||
"""Split configured prefix into path segments."""
|
||||
return [part for part in self.prefix.strip("/").split("/") if part]
|
||||
|
||||
def event_topic(self, public_key: str, event_name: str) -> str:
|
||||
"""Build an event topic.
|
||||
|
||||
@@ -86,10 +92,16 @@ class TopicBuilder:
|
||||
Returns:
|
||||
Tuple of (public_key, event_name) or None if invalid
|
||||
"""
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "event":
|
||||
public_key = parts[1]
|
||||
event_name = "/".join(parts[3:])
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
if (
|
||||
len(parts) >= prefix_len + 3
|
||||
and parts[:prefix_len] == prefix_parts
|
||||
and parts[prefix_len + 1] == "event"
|
||||
):
|
||||
public_key = parts[prefix_len]
|
||||
event_name = "/".join(parts[prefix_len + 2 :])
|
||||
return (public_key, event_name)
|
||||
return None
|
||||
|
||||
@@ -102,13 +114,39 @@ class TopicBuilder:
|
||||
Returns:
|
||||
Tuple of (public_key, command_name) or None if invalid
|
||||
"""
|
||||
parts = topic.split("/")
|
||||
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "command":
|
||||
public_key = parts[1]
|
||||
command_name = "/".join(parts[3:])
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
if (
|
||||
len(parts) >= prefix_len + 3
|
||||
and parts[:prefix_len] == prefix_parts
|
||||
and parts[prefix_len + 1] == "command"
|
||||
):
|
||||
public_key = parts[prefix_len]
|
||||
command_name = "/".join(parts[prefix_len + 2 :])
|
||||
return (public_key, command_name)
|
||||
return None
|
||||
|
||||
def parse_letsmesh_upload_topic(self, topic: str) -> tuple[str, str] | None:
|
||||
"""Parse a LetsMesh upload topic to extract public key and feed type.
|
||||
|
||||
LetsMesh upload topics are expected in this form:
|
||||
<prefix>/<public_key>/(packets|status|internal)
|
||||
"""
|
||||
parts = [part for part in topic.strip("/").split("/") if part]
|
||||
prefix_parts = self._prefix_parts()
|
||||
prefix_len = len(prefix_parts)
|
||||
|
||||
if len(parts) != prefix_len + 2 or parts[:prefix_len] != prefix_parts:
|
||||
return None
|
||||
|
||||
public_key = parts[prefix_len]
|
||||
feed_type = parts[prefix_len + 1]
|
||||
if feed_type not in {"packets", "status", "internal"}:
|
||||
return None
|
||||
|
||||
return (public_key, feed_type)
|
||||
|
||||
|
||||
MessageHandler = Callable[[str, str, dict[str, Any]], None]
|
||||
|
||||
@@ -124,14 +162,24 @@ class MQTTClient:
|
||||
"""
|
||||
self.config = config
|
||||
self.topic_builder = TopicBuilder(config.prefix)
|
||||
transport = config.transport.lower()
|
||||
if transport not in {"tcp", "websockets"}:
|
||||
raise ValueError(f"Unsupported MQTT transport: {config.transport}")
|
||||
|
||||
self._client = mqtt.Client(
|
||||
callback_api_version=CallbackAPIVersion.VERSION2, # type: ignore[call-arg]
|
||||
client_id=config.client_id,
|
||||
clean_session=config.clean_session,
|
||||
transport=transport,
|
||||
)
|
||||
self._connected = False
|
||||
self._message_handlers: dict[str, list[MessageHandler]] = {}
|
||||
|
||||
# Set WebSocket path when using MQTT over WebSockets.
|
||||
if transport == "websockets":
|
||||
self._client.ws_set_options(path=config.ws_path)
|
||||
logger.debug("MQTT WebSocket transport enabled (path=%s)", config.ws_path)
|
||||
|
||||
# Set up TLS if enabled
|
||||
if config.tls:
|
||||
self._client.tls_set()
|
||||
|
||||
@@ -28,6 +28,14 @@ class AdvertisementEvent(BaseModel):
|
||||
default=None,
|
||||
description="Capability/status flags bitmask",
|
||||
)
|
||||
lat: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Node latitude when location metadata is available",
|
||||
)
|
||||
lon: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Node longitude when location metadata is available",
|
||||
)
|
||||
|
||||
|
||||
class ContactMessageEvent(BaseModel):
|
||||
@@ -125,7 +133,7 @@ class TraceDataEvent(BaseModel):
|
||||
)
|
||||
path_hashes: Optional[list[str]] = Field(
|
||||
default=None,
|
||||
description="Array of 2-character node hash identifiers",
|
||||
description="Array of hex-encoded node hash identifiers (variable length, e.g. '4a' for single-byte or 'b3fa' for multibyte)",
|
||||
)
|
||||
snr_values: Optional[list[float]] = Field(
|
||||
default=None,
|
||||
|
||||
@@ -119,6 +119,9 @@ class AdvertisementRead(BaseModel):
|
||||
node_tag_name: Optional[str] = Field(
|
||||
default=None, description="Node name from tags"
|
||||
)
|
||||
node_tag_description: Optional[str] = Field(
|
||||
default=None, description="Node description from tags"
|
||||
)
|
||||
adv_type: Optional[str] = Field(default=None, description="Node type")
|
||||
flags: Optional[int] = Field(default=None, description="Capability flags")
|
||||
received_at: datetime = Field(..., description="When received")
|
||||
@@ -152,7 +155,8 @@ class TracePathRead(BaseModel):
|
||||
flags: Optional[int] = Field(default=None, description="Trace flags")
|
||||
auth: Optional[int] = Field(default=None, description="Auth data")
|
||||
path_hashes: Optional[list[str]] = Field(
|
||||
default=None, description="Node hash identifiers"
|
||||
default=None,
|
||||
description="Hex-encoded node hash identifiers (variable length, e.g. '4a' for single-byte or 'b3fa' for multibyte)",
|
||||
)
|
||||
snr_values: Optional[list[float]] = Field(
|
||||
default=None, description="SNR values per hop"
|
||||
|
||||
@@ -19,7 +19,7 @@ class NodeTagCreate(BaseModel):
|
||||
default=None,
|
||||
description="Tag value",
|
||||
)
|
||||
value_type: Literal["string", "number", "boolean", "coordinate"] = Field(
|
||||
value_type: Literal["string", "number", "boolean"] = Field(
|
||||
default="string",
|
||||
description="Value type hint",
|
||||
)
|
||||
@@ -32,12 +32,33 @@ class NodeTagUpdate(BaseModel):
|
||||
default=None,
|
||||
description="Tag value",
|
||||
)
|
||||
value_type: Optional[Literal["string", "number", "boolean", "coordinate"]] = Field(
|
||||
value_type: Optional[Literal["string", "number", "boolean"]] = Field(
|
||||
default=None,
|
||||
description="Value type hint",
|
||||
)
|
||||
|
||||
|
||||
class NodeTagMove(BaseModel):
|
||||
"""Schema for moving a node tag to a different node."""
|
||||
|
||||
new_public_key: str = Field(
|
||||
...,
|
||||
min_length=64,
|
||||
max_length=64,
|
||||
description="Public key of the destination node",
|
||||
)
|
||||
|
||||
|
||||
class NodeTagsCopyResult(BaseModel):
|
||||
"""Schema for bulk copy tags result."""
|
||||
|
||||
copied: int = Field(..., description="Number of tags copied")
|
||||
skipped: int = Field(..., description="Number of tags skipped (already exist)")
|
||||
skipped_keys: list[str] = Field(
|
||||
default_factory=list, description="Keys of skipped tags"
|
||||
)
|
||||
|
||||
|
||||
class NodeTagRead(BaseModel):
|
||||
"""Schema for reading a node tag."""
|
||||
|
||||
@@ -62,6 +83,8 @@ class NodeRead(BaseModel):
|
||||
last_seen: Optional[datetime] = Field(
|
||||
default=None, description="Last activity timestamp"
|
||||
)
|
||||
lat: Optional[float] = Field(default=None, description="GPS latitude coordinate")
|
||||
lon: Optional[float] = Field(default=None, description="GPS longitude coordinate")
|
||||
created_at: datetime = Field(..., description="Record creation timestamp")
|
||||
updated_at: datetime = Field(..., description="Record update timestamp")
|
||||
tags: list[NodeTagRead] = Field(default_factory=list, description="Node tags")
|
||||
|
||||
@@ -1,17 +1,28 @@
|
||||
"""FastAPI application for MeshCore Hub Web Dashboard."""
|
||||
"""FastAPI application for MeshCore Hub Web Dashboard (SPA)."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import AsyncGenerator
|
||||
from typing import Any, AsyncGenerator
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import httpx
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi import FastAPI, Request, Response
|
||||
from fastapi.responses import HTMLResponse, JSONResponse, PlainTextResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
|
||||
|
||||
from meshcore_hub import __version__
|
||||
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
|
||||
from meshcore_hub.common.i18n import load_locale, t
|
||||
from meshcore_hub.common.schemas import RadioConfig
|
||||
from meshcore_hub.web.middleware import CacheControlMiddleware
|
||||
from meshcore_hub.web.pages import PageLoader
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,6 +32,60 @@ TEMPLATES_DIR = PACKAGE_DIR / "templates"
|
||||
STATIC_DIR = PACKAGE_DIR / "static"
|
||||
|
||||
|
||||
def _parse_decoder_key_entries(raw: str | None) -> list[str]:
|
||||
"""Parse COLLECTOR_LETSMESH_DECODER_KEYS into key entries."""
|
||||
if not raw:
|
||||
return []
|
||||
return [part.strip() for part in re.split(r"[,\s]+", raw) if part.strip()]
|
||||
|
||||
|
||||
def _build_channel_labels() -> dict[str, str]:
|
||||
"""Build UI channel labels from built-in + configured decoder keys."""
|
||||
raw_keys = os.getenv("COLLECTOR_LETSMESH_DECODER_KEYS")
|
||||
decoder = LetsMeshPacketDecoder(
|
||||
enabled=False,
|
||||
channel_keys=_parse_decoder_key_entries(raw_keys),
|
||||
)
|
||||
labels = decoder.channel_labels_by_index()
|
||||
return {str(idx): label for idx, label in sorted(labels.items())}
|
||||
|
||||
|
||||
def _resolve_logo(media_home: Path) -> tuple[str, bool, Path | None]:
|
||||
"""Resolve logo URL and whether light-mode inversion should be applied.
|
||||
|
||||
Returns:
|
||||
tuple of (logo_url, invert_in_light_mode, resolved_path)
|
||||
"""
|
||||
custom_logo_candidates = (
|
||||
("logo-invert.svg", "/media/images/logo-invert.svg", True),
|
||||
("logo.svg", "/media/images/logo.svg", False),
|
||||
)
|
||||
for filename, url, invert_in_light_mode in custom_logo_candidates:
|
||||
path = media_home / "images" / filename
|
||||
if path.exists():
|
||||
cache_buster = int(path.stat().st_mtime)
|
||||
return f"{url}?v={cache_buster}", invert_in_light_mode, path
|
||||
|
||||
# Default packaged logo is monochrome and needs darkening in light mode.
|
||||
return "/static/img/logo.svg", True, None
|
||||
|
||||
|
||||
def _is_authenticated_proxy_request(request: Request) -> bool:
|
||||
"""Check whether request is authenticated by an upstream auth proxy.
|
||||
|
||||
Supported patterns:
|
||||
- OAuth2/OIDC proxy headers: X-Forwarded-User, X-Auth-Request-User
|
||||
- Forwarded Basic auth header: Authorization: Basic ...
|
||||
"""
|
||||
if request.headers.get("x-forwarded-user"):
|
||||
return True
|
||||
if request.headers.get("x-auth-request-user"):
|
||||
return True
|
||||
|
||||
auth_header = request.headers.get("authorization", "")
|
||||
return auth_header.lower().startswith("basic ")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||
"""Application lifespan handler."""
|
||||
@@ -47,9 +112,85 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||
logger.info("Web dashboard stopped")
|
||||
|
||||
|
||||
def _build_config_json(app: FastAPI, request: Request) -> str:
|
||||
"""Build the JSON config object to embed in the SPA shell.
|
||||
|
||||
Args:
|
||||
app: The FastAPI application instance.
|
||||
request: The current HTTP request.
|
||||
|
||||
Returns:
|
||||
JSON string with app configuration.
|
||||
"""
|
||||
# Parse radio config
|
||||
radio_config = RadioConfig.from_config_string(app.state.network_radio_config)
|
||||
radio_config_dict = None
|
||||
if radio_config:
|
||||
radio_config_dict = {
|
||||
"profile": radio_config.profile,
|
||||
"frequency": radio_config.frequency,
|
||||
"bandwidth": radio_config.bandwidth,
|
||||
"spreading_factor": radio_config.spreading_factor,
|
||||
"coding_rate": radio_config.coding_rate,
|
||||
"tx_power": radio_config.tx_power,
|
||||
}
|
||||
|
||||
# Get feature flags
|
||||
features = app.state.features
|
||||
|
||||
# Get custom pages for navigation (empty when pages feature is disabled)
|
||||
page_loader = app.state.page_loader
|
||||
custom_pages = (
|
||||
[
|
||||
{
|
||||
"slug": p.slug,
|
||||
"title": p.title,
|
||||
"url": p.url,
|
||||
"menu_order": p.menu_order,
|
||||
}
|
||||
for p in page_loader.get_menu_pages()
|
||||
]
|
||||
if features.get("pages", True)
|
||||
else []
|
||||
)
|
||||
|
||||
config = {
|
||||
"network_name": app.state.network_name,
|
||||
"network_city": app.state.network_city,
|
||||
"network_country": app.state.network_country,
|
||||
"network_radio_config": radio_config_dict,
|
||||
"network_contact_email": app.state.network_contact_email,
|
||||
"network_contact_discord": app.state.network_contact_discord,
|
||||
"network_contact_github": app.state.network_contact_github,
|
||||
"network_contact_youtube": app.state.network_contact_youtube,
|
||||
"network_welcome_text": app.state.network_welcome_text,
|
||||
"admin_enabled": app.state.admin_enabled,
|
||||
"features": features,
|
||||
"custom_pages": custom_pages,
|
||||
"logo_url": app.state.logo_url,
|
||||
"version": __version__,
|
||||
"timezone": app.state.timezone_abbr,
|
||||
"timezone_iana": app.state.timezone,
|
||||
"is_authenticated": _is_authenticated_proxy_request(request),
|
||||
"default_theme": app.state.web_theme,
|
||||
"locale": app.state.web_locale,
|
||||
"datetime_locale": app.state.web_datetime_locale,
|
||||
"auto_refresh_seconds": app.state.auto_refresh_seconds,
|
||||
"channel_labels": app.state.channel_labels,
|
||||
"logo_invert_light": app.state.logo_invert_light,
|
||||
}
|
||||
|
||||
# Escape "</script>" sequences to prevent XSS breakout from the
|
||||
# <script> block where this JSON is embedded via |safe in the
|
||||
# Jinja2 template. "<\/" is valid JSON per the spec and parsed
|
||||
# correctly by JavaScript's JSON.parse().
|
||||
return json.dumps(config).replace("</", "<\\/")
|
||||
|
||||
|
||||
def create_app(
|
||||
api_url: str | None = None,
|
||||
api_key: str | None = None,
|
||||
admin_enabled: bool | None = None,
|
||||
network_name: str | None = None,
|
||||
network_city: str | None = None,
|
||||
network_country: str | None = None,
|
||||
@@ -57,7 +198,9 @@ def create_app(
|
||||
network_contact_email: str | None = None,
|
||||
network_contact_discord: str | None = None,
|
||||
network_contact_github: str | None = None,
|
||||
network_contact_youtube: str | None = None,
|
||||
network_welcome_text: str | None = None,
|
||||
features: dict[str, bool] | None = None,
|
||||
) -> FastAPI:
|
||||
"""Create and configure the web dashboard application.
|
||||
|
||||
@@ -67,6 +210,7 @@ def create_app(
|
||||
Args:
|
||||
api_url: Base URL of the MeshCore Hub API
|
||||
api_key: API key for authentication
|
||||
admin_enabled: Enable admin interface at /a/
|
||||
network_name: Display name for the network
|
||||
network_city: City where the network is located
|
||||
network_country: Country where the network is located
|
||||
@@ -74,7 +218,9 @@ def create_app(
|
||||
network_contact_email: Contact email address
|
||||
network_contact_discord: Discord invite/server info
|
||||
network_contact_github: GitHub repository URL
|
||||
network_contact_youtube: YouTube channel URL
|
||||
network_welcome_text: Welcome text for homepage
|
||||
features: Feature flags dict (default: all enabled from settings)
|
||||
|
||||
Returns:
|
||||
Configured FastAPI application
|
||||
@@ -93,9 +239,45 @@ def create_app(
|
||||
redoc_url=None,
|
||||
)
|
||||
|
||||
# Trust proxy headers (X-Forwarded-Proto, X-Forwarded-For) for HTTPS detection
|
||||
trusted_hosts_raw = settings.web_trusted_proxy_hosts
|
||||
if trusted_hosts_raw == "*":
|
||||
trusted_hosts: str | list[str] = "*"
|
||||
else:
|
||||
trusted_hosts = [h.strip() for h in trusted_hosts_raw.split(",") if h.strip()]
|
||||
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts=trusted_hosts)
|
||||
|
||||
# Compute effective admin flag (parameter overrides setting)
|
||||
effective_admin = (
|
||||
admin_enabled if admin_enabled is not None else settings.web_admin_enabled
|
||||
)
|
||||
|
||||
# Warn when admin is enabled but proxy trust is wide open
|
||||
if effective_admin and settings.web_trusted_proxy_hosts == "*":
|
||||
logger.warning(
|
||||
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
|
||||
"Consider restricting to your reverse proxy IP for production deployments."
|
||||
)
|
||||
|
||||
# Add cache control headers based on resource type
|
||||
app.add_middleware(CacheControlMiddleware)
|
||||
|
||||
# Load i18n translations
|
||||
app.state.web_locale = settings.web_locale or "en"
|
||||
app.state.web_datetime_locale = settings.web_datetime_locale or "en-US"
|
||||
load_locale(app.state.web_locale)
|
||||
|
||||
# Auto-refresh interval
|
||||
app.state.auto_refresh_seconds = settings.web_auto_refresh_seconds
|
||||
app.state.channel_labels = _build_channel_labels()
|
||||
|
||||
# Store configuration in app state (use args if provided, else settings)
|
||||
app.state.web_theme = (
|
||||
settings.web_theme if settings.web_theme in ("dark", "light") else "dark"
|
||||
)
|
||||
app.state.api_url = api_url or settings.api_base_url
|
||||
app.state.api_key = api_key or settings.api_key
|
||||
app.state.admin_enabled = effective_admin
|
||||
app.state.network_name = network_name or settings.network_name
|
||||
app.state.network_city = network_city or settings.network_city
|
||||
app.state.network_country = network_country or settings.network_country
|
||||
@@ -111,24 +293,309 @@ def create_app(
|
||||
app.state.network_contact_github = (
|
||||
network_contact_github or settings.network_contact_github
|
||||
)
|
||||
app.state.network_contact_youtube = (
|
||||
network_contact_youtube or settings.network_contact_youtube
|
||||
)
|
||||
app.state.network_welcome_text = (
|
||||
network_welcome_text or settings.network_welcome_text
|
||||
)
|
||||
|
||||
# Set up templates
|
||||
# Store feature flags with automatic dependencies:
|
||||
# - Dashboard requires at least one of nodes/advertisements/messages
|
||||
# - Map requires nodes (map displays node locations)
|
||||
effective_features = features if features is not None else settings.features
|
||||
overrides: dict[str, bool] = {}
|
||||
has_dashboard_content = (
|
||||
effective_features.get("nodes", True)
|
||||
or effective_features.get("advertisements", True)
|
||||
or effective_features.get("messages", True)
|
||||
)
|
||||
if not has_dashboard_content:
|
||||
overrides["dashboard"] = False
|
||||
if not effective_features.get("nodes", True):
|
||||
overrides["map"] = False
|
||||
if overrides:
|
||||
effective_features = {**effective_features, **overrides}
|
||||
app.state.features = effective_features
|
||||
|
||||
# Set up templates (for SPA shell only)
|
||||
templates = Jinja2Templates(directory=str(TEMPLATES_DIR))
|
||||
templates.env.trim_blocks = True
|
||||
templates.env.lstrip_blocks = True
|
||||
templates.env.globals["t"] = t
|
||||
app.state.templates = templates
|
||||
|
||||
# Compute timezone
|
||||
app.state.timezone = settings.tz
|
||||
try:
|
||||
tz = ZoneInfo(settings.tz)
|
||||
app.state.timezone_abbr = datetime.now(tz).strftime("%Z")
|
||||
except Exception:
|
||||
app.state.timezone_abbr = "UTC"
|
||||
|
||||
# Initialize page loader for custom markdown pages
|
||||
page_loader = PageLoader(settings.effective_pages_home)
|
||||
page_loader.load_pages()
|
||||
app.state.page_loader = page_loader
|
||||
|
||||
# Check for custom logo and store media path
|
||||
media_home = Path(settings.effective_media_home)
|
||||
logo_url, logo_invert_light, logo_path = _resolve_logo(media_home)
|
||||
app.state.logo_url = logo_url
|
||||
app.state.logo_invert_light = logo_invert_light
|
||||
if logo_path is not None:
|
||||
logger.info("Using custom logo from %s", logo_path)
|
||||
|
||||
# Mount static files
|
||||
if STATIC_DIR.exists():
|
||||
app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
|
||||
|
||||
# Include routers
|
||||
from meshcore_hub.web.routes import web_router
|
||||
# Mount custom media files if directory exists
|
||||
if media_home.exists() and media_home.is_dir():
|
||||
app.mount("/media", StaticFiles(directory=str(media_home)), name="media")
|
||||
|
||||
app.include_router(web_router)
|
||||
# --- API Proxy ---
|
||||
@app.api_route(
|
||||
"/api/{path:path}",
|
||||
methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
|
||||
tags=["API Proxy"],
|
||||
)
|
||||
async def api_proxy(request: Request, path: str) -> Response:
|
||||
"""Proxy API requests to the backend API server."""
|
||||
client: httpx.AsyncClient = request.app.state.http_client
|
||||
url = f"/api/{path}"
|
||||
|
||||
# Health check endpoint
|
||||
# Forward query parameters
|
||||
params = dict(request.query_params)
|
||||
|
||||
# Forward body for write methods
|
||||
body = None
|
||||
if request.method in ("POST", "PUT", "PATCH"):
|
||||
body = await request.body()
|
||||
|
||||
# Forward content-type header
|
||||
headers: dict[str, str] = {}
|
||||
if "content-type" in request.headers:
|
||||
headers["content-type"] = request.headers["content-type"]
|
||||
|
||||
# Forward auth proxy headers for admin operations
|
||||
for h in ("x-forwarded-user", "x-forwarded-email", "x-forwarded-groups"):
|
||||
if h in request.headers:
|
||||
headers[h] = request.headers[h]
|
||||
|
||||
# Block mutating requests from unauthenticated users when admin is
|
||||
# enabled. OAuth2Proxy is expected to set X-Forwarded-User for
|
||||
# authenticated sessions; without it, write operations must be
|
||||
# rejected server-side to prevent auth bypass.
|
||||
if (
|
||||
request.method in ("POST", "PUT", "DELETE", "PATCH")
|
||||
and request.app.state.admin_enabled
|
||||
and not _is_authenticated_proxy_request(request)
|
||||
):
|
||||
return JSONResponse(
|
||||
{"detail": "Authentication required"},
|
||||
status_code=401,
|
||||
)
|
||||
|
||||
try:
|
||||
response = await client.request(
|
||||
method=request.method,
|
||||
url=url,
|
||||
params=params,
|
||||
content=body,
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
# Filter response headers (remove hop-by-hop headers)
|
||||
resp_headers: dict[str, str] = {}
|
||||
for k, v in response.headers.items():
|
||||
if k.lower() not in (
|
||||
"transfer-encoding",
|
||||
"connection",
|
||||
"keep-alive",
|
||||
"content-encoding",
|
||||
):
|
||||
resp_headers[k] = v
|
||||
|
||||
return Response(
|
||||
content=response.content,
|
||||
status_code=response.status_code,
|
||||
headers=resp_headers,
|
||||
)
|
||||
except httpx.ConnectError:
|
||||
return JSONResponse(
|
||||
{"detail": "API server unavailable"},
|
||||
status_code=502,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"API proxy error: {e}")
|
||||
return JSONResponse(
|
||||
{"detail": "API proxy error"},
|
||||
status_code=502,
|
||||
)
|
||||
|
||||
# --- Map Data Endpoint (server-side aggregation) ---
|
||||
@app.get("/map/data", tags=["Map"])
|
||||
async def map_data(request: Request) -> JSONResponse:
|
||||
"""Return node location data as JSON for the map."""
|
||||
if not request.app.state.features.get("map", True):
|
||||
return JSONResponse({"detail": "Map feature is disabled"}, status_code=404)
|
||||
nodes_with_location: list[dict[str, Any]] = []
|
||||
members_list: list[dict[str, Any]] = []
|
||||
members_by_id: dict[str, dict[str, Any]] = {}
|
||||
error: str | None = None
|
||||
total_nodes = 0
|
||||
nodes_with_coords = 0
|
||||
|
||||
try:
|
||||
# Fetch all members to build lookup by member_id
|
||||
members_response = await request.app.state.http_client.get(
|
||||
"/api/v1/members", params={"limit": 500}
|
||||
)
|
||||
if members_response.status_code == 200:
|
||||
members_data = members_response.json()
|
||||
for member in members_data.get("items", []):
|
||||
member_info = {
|
||||
"member_id": member.get("member_id"),
|
||||
"name": member.get("name"),
|
||||
"callsign": member.get("callsign"),
|
||||
}
|
||||
members_list.append(member_info)
|
||||
if member.get("member_id"):
|
||||
members_by_id[member["member_id"]] = member_info
|
||||
|
||||
# Fetch all nodes from API
|
||||
response = await request.app.state.http_client.get(
|
||||
"/api/v1/nodes", params={"limit": 500}
|
||||
)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
nodes = data.get("items", [])
|
||||
total_nodes = len(nodes)
|
||||
|
||||
for node in nodes:
|
||||
tags = node.get("tags", [])
|
||||
tag_lat = None
|
||||
tag_lon = None
|
||||
friendly_name = None
|
||||
role = None
|
||||
node_member_id = None
|
||||
|
||||
for tag in tags:
|
||||
key = tag.get("key")
|
||||
if key == "lat":
|
||||
try:
|
||||
tag_lat = float(tag.get("value"))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
elif key == "lon":
|
||||
try:
|
||||
tag_lon = float(tag.get("value"))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
elif key == "friendly_name":
|
||||
friendly_name = tag.get("value")
|
||||
elif key == "role":
|
||||
role = tag.get("value")
|
||||
elif key == "member_id":
|
||||
node_member_id = tag.get("value")
|
||||
|
||||
lat = tag_lat if tag_lat is not None else node.get("lat")
|
||||
lon = tag_lon if tag_lon is not None else node.get("lon")
|
||||
|
||||
if lat is None or lon is None:
|
||||
continue
|
||||
if lat == 0.0 and lon == 0.0:
|
||||
continue
|
||||
|
||||
nodes_with_coords += 1
|
||||
display_name = (
|
||||
friendly_name
|
||||
or node.get("name")
|
||||
or node.get("public_key", "")[:12]
|
||||
)
|
||||
public_key = node.get("public_key")
|
||||
owner = (
|
||||
members_by_id.get(node_member_id) if node_member_id else None
|
||||
)
|
||||
|
||||
nodes_with_location.append(
|
||||
{
|
||||
"public_key": public_key,
|
||||
"name": display_name,
|
||||
"adv_type": node.get("adv_type"),
|
||||
"lat": lat,
|
||||
"lon": lon,
|
||||
"last_seen": node.get("last_seen"),
|
||||
"role": role,
|
||||
"is_infra": role == "infra",
|
||||
"member_id": node_member_id,
|
||||
"owner": owner,
|
||||
}
|
||||
)
|
||||
else:
|
||||
error = f"API returned status {response.status_code}"
|
||||
|
||||
except Exception as e:
|
||||
error = str(e)
|
||||
logger.warning(f"Failed to fetch nodes for map: {e}")
|
||||
|
||||
infra_nodes = [n for n in nodes_with_location if n.get("is_infra")]
|
||||
infra_count = len(infra_nodes)
|
||||
|
||||
center_lat = 0.0
|
||||
center_lon = 0.0
|
||||
if nodes_with_location:
|
||||
center_lat = sum(n["lat"] for n in nodes_with_location) / len(
|
||||
nodes_with_location
|
||||
)
|
||||
center_lon = sum(n["lon"] for n in nodes_with_location) / len(
|
||||
nodes_with_location
|
||||
)
|
||||
|
||||
infra_center: dict[str, float] | None = None
|
||||
if infra_nodes:
|
||||
infra_center = {
|
||||
"lat": sum(n["lat"] for n in infra_nodes) / len(infra_nodes),
|
||||
"lon": sum(n["lon"] for n in infra_nodes) / len(infra_nodes),
|
||||
}
|
||||
|
||||
return JSONResponse(
|
||||
{
|
||||
"nodes": nodes_with_location,
|
||||
"members": members_list,
|
||||
"center": {"lat": center_lat, "lon": center_lon},
|
||||
"infra_center": infra_center,
|
||||
"debug": {
|
||||
"total_nodes": total_nodes,
|
||||
"nodes_with_coords": nodes_with_coords,
|
||||
"infra_nodes": infra_count,
|
||||
"error": error,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
# --- Custom Pages API ---
|
||||
@app.get("/spa/pages/{slug}", tags=["SPA"])
|
||||
async def get_custom_page(request: Request, slug: str) -> JSONResponse:
|
||||
"""Get a custom page by slug."""
|
||||
if not request.app.state.features.get("pages", True):
|
||||
return JSONResponse(
|
||||
{"detail": "Pages feature is disabled"}, status_code=404
|
||||
)
|
||||
page_loader = request.app.state.page_loader
|
||||
page = page_loader.get_page(slug)
|
||||
if not page:
|
||||
return JSONResponse({"detail": "Page not found"}, status_code=404)
|
||||
return JSONResponse(
|
||||
{
|
||||
"slug": page.slug,
|
||||
"title": page.title,
|
||||
"content_html": page.content_html,
|
||||
}
|
||||
)
|
||||
|
||||
# --- Health Endpoints ---
|
||||
@app.get("/health", tags=["Health"])
|
||||
async def health() -> dict:
|
||||
"""Basic health check."""
|
||||
@@ -145,30 +612,135 @@ def create_app(
|
||||
except Exception as e:
|
||||
return {"status": "not_ready", "api": str(e)}
|
||||
|
||||
# --- SEO Endpoints ---
|
||||
def _get_https_base_url(request: Request) -> str:
|
||||
"""Get base URL, ensuring HTTPS is used for public-facing URLs."""
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
if base_url.startswith("http://"):
|
||||
base_url = "https://" + base_url[7:]
|
||||
return base_url
|
||||
|
||||
@app.get("/robots.txt", response_class=PlainTextResponse)
|
||||
async def robots_txt(request: Request) -> str:
|
||||
"""Serve robots.txt."""
|
||||
base_url = _get_https_base_url(request)
|
||||
features = request.app.state.features
|
||||
|
||||
# Always disallow message and node detail pages
|
||||
disallow_lines = [
|
||||
"Disallow: /messages",
|
||||
"Disallow: /nodes/",
|
||||
]
|
||||
|
||||
# Add disallow for disabled features
|
||||
feature_paths = {
|
||||
"dashboard": "/dashboard",
|
||||
"nodes": "/nodes",
|
||||
"advertisements": "/advertisements",
|
||||
"map": "/map",
|
||||
"members": "/members",
|
||||
"pages": "/pages",
|
||||
}
|
||||
for feature, path in feature_paths.items():
|
||||
if not features.get(feature, True):
|
||||
line = f"Disallow: {path}"
|
||||
if line not in disallow_lines:
|
||||
disallow_lines.append(line)
|
||||
|
||||
disallow_block = "\n".join(disallow_lines)
|
||||
return (
|
||||
f"User-agent: *\n"
|
||||
f"{disallow_block}\n"
|
||||
f"\n"
|
||||
f"Sitemap: {base_url}/sitemap.xml\n"
|
||||
)
|
||||
|
||||
@app.get("/sitemap.xml")
|
||||
async def sitemap_xml(request: Request) -> Response:
|
||||
"""Generate dynamic sitemap."""
|
||||
base_url = _get_https_base_url(request)
|
||||
features = request.app.state.features
|
||||
|
||||
# Home is always included; other pages depend on feature flags
|
||||
all_static_pages = [
|
||||
("", "daily", "1.0", None),
|
||||
("/dashboard", "hourly", "0.9", "dashboard"),
|
||||
("/nodes", "hourly", "0.9", "nodes"),
|
||||
("/advertisements", "hourly", "0.8", "advertisements"),
|
||||
("/map", "daily", "0.7", "map"),
|
||||
("/members", "weekly", "0.6", "members"),
|
||||
]
|
||||
|
||||
static_pages = [
|
||||
(path, freq, prio)
|
||||
for path, freq, prio, feature in all_static_pages
|
||||
if feature is None or features.get(feature, True)
|
||||
]
|
||||
|
||||
urls = []
|
||||
for path, changefreq, priority in static_pages:
|
||||
urls.append(
|
||||
f" <url>\n"
|
||||
f" <loc>{base_url}{path}</loc>\n"
|
||||
f" <changefreq>{changefreq}</changefreq>\n"
|
||||
f" <priority>{priority}</priority>\n"
|
||||
f" </url>"
|
||||
)
|
||||
|
||||
if features.get("pages", True):
|
||||
page_loader = request.app.state.page_loader
|
||||
for page in page_loader.get_menu_pages():
|
||||
urls.append(
|
||||
f" <url>\n"
|
||||
f" <loc>{base_url}{page.url}</loc>\n"
|
||||
f" <changefreq>weekly</changefreq>\n"
|
||||
f" <priority>0.6</priority>\n"
|
||||
f" </url>"
|
||||
)
|
||||
|
||||
xml = (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n'
|
||||
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n'
|
||||
+ "\n".join(urls)
|
||||
+ "\n</urlset>"
|
||||
)
|
||||
|
||||
return Response(content=xml, media_type="application/xml")
|
||||
|
||||
# --- SPA Catch-All (MUST be last) ---
|
||||
@app.api_route("/{path:path}", methods=["GET"], tags=["SPA"])
|
||||
async def spa_catchall(request: Request, path: str = "") -> HTMLResponse:
|
||||
"""Serve the SPA shell for all non-API routes."""
|
||||
templates_inst: Jinja2Templates = request.app.state.templates
|
||||
features = request.app.state.features
|
||||
page_loader = request.app.state.page_loader
|
||||
custom_pages = (
|
||||
page_loader.get_menu_pages() if features.get("pages", True) else []
|
||||
)
|
||||
|
||||
config_json = _build_config_json(request.app, request)
|
||||
|
||||
return templates_inst.TemplateResponse(
|
||||
"spa.html",
|
||||
{
|
||||
"request": request,
|
||||
"network_name": request.app.state.network_name,
|
||||
"network_city": request.app.state.network_city,
|
||||
"network_country": request.app.state.network_country,
|
||||
"network_contact_email": request.app.state.network_contact_email,
|
||||
"network_contact_discord": request.app.state.network_contact_discord,
|
||||
"network_contact_github": request.app.state.network_contact_github,
|
||||
"network_contact_youtube": request.app.state.network_contact_youtube,
|
||||
"network_welcome_text": request.app.state.network_welcome_text,
|
||||
"admin_enabled": request.app.state.admin_enabled,
|
||||
"features": features,
|
||||
"custom_pages": custom_pages,
|
||||
"logo_url": request.app.state.logo_url,
|
||||
"logo_invert_light": request.app.state.logo_invert_light,
|
||||
"version": __version__,
|
||||
"default_theme": request.app.state.web_theme,
|
||||
"config_json": config_json,
|
||||
},
|
||||
)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def get_templates(request: Request) -> Jinja2Templates:
    """Return the Jinja2Templates instance stored on the application state."""
    configured: Jinja2Templates = request.app.state.templates
    return configured
|
||||
|
||||
|
||||
def get_network_context(request: Request) -> dict:
    """Build the network configuration context shared by all templates."""
    state = request.app.state

    # The radio config is stored as a comma-delimited string; parse it into
    # a structured RadioConfig before handing it to the templates.
    parsed_radio = RadioConfig.from_config_string(state.network_radio_config)

    context = {
        "network_name": state.network_name,
        "network_city": state.network_city,
        "network_country": state.network_country,
        "network_radio_config": parsed_radio,
        "network_contact_email": state.network_contact_email,
        "network_contact_discord": state.network_contact_discord,
        "network_contact_github": state.network_contact_github,
        "network_welcome_text": state.network_welcome_text,
        "version": __version__,
    }
    return context
|
||||
|
||||
@@ -88,6 +88,13 @@ import click
|
||||
envvar="NETWORK_CONTACT_GITHUB",
|
||||
help="GitHub repository URL",
|
||||
)
|
||||
@click.option(
|
||||
"--network-contact-youtube",
|
||||
type=str,
|
||||
default=None,
|
||||
envvar="NETWORK_CONTACT_YOUTUBE",
|
||||
help="YouTube channel URL",
|
||||
)
|
||||
@click.option(
|
||||
"--network-welcome-text",
|
||||
type=str,
|
||||
@@ -116,6 +123,7 @@ def web(
|
||||
network_contact_email: str | None,
|
||||
network_contact_discord: str | None,
|
||||
network_contact_github: str | None,
|
||||
network_contact_youtube: str | None,
|
||||
network_welcome_text: str | None,
|
||||
reload: bool,
|
||||
) -> None:
|
||||
@@ -175,6 +183,11 @@ def web(
|
||||
if effective_city and effective_country:
|
||||
click.echo(f"Location: {effective_city}, {effective_country}")
|
||||
click.echo(f"Reload mode: {reload}")
|
||||
disabled_features = [
|
||||
name for name, enabled in settings.features.items() if not enabled
|
||||
]
|
||||
if disabled_features:
|
||||
click.echo(f"Disabled features: {', '.join(disabled_features)}")
|
||||
click.echo("=" * 50)
|
||||
|
||||
if reload:
|
||||
@@ -201,6 +214,7 @@ def web(
|
||||
network_contact_email=network_contact_email,
|
||||
network_contact_discord=network_contact_discord,
|
||||
network_contact_github=network_contact_github,
|
||||
network_contact_youtube=network_contact_youtube,
|
||||
network_welcome_text=network_welcome_text,
|
||||
)
|
||||
|
||||
|
||||
85
src/meshcore_hub/web/middleware.py
Normal file
85
src/meshcore_hub/web/middleware.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""HTTP caching middleware for the web component."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
from starlette.types import ASGIApp
|
||||
|
||||
|
||||
class CacheControlMiddleware(BaseHTTPMiddleware):
    """Middleware that sets Cache-Control headers based on resource type.

    Responses that already carry a Cache-Control header are left untouched,
    so individual routes can override the policy chosen here.
    """

    def __init__(self, app: ASGIApp) -> None:
        """Initialize the middleware.

        Args:
            app: The ASGI application to wrap.
        """
        super().__init__(app)

    async def dispatch(
        self,
        request: Request,
        call_next: Callable[[Request], Awaitable[Response]],
    ) -> Response:
        """Process the request and add appropriate caching headers.

        Args:
            request: The incoming HTTP request.
            call_next: The next middleware or route handler.

        Returns:
            The response with cache headers added.
        """
        response: Response = await call_next(request)

        # Skip if Cache-Control already set (explicit override)
        if "cache-control" in response.headers:
            return response

        path = request.url.path
        # Bug fix: the previous check did a raw substring test ("v=" in the
        # query string), which also matched unrelated parameters such as
        # "?rev=2" or values containing "v=". Use the parsed query params
        # and look for the "v" cache-busting key explicitly.
        has_version = "v" in request.query_params

        # Health endpoints - never cache
        if path.startswith("/health"):
            response.headers["cache-control"] = "no-cache, no-store, must-revalidate"

        # Versioned static/media assets - long-term immutable cache
        elif path.startswith(("/static/", "/media/")) and has_version:
            response.headers["cache-control"] = "public, max-age=31536000, immutable"

        # Unversioned static/media assets - short cache as fallback
        # (media may be updated by the operator without a version bump)
        elif path.startswith(("/static/", "/media/")):
            response.headers["cache-control"] = "public, max-age=3600"

        # Map data - short cache (5 minutes)
        elif path == "/map/data":
            response.headers["cache-control"] = "public, max-age=300"

        # Custom pages and SEO files - moderate cache (1 hour)
        elif path.startswith("/spa/pages/") or path in ("/robots.txt", "/sitemap.xml"):
            response.headers["cache-control"] = "public, max-age=3600"

        # API proxy - don't add headers (pass through backend)
        elif path.startswith("/api/"):
            pass

        # SPA shell HTML (catch-all for client-side routes) - no cache
        elif response.headers.get("content-type", "").startswith("text/html"):
            response.headers["cache-control"] = "no-cache, public"

        return response
|
||||
119
src/meshcore_hub/web/pages.py
Normal file
119
src/meshcore_hub/web/pages.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Custom markdown pages loader for MeshCore Hub Web Dashboard."""
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import frontmatter
|
||||
import markdown
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class CustomPage:
    """A single custom page rendered from a markdown source file."""

    slug: str          # URL identifier, e.g. "about"
    title: str         # Human-readable page title
    menu_order: int    # Sort key for navigation menus (lower = earlier)
    content_html: str  # Rendered HTML body
    file_path: str     # Path of the source markdown file

    @property
    def url(self) -> str:
        """Return the URL path under which this page is served."""
        return f"/pages/{self.slug}"
|
||||
|
||||
|
||||
class PageLoader:
    """Loads and manages custom markdown pages from a directory."""

    def __init__(self, pages_dir: str) -> None:
        """Initialize the page loader.

        Args:
            pages_dir: Path to the directory containing markdown pages.
        """
        self.pages_dir = Path(pages_dir)
        self._pages: dict[str, CustomPage] = {}
        # Single shared converter; reset() is called before each document so
        # stateful extensions (e.g. toc) do not leak between files.
        self._md = markdown.Markdown(
            extensions=["tables", "fenced_code", "toc"],
            output_format="html",
        )

    def load_pages(self) -> None:
        """Load all markdown pages from the pages directory."""
        self._pages.clear()

        if not self.pages_dir.exists():
            logger.debug(f"Pages directory does not exist: {self.pages_dir}")
            return
        if not self.pages_dir.is_dir():
            logger.warning(f"Pages path is not a directory: {self.pages_dir}")
            return

        for md_file in self.pages_dir.glob("*.md"):
            try:
                loaded = self._load_page(md_file)
                if loaded:
                    self._pages[loaded.slug] = loaded
                    logger.info(f"Loaded custom page: {loaded.slug} ({md_file.name})")
            except Exception as e:
                # A single malformed page must not prevent the rest loading.
                logger.error(f"Failed to load page {md_file}: {e}")

        logger.info(f"Loaded {len(self._pages)} custom page(s)")

    def _load_page(self, file_path: Path) -> Optional[CustomPage]:
        """Load a single markdown page.

        Args:
            file_path: Path to the markdown file.

        Returns:
            CustomPage instance or None if loading failed.
        """
        raw = file_path.read_text(encoding="utf-8")
        post = frontmatter.loads(raw)

        # Frontmatter fields, with sensible fallbacks derived from the filename.
        slug = post.get("slug", file_path.stem)
        fallback_title = slug.replace("-", " ").replace("_", " ").title()
        title = post.get("title", fallback_title)
        menu_order = post.get("menu_order", 100)

        self._md.reset()
        rendered = self._md.convert(post.content)

        return CustomPage(
            slug=slug,
            title=title,
            menu_order=menu_order,
            content_html=rendered,
            file_path=str(file_path),
        )

    def get_page(self, slug: str) -> Optional[CustomPage]:
        """Get a page by its slug.

        Args:
            slug: The page slug.

        Returns:
            CustomPage instance or None if not found.
        """
        return self._pages.get(slug)

    def get_menu_pages(self) -> list[CustomPage]:
        """Get all pages sorted by menu_order for navigation.

        Returns:
            List of CustomPage instances sorted by menu_order (title breaks ties).
        """
        return sorted(
            self._pages.values(),
            key=lambda page: (page.menu_order, page.title),
        )

    def reload(self) -> None:
        """Reload all pages from disk."""
        self.load_pages()
|
||||
@@ -1,25 +0,0 @@
|
||||
"""Web routes for MeshCore Hub Dashboard."""

from fastapi import APIRouter

from meshcore_hub.web.routes.home import router as home_router
from meshcore_hub.web.routes.network import router as network_router
from meshcore_hub.web.routes.nodes import router as nodes_router
from meshcore_hub.web.routes.messages import router as messages_router
from meshcore_hub.web.routes.advertisements import router as advertisements_router
from meshcore_hub.web.routes.map import router as map_router
from meshcore_hub.web.routes.members import router as members_router

# Aggregate router exposing every dashboard page. Sub-routers are attached
# in this exact order, which also fixes route-matching precedence.
web_router = APIRouter()

for _sub_router in (
    home_router,
    network_router,
    nodes_router,
    messages_router,
    advertisements_router,
    map_router,
    members_router,
):
    web_router.include_router(_sub_router)

__all__ = ["web_router"]
|
||||
@@ -1,64 +0,0 @@
|
||||
"""Advertisements page route."""

import logging

from fastapi import APIRouter, Query, Request
from fastapi.responses import HTMLResponse

from meshcore_hub.web.app import get_network_context, get_templates

logger = logging.getLogger(__name__)
router = APIRouter()


@router.get("/advertisements", response_class=HTMLResponse)
async def advertisements_list(
    request: Request,
    search: str | None = Query(None, description="Search term"),
    page: int = Query(1, ge=1, description="Page number"),
    limit: int = Query(50, ge=1, le=100, description="Items per page"),
) -> HTMLResponse:
    """Render the advertisements list page."""
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    # Translate the 1-based page number into an API offset.
    offset = (page - 1) * limit
    params: dict[str, int | str] = {"limit": limit, "offset": offset}
    if search:
        params["search"] = search

    advertisements: list = []
    total = 0
    try:
        response = await request.app.state.http_client.get(
            "/api/v1/advertisements", params=params
        )
        if response.status_code == 200:
            payload = response.json()
            advertisements = payload.get("items", [])
            total = payload.get("total", 0)
    except Exception as e:
        # Render the page with an error banner rather than failing outright.
        logger.warning(f"Failed to fetch advertisements from API: {e}")
        context["api_error"] = str(e)

    # Ceiling division for the page count; always at least one page.
    total_pages = (total + limit - 1) // limit if total > 0 else 1

    context.update(
        {
            "advertisements": advertisements,
            "total": total,
            "page": page,
            "limit": limit,
            "total_pages": total_pages,
            "search": search or "",
        }
    )

    return templates.TemplateResponse("advertisements.html", context)
|
||||
@@ -1,67 +0,0 @@
|
||||
"""Home page route."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
async def home(request: Request) -> HTMLResponse:
    """Render the home page.

    Pulls dashboard statistics plus 7-day advertisement and message
    activity series from the internal API.  Each fetch falls back to an
    empty default on failure so the page always renders.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    client = request.app.state.http_client

    # Defaults keep the template renderable when the API is unreachable.
    stats = {
        "total_nodes": 0,
        "active_nodes": 0,
        "total_messages": 0,
        "messages_today": 0,
        "total_advertisements": 0,
        "advertisements_24h": 0,
    }
    advert_activity = {"days": 7, "data": []}
    message_activity = {"days": 7, "data": []}

    try:
        resp = await client.get("/api/v1/dashboard/stats")
        if resp.status_code == 200:
            stats = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch stats from API: {e}")
        context["api_error"] = str(e)

    try:
        resp = await client.get("/api/v1/dashboard/activity", params={"days": 7})
        if resp.status_code == 200:
            advert_activity = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch activity from API: {e}")

    try:
        resp = await client.get(
            "/api/v1/dashboard/message-activity", params={"days": 7}
        )
        if resp.status_code == 200:
            message_activity = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch message activity from API: {e}")

    context["stats"] = stats
    # The charts consume the activity series as embedded JSON strings.
    context["advert_activity_json"] = json.dumps(advert_activity)
    context["message_activity_json"] = json.dumps(message_activity)

    return templates.TemplateResponse("home.html", context)
|
||||
@@ -1,157 +0,0 @@
|
||||
"""Map page route."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse, JSONResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/map", response_class=HTMLResponse)
async def map_page(request: Request) -> HTMLResponse:
    """Render the map page (node data is loaded client-side from /map/data)."""
    templates = get_templates(request)
    ctx = get_network_context(request)
    ctx["request"] = request
    return templates.TemplateResponse("map.html", ctx)
|
||||
|
||||
|
||||
@router.get("/map/data")
async def map_data(request: Request) -> JSONResponse:
    """Return node location data as JSON for the map.

    Includes role tag, member ownership info, and all data needed for filtering.

    Response shape:
        {"nodes": [...], "members": [...], "center": {"lat", "lon"},
         "debug": {"total_nodes", "nodes_with_coords", "error"}}
    """
    nodes_with_location: list[dict[str, Any]] = []
    members_list: list[dict[str, Any]] = []
    members_by_key: dict[str, dict[str, Any]] = {}
    # Last fetch error (if any), reported in the "debug" section of the response.
    error: str | None = None
    total_nodes = 0
    nodes_with_coords = 0

    try:
        # Fetch all members to build lookup by public_key
        members_response = await request.app.state.http_client.get(
            "/api/v1/members", params={"limit": 500}
        )
        if members_response.status_code == 200:
            members_data = members_response.json()
            for member in members_data.get("items", []):
                # Only include members with public_key (required for node ownership)
                if member.get("public_key"):
                    member_info = {
                        "public_key": member.get("public_key"),
                        "name": member.get("name"),
                        "callsign": member.get("callsign"),
                    }
                    members_list.append(member_info)
                    members_by_key[member["public_key"]] = member_info
        else:
            # Member lookup is best-effort: nodes still render without owners.
            logger.warning(
                f"Failed to fetch members: status {members_response.status_code}"
            )

        # Fetch all nodes from API
        response = await request.app.state.http_client.get(
            "/api/v1/nodes", params={"limit": 500}
        )
        if response.status_code == 200:
            data = response.json()
            nodes = data.get("items", [])
            total_nodes = len(nodes)

            # Filter nodes with location tags
            for node in nodes:
                tags = node.get("tags", [])
                lat = None
                lon = None
                friendly_name = None
                role = None

                # Pull coordinates and display metadata out of the tag list;
                # malformed lat/lon values are silently skipped so one bad tag
                # cannot break the whole map.
                for tag in tags:
                    key = tag.get("key")
                    if key == "lat":
                        try:
                            lat = float(tag.get("value"))
                        except (ValueError, TypeError):
                            pass
                    elif key == "lon":
                        try:
                            lon = float(tag.get("value"))
                        except (ValueError, TypeError):
                            pass
                    elif key == "friendly_name":
                        friendly_name = tag.get("value")
                    elif key == "role":
                        role = tag.get("value")

                # Only nodes with both coordinates appear on the map.
                if lat is not None and lon is not None:
                    nodes_with_coords += 1
                    # Use friendly_name, then node name, then public key prefix
                    display_name = (
                        friendly_name
                        or node.get("name")
                        or node.get("public_key", "")[:12]
                    )
                    public_key = node.get("public_key")

                    # Find owner member if exists
                    owner = members_by_key.get(public_key)

                    nodes_with_location.append(
                        {
                            "public_key": public_key,
                            "name": display_name,
                            "adv_type": node.get("adv_type"),
                            "lat": lat,
                            "lon": lon,
                            "last_seen": node.get("last_seen"),
                            "role": role,
                            "is_infra": role == "infra",
                            "owner": owner,
                        }
                    )
        else:
            error = f"API returned status {response.status_code}"
            logger.warning(f"Failed to fetch nodes: {error}")

    except Exception as e:
        error = str(e)
        logger.warning(f"Failed to fetch nodes for map: {e}")

    logger.info(
        f"Map data: {total_nodes} total nodes, " f"{nodes_with_coords} with coordinates"
    )

    # Calculate center from nodes, or use default (0, 0)
    center_lat = 0.0
    center_lon = 0.0
    if nodes_with_location:
        # Arithmetic mean of all plotted coordinates.
        center_lat = sum(n["lat"] for n in nodes_with_location) / len(
            nodes_with_location
        )
        center_lon = sum(n["lon"] for n in nodes_with_location) / len(
            nodes_with_location
        )

    return JSONResponse(
        {
            "nodes": nodes_with_location,
            "members": members_list,
            "center": {
                "lat": center_lat,
                "lon": center_lon,
            },
            "debug": {
                "total_nodes": total_nodes,
                "nodes_with_coords": nodes_with_coords,
                "error": error,
            },
        }
    )
|
||||
@@ -1,98 +0,0 @@
|
||||
"""Members page route."""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/members", response_class=HTMLResponse)
async def members_page(request: Request) -> HTMLResponse:
    """Render the members page.

    Fetches all members plus every node tagged with ``member_id`` in one
    query, then attaches each member's nodes (sorted repeater -> chat ->
    other, then by name) to the member record.  API failures are surfaced
    to the template via ``api_error``.

    Changes from the previous version: the unused ``node_sort_key`` helper
    was removed, and the sort key is defined once instead of being
    recreated inside the per-member loop.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    members = []

    def _node_sort_key(node: dict) -> tuple:
        """Sort key: repeaters first, then chat nodes, then others; ties by name."""
        adv_type = (node.get("adv_type") or "").lower()
        type_priority = 0 if adv_type == "repeater" else (1 if adv_type == "chat" else 2)

        # Prefer the "name" tag over the node's own name field.
        node_name = node.get("name") or ""
        for tag in node.get("tags", []):
            if tag.get("key") == "name":
                node_name = tag.get("value") or node_name
                break

        return (type_priority, node_name.lower())

    try:
        # Fetch all members
        response = await request.app.state.http_client.get(
            "/api/v1/members", params={"limit": 100}
        )
        if response.status_code == 200:
            data = response.json()
            members = data.get("items", [])

            # Fetch all nodes with member_id tags in one query
            nodes_response = await request.app.state.http_client.get(
                "/api/v1/nodes", params={"has_tag": "member_id", "limit": 500}
            )

            # Build a map of member_id -> nodes
            member_nodes_map: dict[str, list] = {}
            if nodes_response.status_code == 200:
                for node in nodes_response.json().get("items", []):
                    # Group each node under its member_id tag (first match wins).
                    for tag in node.get("tags", []):
                        if tag.get("key") == "member_id":
                            member_id_value = tag.get("value")
                            if member_id_value:
                                member_nodes_map.setdefault(
                                    member_id_value, []
                                ).append(node)
                            break

            # Attach each member's nodes, sorted for display.
            for member in members:
                member_id = member.get("member_id")
                nodes = member_nodes_map.get(member_id, []) if member_id else []
                member["nodes"] = sorted(nodes, key=_node_sort_key)
    except Exception as e:
        logger.warning(f"Failed to fetch members from API: {e}")
        context["api_error"] = str(e)

    context["members"] = members

    return templates.TemplateResponse("members.html", context)
|
||||
@@ -1,78 +0,0 @@
|
||||
"""Messages page route."""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Query, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/messages", response_class=HTMLResponse)
async def messages_list(
    request: Request,
    message_type: str | None = Query(None, description="Filter by message type"),
    channel_idx: str | None = Query(None, description="Filter by channel"),
    search: str | None = Query(None, description="Search in message text"),
    page: int = Query(1, ge=1, description="Page number"),
    limit: int = Query(50, ge=1, le=100, description="Items per page"),
) -> HTMLResponse:
    """Render the messages list page.

    Supports filtering by message type, channel index, and a free-text
    search term, all forwarded to the internal API.  API failures are
    surfaced to the template via ``api_error``.

    Bug fix: ``search`` was previously accepted and echoed back to the
    template but never forwarded to the API, so the search box had no
    effect.  It is now included in the query params (matching the
    advertisements and nodes pages).
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    # Calculate offset
    offset = (page - 1) * limit

    # Parse channel_idx, treating empty string as None
    channel_idx_int: int | None = None
    if channel_idx and channel_idx.strip():
        try:
            channel_idx_int = int(channel_idx)
        except ValueError:
            # Ignore bad input and show the unfiltered list.
            logger.warning(f"Invalid channel_idx value: {channel_idx}")

    # Build query params
    params: dict[str, int | str] = {"limit": limit, "offset": offset}
    if message_type:
        params["message_type"] = message_type
    if channel_idx_int is not None:
        params["channel_idx"] = channel_idx_int
    if search:
        params["search"] = search

    # Fetch messages from API
    messages = []
    total = 0

    try:
        response = await request.app.state.http_client.get(
            "/api/v1/messages", params=params
        )
        if response.status_code == 200:
            data = response.json()
            messages = data.get("items", [])
            total = data.get("total", 0)
    except Exception as e:
        logger.warning(f"Failed to fetch messages from API: {e}")
        context["api_error"] = str(e)

    # Calculate pagination (ceiling division; always at least one page)
    total_pages = (total + limit - 1) // limit if total > 0 else 1

    context.update(
        {
            "messages": messages,
            "total": total,
            "page": page,
            "limit": limit,
            "total_pages": total_pages,
            "message_type": message_type or "",
            "channel_idx": channel_idx_int,
            "search": search or "",
        }
    )

    return templates.TemplateResponse("messages.html", context)
|
||||
@@ -1,79 +0,0 @@
|
||||
"""Network overview page route."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/network", response_class=HTMLResponse)
async def network_overview(request: Request) -> HTMLResponse:
    """Render the network overview page.

    Gathers dashboard stats plus 7-day advertisement, message, and
    node-count series from the internal API.  Each fetch falls back to an
    empty default on failure so the page always renders.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    client = request.app.state.http_client

    # Defaults used when the API is unavailable.
    stats = {
        "total_nodes": 0,
        "active_nodes": 0,
        "total_messages": 0,
        "messages_today": 0,
        "total_advertisements": 0,
        "advertisements_24h": 0,
        "recent_advertisements": [],
        "channel_message_counts": {},
    }
    advert_activity = {"days": 7, "data": []}
    message_activity = {"days": 7, "data": []}
    node_count = {"days": 7, "data": []}

    try:
        resp = await client.get("/api/v1/dashboard/stats")
        if resp.status_code == 200:
            stats = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch stats from API: {e}")
        context["api_error"] = str(e)

    try:
        resp = await client.get("/api/v1/dashboard/activity", params={"days": 7})
        if resp.status_code == 200:
            advert_activity = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch advertisement activity from API: {e}")

    try:
        resp = await client.get(
            "/api/v1/dashboard/message-activity", params={"days": 7}
        )
        if resp.status_code == 200:
            message_activity = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch message activity from API: {e}")

    try:
        resp = await client.get("/api/v1/dashboard/node-count", params={"days": 7})
        if resp.status_code == 200:
            node_count = resp.json()
    except Exception as e:
        logger.warning(f"Failed to fetch node count from API: {e}")

    context["stats"] = stats
    # The charts read these series as embedded JSON strings.
    context["advert_activity_json"] = json.dumps(advert_activity)
    context["message_activity_json"] = json.dumps(message_activity)
    context["node_count_json"] = json.dumps(node_count)

    return templates.TemplateResponse("network.html", context)
|
||||
@@ -1,117 +0,0 @@
|
||||
"""Nodes page routes."""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Query, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from meshcore_hub.web.app import get_network_context, get_templates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/nodes", response_class=HTMLResponse)
async def nodes_list(
    request: Request,
    search: str | None = Query(None, description="Search term"),
    adv_type: str | None = Query(None, description="Filter by node type"),
    page: int = Query(1, ge=1, description="Page number"),
    limit: int = Query(20, ge=1, le=100, description="Items per page"),
) -> HTMLResponse:
    """Render the nodes list page.

    Supports free-text search and filtering by advertisement type, both
    forwarded to the internal API.  API failures are surfaced to the
    template via ``api_error``.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    # Translate the 1-based page number into an API offset.
    query: dict[str, int | str] = {
        "limit": limit,
        "offset": (page - 1) * limit,
    }
    if search:
        query["search"] = search
    if adv_type:
        query["adv_type"] = adv_type

    nodes: list = []
    total = 0
    try:
        response = await request.app.state.http_client.get(
            "/api/v1/nodes", params=query
        )
        if response.status_code == 200:
            payload = response.json()
            nodes = payload.get("items", [])
            total = payload.get("total", 0)
    except Exception as e:
        logger.warning(f"Failed to fetch nodes from API: {e}")
        context["api_error"] = str(e)

    # Ceiling division; an empty result set still renders one page.
    total_pages = (total + limit - 1) // limit if total > 0 else 1

    context.update(
        {
            "nodes": nodes,
            "total": total,
            "page": page,
            "limit": limit,
            "total_pages": total_pages,
            "search": search or "",
            "adv_type": adv_type or "",
        }
    )

    return templates.TemplateResponse("nodes.html", context)
|
||||
|
||||
|
||||
@router.get("/nodes/{public_key}", response_class=HTMLResponse)
async def node_detail(request: Request, public_key: str) -> HTMLResponse:
    """Render the node detail page.

    Fetches the node record plus its 10 most recent advertisements and
    telemetry entries.  If the node is unknown, ``node`` is passed to the
    template as ``None``; API failures set ``api_error`` instead of raising.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    client = request.app.state.http_client
    node = None
    advertisements: list = []
    telemetry: list = []

    try:
        # The node record itself.
        resp = await client.get(f"/api/v1/nodes/{public_key}")
        if resp.status_code == 200:
            node = resp.json()

        # Latest advertisements seen from this node.
        resp = await client.get(
            "/api/v1/advertisements", params={"public_key": public_key, "limit": 10}
        )
        if resp.status_code == 200:
            advertisements = resp.json().get("items", [])

        # Latest telemetry reported by this node.
        resp = await client.get(
            "/api/v1/telemetry", params={"node_public_key": public_key, "limit": 10}
        )
        if resp.status_code == 200:
            telemetry = resp.json().get("items", [])

    except Exception as e:
        logger.warning(f"Failed to fetch node details from API: {e}")
        context["api_error"] = str(e)

    context.update(
        {
            "node": node,
            "advertisements": advertisements,
            "telemetry": telemetry,
            "public_key": public_key,
        }
    )

    return templates.TemplateResponse("node_detail.html", context)
|
||||
349
src/meshcore_hub/web/static/css/app.css
Normal file
349
src/meshcore_hub/web/static/css/app.css
Normal file
@@ -0,0 +1,349 @@
|
||||
/**
 * MeshCore Hub - Custom Application Styles
 *
 * This file contains all custom CSS that extends the Tailwind/DaisyUI framework.
 * Organized in sections:
 * - Color palette
 * - Navbar styling
 * - Scrollbar styling
 * - Table styling
 * - Text utilities
 * - Prose (markdown content) styling
 * - Leaflet map theming
 */

/* ==========================================================================
   Color Palette
   Single source of truth for page/section colors used across nav, charts,
   and page content. All values are OKLCH.
   ========================================================================== */

:root {
  --color-dashboard: oklch(0.75 0.15 210); /* cyan */
  --color-nodes: oklch(0.65 0.24 265); /* violet */
  --color-adverts: oklch(0.7 0.17 330); /* magenta */
  --color-messages: oklch(0.75 0.18 180); /* teal */
  --color-map: oklch(0.8471 0.199 83.87); /* yellow (matches btn-warning) */
  --color-members: oklch(0.72 0.17 50); /* orange */
  --color-neutral: oklch(0.3 0.01 250); /* subtle dark grey */
}

/* Light mode: darker section colors for contrast on light backgrounds */
[data-theme="light"] {
  --color-dashboard: oklch(0.55 0.15 210);
  --color-nodes: oklch(0.50 0.24 265);
  --color-adverts: oklch(0.55 0.17 330);
  --color-messages: oklch(0.55 0.18 180);
  --color-map: oklch(0.58 0.16 45);
  --color-members: oklch(0.55 0.18 25);
  --color-neutral: oklch(0.85 0.01 250);
}

/* ==========================================================================
   Navbar Styling
   ========================================================================== */

/* Spacing between horizontal nav items */
.menu-horizontal { gap: 0.125rem; }

/* Invert monochrome logos to dark for light mode */
[data-theme="light"] .theme-logo--invert-light {
  filter: brightness(0.15);
}

/* Ensure hero heading is pure black/white per theme */
.hero-title {
  color: #fff;
}
[data-theme="light"] .hero-title {
  color: #000;
}

/* Nav icon colors */
.nav-icon-dashboard { color: var(--color-dashboard); }
.nav-icon-nodes { color: var(--color-nodes); }
.nav-icon-adverts { color: var(--color-adverts); }
.nav-icon-messages { color: var(--color-messages); }
.nav-icon-map { color: var(--color-map); }
.nav-icon-members { color: var(--color-members); }

/* Propagate section color to parent li for hover/active backgrounds */
/* NOTE: relies on the :has() selector (not supported by very old browsers) */
.navbar .menu li:has(.nav-icon-dashboard) { --nav-color: var(--color-dashboard); }
.navbar .menu li:has(.nav-icon-nodes) { --nav-color: var(--color-nodes); }
.navbar .menu li:has(.nav-icon-adverts) { --nav-color: var(--color-adverts); }
.navbar .menu li:has(.nav-icon-messages) { --nav-color: var(--color-messages); }
.navbar .menu li:has(.nav-icon-map) { --nav-color: var(--color-map); }
.navbar .menu li:has(.nav-icon-members) { --nav-color: var(--color-members); }

/* Section-tinted hover and active backgrounds (!important to override DaisyUI CDN) */
.navbar .menu li > a:hover {
  background-color: color-mix(in oklch, var(--nav-color, oklch(var(--bc))) 12%, transparent) !important;
}
.navbar .menu li > a.active {
  background-color: color-mix(in oklch, var(--nav-color, oklch(var(--bc))) 20%, transparent) !important;
  color: inherit !important;
}

/* Homepage hero buttons: slightly thicker outline, white text on hover */
#app .btn-outline {
  border-width: 2px;
}
#app .btn-outline:hover {
  color: #fff !important;
}

/* ==========================================================================
   Panel Glow
   Radial color glow from bottom-right corner.
   Set --panel-color on the element for a section-tinted glow.
   ========================================================================== */

.panel-glow {
  background-image:
    radial-gradient(
      ellipse at 80% 80%,
      color-mix(in oklch, var(--panel-color, transparent) 15%, transparent),
      transparent 70%
    );
}

/* Variant: glow from the top-left corner instead */
.panel-glow.panel-glow-tl {
  background-image:
    radial-gradient(
      ellipse at 20% 20%,
      color-mix(in oklch, var(--panel-color, transparent) 15%, transparent),
      transparent 70%
    );
}

/* Flat section-tinted panel background (no gradient) */
.panel-solid {
  background-color: color-mix(in oklch, var(--panel-color, transparent) 10%, oklch(var(--b1)));
}

/* ==========================================================================
   Scrollbar Styling
   (WebKit-only pseudo-elements; other engines keep their defaults)
   ========================================================================== */

::-webkit-scrollbar {
  width: 8px;
}

::-webkit-scrollbar-track {
  background: oklch(var(--b2));
}

::-webkit-scrollbar-thumb {
  background: oklch(var(--bc) / 0.3);
  border-radius: 4px;
}

::-webkit-scrollbar-thumb:hover {
  background: oklch(var(--bc) / 0.5);
}

/* ==========================================================================
   Table Styling
   ========================================================================== */

.table-compact td,
.table-compact th {
  padding: 0.5rem 0.75rem;
}

/* ==========================================================================
   Text Utilities
   ========================================================================== */

/* Ellipsize long cell content (e.g. public keys) at a fixed width */
.truncate-cell {
  max-width: 200px;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

/* ==========================================================================
   Prose Styling (Custom Markdown Pages)
   ========================================================================== */

.prose h1 {
  font-size: 2.25rem;
  font-weight: 700;
  margin-top: 1.5rem;
  margin-bottom: 1rem;
}

.prose h2 {
  font-size: 1.875rem;
  font-weight: 600;
  margin-top: 1.25rem;
  margin-bottom: 0.75rem;
}

.prose h3 {
  font-size: 1.5rem;
  font-weight: 600;
  margin-top: 1rem;
  margin-bottom: 0.5rem;
}

.prose h4 {
  font-size: 1.25rem;
  font-weight: 600;
  margin-top: 1rem;
  margin-bottom: 0.5rem;
}

.prose p {
  margin-bottom: 1rem;
  line-height: 1.75;
}

.prose ul,
.prose ol {
  margin-bottom: 1rem;
  padding-left: 1.5rem;
}

.prose ul {
  list-style-type: disc;
}

.prose ol {
  list-style-type: decimal;
}

.prose li {
  margin-bottom: 0.25rem;
}

.prose a {
  color: oklch(var(--p));
  text-decoration: underline;
}

.prose a:hover {
  color: oklch(var(--pf));
}

.prose code {
  background: oklch(var(--b2));
  padding: 0.125rem 0.25rem;
  border-radius: 0.25rem;
  font-size: 0.875em;
}

.prose pre {
  background: oklch(var(--b2));
  padding: 1rem;
  border-radius: 0.5rem;
  overflow-x: auto;
  margin-bottom: 1rem;
}

/* Avoid double-highlighting code inside pre blocks */
.prose pre code {
  background: none;
  padding: 0;
}

.prose blockquote {
  border-left: 4px solid oklch(var(--bc) / 0.3);
  padding-left: 1rem;
  margin: 1rem 0;
  font-style: italic;
}

.prose table {
  width: 100%;
  margin-bottom: 1rem;
  border-collapse: collapse;
}

.prose th,
.prose td {
  border: 1px solid oklch(var(--bc) / 0.2);
  padding: 0.5rem;
  text-align: left;
}

.prose th {
  background: oklch(var(--b2));
  font-weight: 600;
}

.prose hr {
  border: none;
  border-top: 1px solid oklch(var(--bc) / 0.2);
  margin: 2rem 0;
}

.prose img {
  max-width: 100%;
  height: auto;
  border-radius: 0.5rem;
  margin: 1rem 0;
}


/* ==========================================================================
   Leaflet Map Theming (Dark Mode)
   ========================================================================== */

/* Popup styling */
.leaflet-popup-content-wrapper {
  background: oklch(var(--b1));
  color: oklch(var(--bc));
}

.leaflet-popup-tip {
  background: oklch(var(--b1));
}

/* Map container defaults */
#map,
#node-map {
  border-radius: var(--rounded-box);
}

#map {
  height: calc(100vh - 350px);
  min-height: 400px;
}

#node-map {
  height: 300px;
}

/* Map label visibility: hidden by default, shown on marker hover
   or globally via the .show-labels toggle class */
.map-label {
  opacity: 0;
  transition: opacity 0.15s ease-in-out;
}

.map-marker:hover .map-label {
  opacity: 1;
}

.show-labels .map-label {
  opacity: 1;
}

/* Bring hovered marker to front */
.leaflet-marker-icon:hover {
  z-index: 10000 !important;
}

/* ==========================================================================
   Node Header Hero Map Background
   ========================================================================== */

#header-map {
  height: 100%;
  width: 100%;
  z-index: 0;
}

/* Ensure Leaflet elements stay within the map layer */
#header-map .leaflet-pane,
#header-map .leaflet-control {
  z-index: auto !important;
}
|
||||
45
src/meshcore_hub/web/static/img/logo.svg
Normal file
45
src/meshcore_hub/web/static/img/logo.svg
Normal file
@@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
viewBox="0 0 53 53"
|
||||
width="53"
|
||||
height="53"
|
||||
version="1.1"
|
||||
id="svg3"
|
||||
sodipodi:docname="logo_bak.svg"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs3" />
|
||||
<sodipodi:namedview
|
||||
id="namedview3"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1" />
|
||||
<!-- WiFi arcs radiating from bottom-left corner -->
|
||||
<g
|
||||
fill="none"
|
||||
stroke="#ffffff"
|
||||
stroke-width="8"
|
||||
stroke-linecap="round"
|
||||
id="g3"
|
||||
transform="translate(-1,-16)">
|
||||
<!-- Inner arc: from right to top -->
|
||||
<path
|
||||
d="M 20,65 A 15,15 0 0 0 5,50"
|
||||
id="path1" />
|
||||
<!-- Middle arc -->
|
||||
<path
|
||||
d="M 35,65 A 30,30 0 0 0 5,35"
|
||||
id="path2" />
|
||||
<!-- Outer arc -->
|
||||
<path
|
||||
d="M 50,65 A 45,45 0 0 0 5,20"
|
||||
id="path3" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
231
src/meshcore_hub/web/static/js/charts.js
Normal file
231
src/meshcore_hub/web/static/js/charts.js
Normal file
@@ -0,0 +1,231 @@
|
||||
/**
|
||||
* MeshCore Hub - Chart.js Helpers
|
||||
*
|
||||
* Provides common chart configuration and initialization helpers
|
||||
* for activity charts used on home and dashboard pages.
|
||||
*/
|
||||
|
||||
/**
 * Read a color from a CSS custom property on the document root
 * (defined in app.css :root).
 * @param {string} varName - CSS custom property name (e.g. '--color-nodes')
 * @param {string} fallback - Value returned when the variable is unset/empty
 * @returns {string} Resolved color string
 */
function getCSSColor(varName, fallback) {
  const rootStyle = getComputedStyle(document.documentElement);
  const value = rootStyle.getPropertyValue(varName).trim();
  return value || fallback;
}
|
||||
|
||||
/**
 * Insert an alpha channel into a CSS color function string.
 * e.g. oklch(0.65 0.24 265) -> oklch(0.65 0.24 265 / 0.1)
 * @param {string} color - Color function string (alpha goes before the first ')')
 * @param {number} alpha - Alpha value (0..1)
 * @returns {string} Color string with ' / alpha' inserted; unchanged if no ')'
 */
function withAlpha(color, alpha) {
  const closeIdx = color.indexOf(')');
  if (closeIdx === -1) return color;
  return `${color.slice(0, closeIdx)} / ${alpha})${color.slice(closeIdx + 1)}`;
}
|
||||
|
||||
// Shared chart palette. Page-specific colors are getters so every access
// re-reads the CSS custom properties via getCSSColor (theme changes are
// picked up without reloading); each *Fill is the same hue at 10% alpha.
const ChartColors = {
  get nodes() { return getCSSColor('--color-nodes', 'oklch(0.65 0.24 265)'); },
  get nodesFill() { return withAlpha(this.nodes, 0.1); },
  get adverts() { return getCSSColor('--color-adverts', 'oklch(0.7 0.17 330)'); },
  get advertsFill() { return withAlpha(this.adverts, 0.1); },
  get messages() { return getCSSColor('--color-messages', 'oklch(0.75 0.18 180)'); },
  get messagesFill() { return withAlpha(this.messages, 0.1); },

  // Neutral grays (not page-specific); plain properties, no CSS lookup.
  grid: 'oklch(0.4 0 0 / 0.2)',
  text: 'oklch(0.7 0 0)',
  tooltipBg: 'oklch(0.25 0 0)',
  tooltipText: 'oklch(0.9 0 0)',
  tooltipBorder: 'oklch(0.4 0 0)'
};
|
||||
|
||||
/**
 * Create common chart options with optional legend.
 * Shared by the single-series and multi-series chart builders so every
 * chart gets the same axes, tooltip, and hover behavior.
 * @param {boolean} showLegend - Whether to show the legend
 * @returns {Object} Chart.js options object
 */
function createChartOptions(showLegend) {
  return {
    responsive: true,
    // Let the canvas fill its container height instead of keeping a fixed ratio.
    maintainAspectRatio: false,
    plugins: {
      legend: {
        display: showLegend,
        position: 'bottom',
        labels: {
          color: ChartColors.text,
          boxWidth: 12,
          padding: 8
        }
      },
      tooltip: {
        // 'index' + no intersect: show all series values for the hovered x position.
        mode: 'index',
        intersect: false,
        backgroundColor: ChartColors.tooltipBg,
        titleColor: ChartColors.tooltipText,
        bodyColor: ChartColors.tooltipText,
        borderColor: ChartColors.tooltipBorder,
        borderWidth: 1
      }
    },
    scales: {
      x: {
        grid: { color: ChartColors.grid },
        ticks: {
          color: ChartColors.text,
          // Fixed 45-degree date labels, capped so long ranges stay readable.
          maxRotation: 45,
          minRotation: 45,
          maxTicksLimit: 10
        }
      },
      y: {
        beginAtZero: true,
        grid: { color: ChartColors.grid },
        ticks: {
          color: ChartColors.text,
          // Counts are integers; suppress fractional ticks.
          precision: 0
        }
      }
    },
    interaction: {
      // Hover snaps to the nearest x value across all datasets.
      mode: 'nearest',
      axis: 'x',
      intersect: false
    }
  };
}
|
||||
|
||||
/**
 * Format date labels for chart display (e.g., "8 Feb").
 * @param {Array} data - Array of objects with a 'date' property
 * @returns {Array} Formatted date strings, one per input entry
 */
function formatDateLabels(data) {
  const labelOptions = { day: 'numeric', month: 'short' };
  return data.map((entry) => new Date(entry.date).toLocaleDateString('en-GB', labelOptions));
}
|
||||
|
||||
/**
 * Create a single-dataset line chart.
 * @param {string} canvasId - ID of the canvas element
 * @param {Object} data - Data object with 'data' array containing {date, count} objects
 * @param {string} label - Dataset label
 * @param {string} borderColor - Line color
 * @param {string} backgroundColor - Fill color
 * @param {boolean} fill - Whether to fill under the line
 * @returns {Chart|null} The chart, or null if the canvas/data is missing or empty
 */
function createLineChart(canvasId, data, label, borderColor, backgroundColor, fill) {
  var canvas = document.getElementById(canvasId);
  var points = data && data.data;
  if (!canvas || !points || points.length === 0) return null;

  var dataset = {
    label: label,
    data: points.map(function(point) { return point.count; }),
    borderColor: borderColor,
    backgroundColor: backgroundColor,
    fill: fill,
    tension: 0.3,
    pointRadius: 2,
    pointHoverRadius: 5
  };

  return new Chart(canvas, {
    type: 'line',
    data: { labels: formatDateLabels(points), datasets: [dataset] },
    options: createChartOptions(false)
  });
}
|
||||
|
||||
/**
 * Create a multi-dataset activity chart (for home page).
 * Pass null for advertData or messageData to omit that series.
 * @param {string} canvasId - ID of the canvas element
 * @param {Object|null} advertData - Advertisement data with 'data' array, or null to omit
 * @param {Object|null} messageData - Message data with 'data' array, or null to omit
 * @returns {Chart|null} The chart, or null when the canvas or all series are missing/empty
 */
function createActivityChart(canvasId, advertData, messageData) {
  var ctx = document.getElementById(canvasId);
  if (!ctx) return null;

  var datasets = [];
  var labels = null;

  // Append one line dataset for a series, if present and non-empty.
  // Labels come from the first series that is actually drawn.
  // (Extracted to remove the previous copy-pasted per-series blocks.)
  function addSeries(series, translationKey, fallbackLabel, borderColor, backgroundColor) {
    if (!series || !series.data || series.data.length === 0) return;
    if (!labels) labels = formatDateLabels(series.data);
    datasets.push({
      label: (window.t && window.t(translationKey)) || fallbackLabel,
      data: series.data.map(function(d) { return d.count; }),
      borderColor: borderColor,
      backgroundColor: backgroundColor,
      fill: false,
      tension: 0.3,
      pointRadius: 2,
      pointHoverRadius: 5
    });
  }

  addSeries(advertData, 'entities.advertisements', 'Advertisements',
    ChartColors.adverts, ChartColors.advertsFill);
  addSeries(messageData, 'entities.messages', 'Messages',
    ChartColors.messages, ChartColors.messagesFill);

  if (datasets.length === 0 || !labels) return null;

  return new Chart(ctx, {
    type: 'line',
    data: { labels: labels, datasets: datasets },
    options: createChartOptions(true)
  });
}
|
||||
|
||||
/**
 * Initialize dashboard charts (nodes, advertisements, messages).
 * Pass null for any data parameter to skip that chart.
 * @param {Object|null} nodeData - Node count data, or null to skip
 * @param {Object|null} advertData - Advertisement data, or null to skip
 * @param {Object|null} messageData - Message data, or null to skip
 */
function initDashboardCharts(nodeData, advertData, messageData) {
  if (nodeData) {
    createLineChart(
      'nodeChart',
      nodeData,
      // Use window.t consistently for the nested lookup too (previously a
      // bare `t(...)`, inconsistent with every other call site in this file).
      (window.t && window.t('common.total_entity', { entity: window.t('entities.nodes') })) || 'Total Nodes',
      ChartColors.nodes,
      ChartColors.nodesFill,
      true
    );
  }

  if (advertData) {
    createLineChart(
      'advertChart',
      advertData,
      (window.t && window.t('entities.advertisements')) || 'Advertisements',
      ChartColors.adverts,
      ChartColors.advertsFill,
      true
    );
  }

  if (messageData) {
    createLineChart(
      'messageChart',
      messageData,
      (window.t && window.t('entities.messages')) || 'Messages',
      ChartColors.messages,
      ChartColors.messagesFill,
      true
    );
  }
}
|
||||
99
src/meshcore_hub/web/static/js/spa/api.js
Normal file
99
src/meshcore_hub/web/static/js/spa/api.js
Normal file
@@ -0,0 +1,99 @@
|
||||
/**
|
||||
* MeshCore Hub SPA - API Client
|
||||
*
|
||||
* Wrapper around fetch() for making API calls to the proxied backend.
|
||||
*/
|
||||
|
||||
/**
 * Make a GET request and return parsed JSON.
 * null, undefined, and empty-string query values are omitted from the URL
 * (0 and false are kept).
 * @param {string} path - URL path (e.g., '/api/v1/nodes')
 * @param {Object} [params] - Query parameters
 * @returns {Promise<any>} Parsed JSON response (null for 204 No Content)
 * @throws {Error} When the response status is not OK; includes the response body
 */
export async function apiGet(path, params = {}) {
  const url = new URL(path, window.location.origin);
  for (const [k, v] of Object.entries(params)) {
    if (v !== null && v !== undefined && v !== '') {
      url.searchParams.set(k, String(v));
    }
  }
  const response = await fetch(url);
  if (!response.ok) {
    // Include the response body in the error (consistent with the other
    // helpers in this module; previously only status/statusText were shown).
    const text = await response.text();
    throw new Error(`API error: ${response.status} - ${text}`);
  }
  // Guard 204 before .json(): an empty body is not valid JSON.
  if (response.status === 204) return null;
  return response.json();
}
|
||||
|
||||
/**
 * Make a POST request with JSON body.
 * @param {string} path - URL path
 * @param {Object} body - Request body (JSON-serialized)
 * @returns {Promise<any>} Parsed JSON response (null for 204 No Content)
 * @throws {Error} When the response status is not OK; includes the response body
 */
export async function apiPost(path, body) {
  const init = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  };
  const response = await fetch(path, init);
  if (!response.ok) {
    throw new Error(`API error: ${response.status} - ${await response.text()}`);
  }
  return response.status === 204 ? null : response.json();
}
|
||||
|
||||
/**
 * Make a PUT request with JSON body.
 * @param {string} path - URL path
 * @param {Object} body - Request body (JSON-serialized)
 * @returns {Promise<any>} Parsed JSON response (null for 204 No Content)
 * @throws {Error} When the response status is not OK; includes the response body
 */
export async function apiPut(path, body) {
  const init = {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  };
  const response = await fetch(path, init);
  if (!response.ok) {
    throw new Error(`API error: ${response.status} - ${await response.text()}`);
  }
  return response.status === 204 ? null : response.json();
}
|
||||
|
||||
/**
 * Make a DELETE request.
 * @param {string} path - URL path
 * @returns {Promise<void>}
 * @throws {Error} When the response status is not OK; includes the response body
 */
export async function apiDelete(path) {
  const response = await fetch(path, { method: 'DELETE' });
  if (response.ok) return;
  const text = await response.text();
  throw new Error(`API error: ${response.status} - ${text}`);
}
|
||||
|
||||
/**
 * Make a POST request with form-encoded body.
 * @param {string} path - URL path
 * @param {Object} data - Form data as key-value pairs
 * @returns {Promise<any>} Parsed JSON response (null for 204 No Content)
 * @throws {Error} When the response status is not OK; includes the response body
 */
export async function apiPostForm(path, data) {
  const encoded = new URLSearchParams(data).toString();
  const response = await fetch(path, {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: encoded,
  });
  if (!response.ok) {
    throw new Error(`API error: ${response.status} - ${await response.text()}`);
  }
  return response.status === 204 ? null : response.json();
}
|
||||
183
src/meshcore_hub/web/static/js/spa/app.js
Normal file
183
src/meshcore_hub/web/static/js/spa/app.js
Normal file
@@ -0,0 +1,183 @@
|
||||
/**
|
||||
* MeshCore Hub SPA - Main Application Entry Point
|
||||
*
|
||||
* Initializes i18n, the router, registers all page routes,
|
||||
* and handles navigation.
|
||||
*/
|
||||
|
||||
import { Router } from './router.js';
|
||||
import { getConfig } from './components.js';
|
||||
import { loadLocale, t } from './i18n.js';
|
||||
|
||||
// Page modules (lazy-loaded). Each entry is a dynamic-import thunk, so a
// page's code is only fetched the first time its route is rendered.
const pages = {
  home: () => import('./pages/home.js'),
  dashboard: () => import('./pages/dashboard.js'),
  nodes: () => import('./pages/nodes.js'),
  nodeDetail: () => import('./pages/node-detail.js'),
  messages: () => import('./pages/messages.js'),
  advertisements: () => import('./pages/advertisements.js'),
  map: () => import('./pages/map.js'),
  members: () => import('./pages/members.js'),
  customPage: () => import('./pages/custom-page.js'),
  notFound: () => import('./pages/not-found.js'),
  adminIndex: () => import('./pages/admin/index.js'),
  adminNodeTags: () => import('./pages/admin/node-tags.js'),
  adminMembers: () => import('./pages/admin/members.js'),
};
|
||||
|
||||
// Main app container that every page renders into.
const appContainer = document.getElementById('app');
const router = new Router();

// Read feature flags from config (missing flags are treated as enabled
// by the `!== false` checks below).
const config = getConfig();
const features = config.features || {};
|
||||
|
||||
/**
 * Create a route handler that lazy-loads a page module and calls its render function.
 * Failures (module load or render) are logged and shown as an in-app error panel.
 * @param {Function} loader - Module loader function
 * @returns {Function} Route handler
 */
function pageHandler(loader) {
  const showLoadError = (e) => {
    console.error('Page load error:', e);
    appContainer.innerHTML = `
      <div class="flex flex-col items-center justify-center py-20">
        <h1 class="text-4xl font-bold mb-4">${t('common.error')}</h1>
        <p class="text-lg opacity-70 mb-6">${t('common.failed_to_load_page')}</p>
        <p class="text-sm opacity-50 mb-6">${e.message || 'Unknown error'}</p>
        <a href="/" class="btn btn-primary">${t('common.go_home')}</a>
      </div>`;
  };

  return async (params) => {
    try {
      const module = await loader();
      return await module.render(appContainer, params, router);
    } catch (e) {
      showLoadError(e);
    }
  };
}
|
||||
|
||||
// Register routes. Feature-flagged routes are only registered when the
// corresponding flag is not explicitly set to false (absent => enabled).
router.addRoute('/', pageHandler(pages.home));

if (features.dashboard !== false) {
  router.addRoute('/dashboard', pageHandler(pages.dashboard));
}
if (features.nodes !== false) {
  router.addRoute('/nodes', pageHandler(pages.nodes));
  router.addRoute('/nodes/:publicKey', pageHandler(pages.nodeDetail));
  router.addRoute('/n/:prefix', async (params) => {
    // Short link redirect: /n/<prefix> -> /nodes/<prefix> (replaces history entry)
    router.navigate(`/nodes/${params.prefix}`, true);
  });
}
if (features.messages !== false) {
  router.addRoute('/messages', pageHandler(pages.messages));
}
if (features.advertisements !== false) {
  router.addRoute('/advertisements', pageHandler(pages.advertisements));
}
if (features.map !== false) {
  router.addRoute('/map', pageHandler(pages.map));
}
if (features.members !== false) {
  router.addRoute('/members', pageHandler(pages.members));
}
if (features.pages !== false) {
  router.addRoute('/pages/:slug', pageHandler(pages.customPage));
}

// Admin routes (always registered; '/a' and '/a/' both resolve to the index)
router.addRoute('/a', pageHandler(pages.adminIndex));
router.addRoute('/a/', pageHandler(pages.adminIndex));
router.addRoute('/a/node-tags', pageHandler(pages.adminNodeTags));
router.addRoute('/a/members', pageHandler(pages.adminMembers));

// 404 handler
router.setNotFound(pageHandler(pages.notFound));
|
||||
|
||||
/**
 * Update the active state of navigation links.
 * Matching rules: '/' matches only the root, '/nodes' matches any path
 * under /nodes, and every other link matches exactly or as a '/'-separated
 * path prefix.
 * @param {string} pathname - Current URL path
 */
function updateNavActiveState(pathname) {
  const isLinkActive = (href) => {
    if (href === '/') return pathname === '/';
    if (href === '/nodes') return pathname.startsWith('/nodes');
    return pathname === href || pathname.startsWith(`${href}/`);
  };

  for (const link of document.querySelectorAll('[data-nav-link]')) {
    link.classList.toggle('active', isLinkActive(link.getAttribute('href')));
  }

  // Close mobile dropdown if open (DaisyUI dropdowns stay open while focused)
  if (document.activeElement?.closest('.dropdown')) {
    document.activeElement.blur();
  }
}
|
||||
|
||||
/**
 * Compose a page title from entity name and network name.
 * @param {string} entityKey - Translation key for entity (e.g., 'entities.dashboard')
 * @returns {string} "<entity> - <network name>"
 */
function composePageTitle(entityKey) {
  const suffix = config.network_name || 'MeshCore Network';
  return `${t(entityKey)} - ${suffix}`;
}
|
||||
|
||||
/**
 * Update the page title based on the current route.
 * Static routes are looked up in a table (built per call so translations
 * reflect the current locale); dynamic routes (/nodes/:key, /pages/:slug)
 * fall through to the prefix checks at the bottom.
 * @param {string} pathname
 */
function updatePageTitle(pathname) {
  const networkName = config.network_name || 'MeshCore Network';
  const titles = {
    '/': networkName,
    '/a': composePageTitle('entities.admin'),
    '/a/': composePageTitle('entities.admin'),
    '/a/node-tags': `${t('entities.tags')} - ${t('entities.admin')} - ${networkName}`,
    '/a/members': `${t('entities.members')} - ${t('entities.admin')} - ${networkName}`,
  };

  // Add feature-dependent titles (only for routes that were registered)
  if (features.dashboard !== false) titles['/dashboard'] = composePageTitle('entities.dashboard');
  if (features.nodes !== false) titles['/nodes'] = composePageTitle('entities.nodes');
  if (features.messages !== false) titles['/messages'] = composePageTitle('entities.messages');
  if (features.advertisements !== false) titles['/advertisements'] = composePageTitle('entities.advertisements');
  if (features.map !== false) titles['/map'] = composePageTitle('entities.map');
  if (features.members !== false) titles['/members'] = composePageTitle('entities.members');

  if (titles[pathname]) {
    document.title = titles[pathname];
  } else if (pathname.startsWith('/nodes/')) {
    document.title = composePageTitle('entities.node_detail');
  } else if (pathname.startsWith('/pages/')) {
    // Custom pages set their own title in the page module
    document.title = networkName;
  } else {
    document.title = networkName;
  }
}
|
||||
|
||||
// Keep nav highlighting and the document title in sync on every navigation.
router.onNavigate((pathname) => {
  updateNavActiveState(pathname);
  updatePageTitle(pathname);
});

// Load locale then start the router. Stored preference wins over the
// server-provided default. Top-level await defers the first route render
// until translations are available.
const locale = localStorage.getItem('meshcore-locale') || config.locale || 'en';
await loadLocale(locale);
router.start();
|
||||
87
src/meshcore_hub/web/static/js/spa/auto-refresh.js
Normal file
87
src/meshcore_hub/web/static/js/spa/auto-refresh.js
Normal file
@@ -0,0 +1,87 @@
|
||||
/**
|
||||
* Auto-refresh utility for list pages.
|
||||
*
|
||||
* Reads `auto_refresh_seconds` from the app config. When the interval is > 0
|
||||
* it sets up a periodic timer that calls the provided `fetchAndRender` callback
|
||||
* and renders a pause/play toggle button into the given container element.
|
||||
*/
|
||||
|
||||
import { html, litRender, getConfig, t } from './components.js';
|
||||
|
||||
/**
 * Create an auto-refresh controller.
 *
 * Reads `auto_refresh_seconds` from app config; when it is 0/unset (or there
 * is no toggle container) a no-op controller is returned so callers can call
 * cleanup() unconditionally.
 *
 * @param {Object} options
 * @param {Function} options.fetchAndRender - Async function that fetches data and re-renders the page.
 * @param {HTMLElement} options.toggleContainer - Element to render the pause/play toggle into.
 * @returns {{ cleanup: Function }} cleanup function to stop the timer.
 */
export function createAutoRefresh({ fetchAndRender, toggleContainer }) {
  const config = getConfig();
  const intervalSeconds = config.auto_refresh_seconds || 0;

  if (!intervalSeconds || !toggleContainer) {
    return { cleanup() {} };
  }

  let paused = false;     // user pressed the pause toggle
  let isPending = false;  // a fetchAndRender call is still in flight
  let timerId = null;     // setInterval handle; null while paused/cleaned up

  // Render the pause/play toggle reflecting the current paused state.
  function renderToggle() {
    const pauseIcon = html`<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor" class="w-4 h-4"><path d="M5.75 3a.75.75 0 0 0-.75.75v12.5c0 .414.336.75.75.75h1.5a.75.75 0 0 0 .75-.75V3.75A.75.75 0 0 0 7.25 3h-1.5ZM12.75 3a.75.75 0 0 0-.75.75v12.5c0 .414.336.75.75.75h1.5a.75.75 0 0 0 .75-.75V3.75a.75.75 0 0 0-.75-.75h-1.5Z"/></svg>`;
    const playIcon = html`<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor" class="w-4 h-4"><path d="M6.3 2.84A1.5 1.5 0 0 0 4 4.11v11.78a1.5 1.5 0 0 0 2.3 1.27l9.344-5.891a1.5 1.5 0 0 0 0-2.538L6.3 2.84Z"/></svg>`;

    const tooltip = paused ? t('auto_refresh.resume') : t('auto_refresh.pause');
    const icon = paused ? playIcon : pauseIcon;

    litRender(html`
      <button class="btn btn-ghost btn-xs gap-1 opacity-60 hover:opacity-100"
        title=${tooltip}
        @click=${onToggle}>
        ${icon}
        <span class="text-xs">${intervalSeconds}s</span>
      </button>
    `, toggleContainer);
  }

  // Pause stops the interval entirely; resume restarts it (the next refresh
  // happens one full interval later, not immediately).
  function onToggle() {
    paused = !paused;
    if (paused) {
      clearInterval(timerId);
      timerId = null;
    } else {
      startTimer();
    }
    renderToggle();
  }

  // One refresh cycle. isPending prevents overlapping refreshes when a
  // fetch takes longer than the interval.
  async function tick() {
    if (isPending || paused) return;
    isPending = true;
    try {
      await fetchAndRender();
    } catch (_e) {
      // Errors are handled inside fetchAndRender; don't stop the timer.
    } finally {
      isPending = false;
    }
  }

  function startTimer() {
    timerId = setInterval(tick, intervalSeconds * 1000);
  }

  // Initial render and start
  renderToggle();
  startTimer();

  return {
    // Stop the timer; safe to call multiple times.
    cleanup() {
      if (timerId) {
        clearInterval(timerId);
        timerId = null;
      }
    },
  };
}
|
||||
495
src/meshcore_hub/web/static/js/spa/components.js
Normal file
495
src/meshcore_hub/web/static/js/spa/components.js
Normal file
@@ -0,0 +1,495 @@
|
||||
/**
|
||||
* MeshCore Hub SPA - Shared UI Components
|
||||
*
|
||||
* Reusable rendering functions using lit-html.
|
||||
*/
|
||||
|
||||
import { html, nothing } from 'lit-html';
|
||||
import { render } from 'lit-html';
|
||||
import { unsafeHTML } from 'lit-html/directives/unsafe-html.js';
|
||||
import { t } from './i18n.js';
|
||||
|
||||
// Re-export lit-html utilities for page modules
|
||||
export { html, nothing, unsafeHTML };
|
||||
export { render as litRender } from 'lit-html';
|
||||
export { t } from './i18n.js';
|
||||
|
||||
/**
 * Get app config from the embedded window object.
 * @returns {Object} App configuration (empty object when not embedded)
 */
export function getConfig() {
  const embedded = window.__APP_CONFIG__;
  return embedded || {};
}
|
||||
|
||||
/**
 * Build channel label map from app config.
 * Keys are numeric channel indexes and values are non-empty trimmed labels;
 * entries with non-integer keys or blank labels are dropped.
 *
 * @param {Object} [config]
 * @returns {Map<number, string>}
 */
export function getChannelLabelsMap(config = getConfig()) {
  const entries = [];
  for (const [rawIdx, rawLabel] of Object.entries(config.channel_labels || {})) {
    const idx = parseInt(rawIdx, 10);
    const label = typeof rawLabel === 'string' ? rawLabel.trim() : '';
    if (Number.isInteger(idx) && label.length > 0) {
      entries.push([idx, label]);
    }
  }
  return new Map(entries);
}
|
||||
|
||||
/**
 * Resolve a channel label from a numeric index.
 *
 * @param {number|string} channelIdx
 * @param {Map<number, string>} [channelLabels]
 * @returns {string|null} Label, or null when the index is invalid or unlabeled
 */
export function resolveChannelLabel(channelIdx, channelLabels = getChannelLabelsMap()) {
  const idx = Number.parseInt(String(channelIdx), 10);
  if (!Number.isInteger(idx)) return null;
  return channelLabels.get(idx) || null;
}
|
||||
|
||||
/**
 * Parse API datetime strings reliably.
 * MeshCore API often returns UTC timestamps without an explicit timezone
 * suffix; those are treated as UTC by appending 'Z' before Date parsing.
 *
 * @param {string|null} isoString
 * @returns {Date|null} Parsed date, or null for blank/invalid input
 */
export function parseAppDate(isoString) {
  if (typeof isoString !== 'string') return null;

  let value = isoString.trim();
  if (!value) return null;

  // "YYYY-MM-DD HH:MM..." -> swap the space for the ISO 'T' separator.
  if (/^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}/.test(value)) {
    value = value.replace(/\s+/, 'T');
  }

  // A timestamp with a time part but no timezone suffix is assumed UTC.
  const hasTimePart = /T\d{2}:\d{2}/.test(value);
  const hasTimezoneSuffix = /(Z|[+-]\d{2}:\d{2}|[+-]\d{4})$/i.test(value);
  if (hasTimePart && !hasTimezoneSuffix) {
    value += 'Z';
  }

  const result = new Date(value);
  return Number.isNaN(result.getTime()) ? null : result;
}
|
||||
|
||||
/**
 * Page color palette - reads from CSS custom properties (defined in app.css :root).
 * Use for inline styles or dynamic coloring in page modules.
 * Each property is a getter so the computed style is re-read on every
 * access (theme changes are reflected without a reload).
 */
export const pageColors = {
  get dashboard() { return getComputedStyle(document.documentElement).getPropertyValue('--color-dashboard').trim(); },
  get nodes() { return getComputedStyle(document.documentElement).getPropertyValue('--color-nodes').trim(); },
  get adverts() { return getComputedStyle(document.documentElement).getPropertyValue('--color-adverts').trim(); },
  get messages() { return getComputedStyle(document.documentElement).getPropertyValue('--color-messages').trim(); },
  get map() { return getComputedStyle(document.documentElement).getPropertyValue('--color-map').trim(); },
  get members() { return getComputedStyle(document.documentElement).getPropertyValue('--color-members').trim(); },
};
|
||||
|
||||
// --- Formatting Helpers (return strings) ---
|
||||
|
||||
/**
 * Infer a canonical node type from a free-form string such as an
 * advertisement type or a node name.
 * Rules are checked in order, so 'room' wins over 'chat' (e.g. "Chat Room").
 * @param {string|null} value
 * @returns {string|null} 'room' | 'repeater' | 'companion' | 'chat', or null
 */
function inferNodeType(value) {
  const text = (value || '').toLowerCase();
  if (!text) return null;

  const rules = [
    ['room', ['room']],
    ['repeater', ['repeater', 'relay']],
    ['companion', ['companion', 'observer']],
    ['chat', ['chat']],
  ];
  for (const [type, keywords] of rules) {
    if (keywords.some((kw) => text.includes(kw))) return type;
  }
  return null;
}
|
||||
|
||||
/**
 * Get the type emoji for a node advertisement type.
 * The raw value is first normalized through inferNodeType; a lowercased
 * exact match is also accepted. Unknown types get a pin emoji.
 * @param {string|null} advType
 * @returns {string} Emoji character
 */
export function typeEmoji(advType) {
  const emojiByType = new Map([
    ['chat', '\u{1F4AC}'],      // 💬
    ['repeater', '\u{1F4E1}'],  // 📡
    ['companion', '\u{1F4F1}'], // 📱
    ['room', '\u{1FAA7}'],      // 🪧
  ]);
  const key = inferNodeType(advType) || (advType || '').toLowerCase();
  return emojiByType.get(key) || '\u{1F4CD}'; // 📍
}
|
||||
|
||||
/**
 * Extract the first emoji from a string.
 * Uses a regex pattern that matches emoji characters including compound
 * emojis (variation selectors and zero-width-joiner sequences).
 * @param {string|null} str
 * @returns {string|null} First emoji found, or null if none
 */
export function extractFirstEmoji(str) {
  if (!str) return null;
  // Match emoji using Unicode ranges and zero-width joiners
  const emojiRegex = /[\u{1F300}-\u{1F9FF}\u{2600}-\u{26FF}\u{2700}-\u{27BF}\u{1F000}-\u{1F02F}\u{1F0A0}-\u{1F0FF}\u{1F100}-\u{1F64F}\u{1F680}-\u{1F6FF}\u{1F900}-\u{1F9FF}\u{1FA00}-\u{1FA6F}\u{1FA70}-\u{1FAFF}\u{231A}-\u{231B}\u{23E9}-\u{23FA}\u{25AA}-\u{25AB}\u{25B6}\u{25C0}\u{25FB}-\u{25FE}\u{2B50}\u{2B55}\u{3030}\u{303D}\u{3297}\u{3299}](?:\u{FE0F})?(?:\u{200D}[\u{1F300}-\u{1F9FF}\u{2600}-\u{26FF}\u{2700}-\u{27BF}](?:\u{FE0F})?)*|\u{00A9}|\u{00AE}|\u{203C}|\u{2049}|\u{2122}|\u{2139}|\u{2194}-\u{2199}|\u{21A9}-\u{21AA}|\u{24C2}|\u{2934}-\u{2935}|\u{2B05}-\u{2B07}|\u{2B1B}-\u{2B1C}/u;
  const found = emojiRegex.exec(str);
  return found ? found[0] : null;
}
|
||||
|
||||
/**
 * Get the display emoji for a node.
 * Prefers the first emoji embedded in the node name; otherwise derives a
 * type emoji from the advertisement type or, failing that, the name itself.
 * @param {string|null} nodeName - Node's display name
 * @param {string|null} advType - Advertisement type
 * @returns {string} Emoji character to display
 */
export function getNodeEmoji(nodeName, advType) {
  const fromName = extractFirstEmoji(nodeName);
  if (fromName) return fromName;
  const inferredType = inferNodeType(advType) || inferNodeType(nodeName);
  return typeEmoji(inferredType || advType);
}
|
||||
|
||||
/**
 * Format an ISO datetime string to the configured timezone.
 * @param {string|null} isoString
 * @param {Object} [options] - Intl.DateTimeFormat options override
 * @returns {string} Formatted datetime, '-' for missing/unparseable input,
 *   or a raw "YYYY-MM-DD HH:MM:SS" slice if formatting fails.
 */
export function formatDateTime(isoString, options) {
  if (!isoString) return '-';
  try {
    const config = getConfig();
    const tz = config.timezone_iana || 'UTC';
    const locale = config.datetime_locale || 'en-US';
    const date = parseAppDate(isoString);
    if (!date) return '-';
    // Copy caller-supplied options before filling in the timezone default —
    // the previous code assigned timeZone onto the caller's object, mutating it.
    const opts = options ? { ...options } : {
      year: 'numeric', month: '2-digit', day: '2-digit',
      hour: '2-digit', minute: '2-digit', second: '2-digit',
      hour12: false,
    };
    if (!opts.timeZone) opts.timeZone = tz;
    return date.toLocaleString(locale, opts);
  } catch {
    // Defensive: config/formatting failed; show the raw timestamp portion.
    return isoString.slice(0, 19).replace('T', ' ');
  }
}
|
||||
|
||||
/**
 * Format an ISO datetime string to short format (date + HH:MM).
 * Falls back to a raw "YYYY-MM-DD HH:MM" slice if formatting fails.
 * @param {string|null} isoString
 * @returns {string}
 */
export function formatDateTimeShort(isoString) {
  if (!isoString) return '-';
  try {
    const config = getConfig();
    const tz = config.timezone_iana || 'UTC';
    const locale = config.datetime_locale || 'en-US';
    const date = parseAppDate(isoString);
    if (!date) return '-';
    const shortOpts = {
      timeZone: tz,
      year: 'numeric', month: '2-digit', day: '2-digit',
      hour: '2-digit', minute: '2-digit',
      hour12: false,
    };
    return date.toLocaleString(locale, shortOpts);
  } catch {
    return isoString.slice(0, 16).replace('T', ' ');
  }
}
|
||||
|
||||
/**
 * Format an ISO datetime as relative time (e.g., "2m ago", "1h ago").
 * Uses the coarsest non-zero unit (days > hours > minutes); anything under
 * a minute gets the "less than a minute" string.
 * @param {string|null} isoString
 * @returns {string} Localized relative time, or '' for missing/invalid input
 */
export function formatRelativeTime(isoString) {
  if (!isoString) return '';
  const parsed = parseAppDate(isoString);
  if (!parsed) return '';

  const elapsedSec = Math.floor((Date.now() - parsed.getTime()) / 1000);
  const elapsedMin = Math.floor(elapsedSec / 60);
  const elapsedHour = Math.floor(elapsedMin / 60);
  const elapsedDay = Math.floor(elapsedHour / 24);

  if (elapsedDay > 0) return t('time.days_ago', { count: elapsedDay });
  if (elapsedHour > 0) return t('time.hours_ago', { count: elapsedHour });
  if (elapsedMin > 0) return t('time.minutes_ago', { count: elapsedMin });
  return t('time.less_than_minute');
}
|
||||
|
||||
/**
 * Truncate a public key for display.
 * @param {string} key - Full public key
 * @param {number} [length=12] - Characters to show
 * @returns {string} Truncated key with ellipsis, or '-' for missing input
 */
export function truncateKey(key, length = 12) {
  if (!key) return '-';
  return key.length > length ? `${key.slice(0, length)}...` : key;
}
|
||||
|
||||
/**
 * Escape HTML special characters. Rarely needed with lit-html
 * since template interpolation auto-escapes, but kept for edge cases.
 * Uses the browser's own serializer (textContent -> innerHTML) rather
 * than a hand-rolled replacement table.
 * @param {string} str
 * @returns {string} Escaped string, or '' for falsy input
 */
export function escapeHtml(str) {
  if (!str) return '';
  const scratch = document.createElement('div');
  scratch.textContent = str;
  return scratch.innerHTML;
}
|
||||
|
||||
/**
 * Copy text to clipboard with visual feedback.
 * Updates the target element to show "Copied!" temporarily.
 * Falls back to execCommand for browsers without Clipboard API.
 * @param {Event} e - Click event
 * @param {string} text - Text to copy to clipboard
 */
export function copyToClipboard(e, text) {
  e.preventDefault();
  e.stopPropagation();

  // Capture target element synchronously before async operations
  // (currentTarget is only valid during event dispatch).
  const targetElement = e.currentTarget;

  // Temporarily swap the trigger's text for "Copied!" and restore it
  // after 1.5s.
  const showSuccess = (target) => {
    const originalText = target.textContent;
    target.textContent = 'Copied!';
    target.classList.add('text-success');
    setTimeout(() => {
      target.textContent = originalText;
      target.classList.remove('text-success');
    }, 1500);
  };

  // Try modern Clipboard API first
  if (navigator.clipboard && navigator.clipboard.writeText) {
    navigator.clipboard.writeText(text).then(() => {
      showSuccess(targetElement);
    }).catch(err => {
      console.error('Clipboard API failed:', err);
      fallbackCopy(text, targetElement);
    });
  } else {
    // Fallback for older browsers or non-secure contexts
    fallbackCopy(text, targetElement);
  }

  // execCommand-based fallback: select the text in an off-screen textarea
  // and issue a 'copy' command. (Function declaration is hoisted, so the
  // calls above are safe.)
  function fallbackCopy(text, target) {
    const textArea = document.createElement('textarea');
    textArea.value = text;
    // position:fixed far off-screen keeps the textarea invisible without
    // triggering scroll.
    textArea.style.position = 'fixed';
    textArea.style.left = '-999999px';
    textArea.style.top = '-999999px';
    document.body.appendChild(textArea);
    textArea.focus();
    textArea.select();
    try {
      document.execCommand('copy');
      showSuccess(target);
    } catch (err) {
      console.error('Fallback copy failed:', err);
    }
    document.body.removeChild(textArea);
  }
}
|
||||
|
||||
// --- UI Components (return lit-html TemplateResult) ---
|
||||
|
||||
/**
 * Render a node display with emoji, name, and optional description.
 * Used for consistent node representation across lists (nodes, advertisements, messages, etc.).
 *
 * @param {Object} options - Node display options
 * @param {string|null} options.name - Node display name (from tag or advertised name)
 * @param {string|null} options.description - Node description from tags
 * @param {string} options.publicKey - Node public key (for fallback display)
 * @param {string|null} options.advType - Advertisement type (chat, repeater, room)
 * @param {string} [options.size='base'] - Size variant: 'sm' (small lists) or 'base' (normal)
 * @returns {TemplateResult} lit-html template
 */
export function renderNodeDisplay({ name, description, publicKey, advType, size = 'base' }) {
  const emoji = getNodeEmoji(name, advType);
  // Emoji and description classes are currently identical for both size
  // variants; only the name class differs between 'sm' and 'base'.
  const emojiSize = 'text-lg';
  const nameSize = size === 'sm' ? 'text-sm' : 'text-base';
  const descSize = 'text-xs';

  // Named nodes show name (+ optional description); unnamed nodes fall
  // back to a truncated public key in monospace.
  const nameBlock = name
    ? html`<div class="font-medium ${nameSize} truncate">${name}</div>
        ${description ? html`<div class="${descSize} opacity-70 truncate">${description}</div>` : nothing}`
    : html`<div class="font-mono ${nameSize} truncate">${publicKey.slice(0, 16)}...</div>`;

  return html`
    <div class="flex items-center gap-2 min-w-0">
      <span class="${emojiSize} flex-shrink-0" title=${advType || t('node_types.unknown')}>${emoji}</span>
      <div class="min-w-0">
        ${nameBlock}
      </div>
    </div>`;
}
|
||||
|
||||
/**
 * Render a centered loading spinner with vertical padding.
 * Shown while page data is being fetched.
 * @returns {TemplateResult}
 */
export function loading() {
  return html`<div class="flex justify-center py-12"><span class="loading loading-spinner loading-lg"></span></div>`;
}
|
||||
|
||||
/**
 * Render an error alert (daisyUI `alert-error`) with an X-circle icon.
 * @param {string} message - Plain text; lit-html auto-escapes the interpolation
 * @returns {TemplateResult}
 */
export function errorAlert(message) {
  return html`<div role="alert" class="alert alert-error mb-4">
    <svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>
    <span>${message}</span>
  </div>`;
}
|
||||
|
||||
/**
 * Render an info alert (daisyUI `alert-info`) with an i-circle icon.
 * Message is interpolated as plain text; use unsafeHTML for HTML content.
 * @param {string} message - Plain text message
 * @returns {TemplateResult}
 */
export function infoAlert(message) {
  return html`<div role="alert" class="alert alert-info mb-4">
    <svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>
    <span>${message}</span>
  </div>`;
}
|
||||
|
||||
/**
 * Render a success alert (daisyUI `alert-success`) with a check-circle icon.
 * @param {string} message - Plain text; lit-html auto-escapes the interpolation
 * @returns {TemplateResult}
 */
export function successAlert(message) {
  return html`<div role="alert" class="alert alert-success mb-4">
    <svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>
    <span>${message}</span>
  </div>`;
}
|
||||
|
||||
/**
 * Render pagination controls.
 * @param {number} page - Current page (1-based)
 * @param {number} totalPages - Total number of pages
 * @param {string} basePath - Base URL path (e.g., '/nodes')
 * @param {Object} [params={}] - Extra query parameters to preserve
 * @returns {TemplateResult|nothing}
 */
export function pagination(page, totalPages, basePath, params = {}) {
  if (totalPages <= 1) return nothing;

  // Carry every non-empty filter (except 'page') through the page links.
  const carried = Object.entries(params)
    .filter(([k, v]) => k !== 'page' && v !== null && v !== undefined && v !== '')
    .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`);
  const extraQuery = carried.length > 0 ? '&' + carried.join('&') : '';

  const pageUrl = (p) => `${basePath}?page=${p}${extraQuery}`;

  // First, last, and a +/-2 window around the current page get real
  // buttons; the first gap on each side collapses to a single "...".
  const buttons = [];
  for (let p = 1; p <= totalPages; p++) {
    const inWindow = p === 1 || p === totalPages || (p >= page - 2 && p <= page + 2);
    if (p === page) {
      buttons.push(html`<button class="join-item btn btn-sm btn-active">${p}</button>`);
    } else if (inWindow) {
      buttons.push(html`<a href=${pageUrl(p)} class="join-item btn btn-sm">${p}</a>`);
    } else if (p === 2 || p === totalPages - 1) {
      buttons.push(html`<button class="join-item btn btn-sm btn-disabled" disabled>...</button>`);
    }
  }

  const prev = page > 1
    ? html`<a href=${pageUrl(page - 1)} class="join-item btn btn-sm">${t('common.previous')}</a>`
    : html`<button class="join-item btn btn-sm btn-disabled" disabled>${t('common.previous')}</button>`;
  const next = page < totalPages
    ? html`<a href=${pageUrl(page + 1)} class="join-item btn btn-sm">${t('common.next')}</a>`
    : html`<button class="join-item btn btn-sm btn-disabled" disabled>${t('common.next')}</button>`;

  return html`<div class="flex justify-center mt-6"><div class="join">
    ${prev}
    ${buttons}
    ${next}
  </div></div>`;
}
|
||||
|
||||
/**
 * Render a small timezone indicator for page headers,
 * e.g. "(Europe/Berlin)". Defaults to UTC when unconfigured.
 * @returns {TemplateResult}
 */
export function timezoneIndicator() {
  const tz = getConfig().timezone || 'UTC';
  return html`<span class="text-xs opacity-50 ml-2">(${tz})</span>`;
}
|
||||
|
||||
/**
 * Render one satellite-dish icon per receiver, each with a tooltip
 * showing the receiver's name (or truncated key) and relative time.
 * @param {Array} receivers
 * @returns {TemplateResult|nothing}
 */
export function receiverIcons(receivers) {
  if (!receivers || receivers.length === 0) return nothing;
  return html`${receivers.map((r) => {
    const label = r.receiver_node_name || truncateKey(r.receiver_node_public_key || '', 8);
    const when = formatRelativeTime(r.received_at);
    const tooltip = when ? `${label} (${when})` : label;
    return html`<span class="cursor-help" title=${tooltip}>\u{1F4E1}</span>`;
  })}`;
}
|
||||
|
||||
// --- Form Helpers ---
|
||||
|
||||
/**
 * Create a submit handler for filter forms that uses SPA navigation.
 * Use as: @submit=${createFilterHandler('/nodes', navigate)}
 * Empty form fields are dropped from the query string.
 * @param {string} basePath - Base URL path for the page
 * @param {Function} navigate - Router navigate function
 * @returns {Function} Event handler
 */
export function createFilterHandler(basePath, navigate) {
  return (e) => {
    e.preventDefault();
    const params = new URLSearchParams();
    for (const [key, value] of new FormData(e.target).entries()) {
      if (value) params.set(key, value);
    }
    const query = params.toString();
    navigate(query ? `${basePath}?${query}` : basePath);
  };
}
|
||||
|
||||
/**
 * Auto-submit handler for select/checkbox elements.
 * Use as: @change=${autoSubmit}
 * @param {Event} e
 */
export function autoSubmit(e) {
  // closest() returns null if the control is not inside a <form>;
  // optional chaining avoids a TypeError in that case.
  e.target.closest('form')?.requestSubmit();
}
|
||||
|
||||
/**
 * Submit form on Enter key in text inputs.
 * Use as: @keydown=${submitOnEnter}
 * @param {KeyboardEvent} e
 */
export function submitOnEnter(e) {
  if (e.key !== 'Enter') return;
  // Prevent the browser's default Enter handling, then submit the
  // enclosing form (if any) via requestSubmit so validation runs.
  e.preventDefault();
  e.target.closest('form')?.requestSubmit();
}
|
||||
76
src/meshcore_hub/web/static/js/spa/i18n.js
Normal file
76
src/meshcore_hub/web/static/js/spa/i18n.js
Normal file
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* MeshCore Hub SPA - Lightweight i18n Module
|
||||
*
|
||||
* Loads a JSON translation file and provides a t() lookup function.
|
||||
* Shares the same locale JSON files with the Python/Jinja2 server side.
|
||||
*
|
||||
* Usage:
|
||||
* import { t, loadLocale } from './i18n.js';
|
||||
* await loadLocale('en');
|
||||
* t('entities.home'); // "Home"
|
||||
* t('common.total', { count: 42 }); // "42 total"
|
||||
*/
|
||||
|
||||
// Loaded translation tree: nested plain objects, leaves are strings.
let _translations = {};
// Language code of the currently loaded locale.
let _locale = 'en';
|
||||
|
||||
/**
 * Load a locale JSON file from the server.
 * On failure (network error or non-2xx response) the previously loaded
 * translations are kept and a warning is logged.
 * @param {string} locale - Language code (e.g. 'en')
 */
export async function loadLocale(locale) {
  try {
    const res = await fetch(`/static/locales/${locale}.json`);
    if (!res.ok) {
      console.warn(`Failed to load locale '${locale}', status ${res.status}`);
      return;
    }
    _translations = await res.json();
    _locale = locale;
  } catch (e) {
    console.warn(`Failed to load locale '${locale}':`, e);
  }
}
|
||||
|
||||
/**
 * Resolve a dot-separated key in the translations object.
 * @param {string} key
 * @returns {*} Value at that path, or undefined if any segment is missing
 */
function resolve(key) {
  let node = _translations;
  for (const part of key.split('.')) {
    // Stop descending as soon as the current value is not a traversable
    // object (including null, which typeof reports as 'object').
    if (!node || typeof node !== 'object') return undefined;
    node = node[part];
  }
  return node;
}
|
||||
|
||||
/**
 * Translate a key with optional {{var}} interpolation.
 * Falls back to the key itself if not found.
 * @param {string} key - Dot-separated translation key
 * @param {Object} [params={}] - Interpolation values
 * @returns {string}
 */
export function t(key, params = {}) {
  const val = resolve(key);

  // Missing or non-leaf keys fall back to the key string itself.
  if (typeof val !== 'string') return key;

  if (Object.keys(params).length === 0) return val;

  // Substitute {{name}} placeholders; unknown names become ''.
  return val.replace(/\{\{(\w+)\}\}/g, (_, name) => (name in params ? String(params[name]) : ''));
}
|
||||
|
||||
/**
 * Get the currently loaded locale code.
 * Remains 'en' until loadLocale() succeeds with another language.
 * @returns {string}
 */
export function getLocale() {
  return _locale;
}
|
||||
|
||||
// Also expose t() globally for non-module scripts (e.g. charts.js)
|
||||
window.t = t;
|
||||
103
src/meshcore_hub/web/static/js/spa/icons.js
Normal file
103
src/meshcore_hub/web/static/js/spa/icons.js
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* MeshCore Hub SPA - SVG Icon Functions
|
||||
*
|
||||
* Each function returns a lit-html TemplateResult. Pass a CSS class string to customize size.
|
||||
*/
|
||||
|
||||
import { html } from 'lit-html';
|
||||
|
||||
// Dashboard: panel/column layout glyph.
export function iconDashboard(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z" /></svg>`;
}

// Map: folded-map outline.
export function iconMap(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 20l-5.447-2.724A1 1 0 013 16.382V5.618a1 1 0 011.447-.894L9 7m0 13l6-3m-6 3V7m6 10l4.553 2.276A1 1 0 0021 18.382V7.618a1 1 0 00-.553-.894L15 4m0 13V4m0 0L9 7" /></svg>`;
}

// Nodes: stacked server/hardware outline.
export function iconNodes(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" /></svg>`;
}

// Advertisements: megaphone outline.
export function iconAdvertisements(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M11 5.882V19.24a1.76 1.76 0 01-3.417.592l-2.147-6.15M18 13a3 3 0 100-6M5.436 13.683A4.001 4.001 0 017 6h1.832c4.1 0 7.625-1.234 9.168-3v14c-1.543-1.766-5.067-3-9.168-3H7a3.988 3.988 0 01-1.564-.317z" /></svg>`;
}

// Messages: speech bubble with dots.
export function iconMessages(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M8 10h.01M12 10h.01M16 10h.01M9 16H5a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v8a2 2 0 01-2 2h-5l-5 5v-5z" /></svg>`;
}

// Home: house outline.
export function iconHome(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M3 12l2-2m0 0l7-7 7 7M5 10v10a1 1 0 001 1h3m10-11l2 2m-2-2v10a1 1 0 01-1 1h-3m-6 0a1 1 0 001-1v-4a1 1 0 011-1h2a1 1 0 011 1v4a1 1 0 001 1m-6 0h6" /></svg>`;
}
|
||||
|
||||
// Members: group-of-people outline.
export function iconMembers(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4.354a4 4 0 110 5.292M15 21H3v-1a6 6 0 0112 0v1zm0 0h6v-1a6 6 0 00-9-5.197M13 7a4 4 0 11-8 0 4 4 0 018 0z" /></svg>`;
}

// Page: document-with-text outline.
export function iconPage(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" /></svg>`;
}

// Info: i-in-circle outline.
export function iconInfo(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>`;
}

// Alert: exclamation-in-triangle outline.
export function iconAlert(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" /></svg>`;
}

// Chart: trend-line chart outline.
export function iconChart(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M7 12l3-3 3 3 4-4M8 21l4-4 4 4M3 4h18M4 4h16v12a1 1 0 01-1 1H5a1 1 0 01-1-1V4z" /></svg>`;
}

// Refresh: circular-arrows outline.
export function iconRefresh(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" /></svg>`;
}

// Menu: hamburger lines.
export function iconMenu(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 6h16M4 12h8m-8 6h16" /></svg>`;
}

// GitHub: filled octocat mark (fill, not stroke, unlike the others).
export function iconGithub(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} viewBox="0 0 24 24" fill="currentColor"><path d="M12 0c-6.626 0-12 5.373-12 12 0 5.302 3.438 9.8 8.207 11.387.599.111.793-.261.793-.577v-2.234c-3.338.726-4.033-1.416-4.033-1.416-.546-1.387-1.333-1.756-1.333-1.756-1.089-.745.083-.729.083-.729 1.205.084 1.839 1.237 1.839 1.237 1.07 1.834 2.807 1.304 3.492.997.107-.775.418-1.305.762-1.604-2.665-.305-5.467-1.334-5.467-5.931 0-1.311.469-2.381 1.236-3.221-.124-.303-.535-1.524.117-3.176 0 0 1.008-.322 3.301 1.23.957-.266 1.983-.399 3.003-.404 1.02.005 2.047.138 3.006.404 2.291-1.552 3.297-1.23 3.297-1.23.653 1.653.242 2.874.118 3.176.77.84 1.235 1.911 1.235 3.221 0 4.609-2.807 5.624-5.479 5.921.43.372.823 1.102.823 2.222v3.293c0 .319.192.694.801.576 4.765-1.589 8.199-6.086 8.199-11.386 0-6.627-5.373-12-12-12z"/></svg>`;
}
|
||||
|
||||
// External link: box with outgoing arrow.
export function iconExternalLink(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 6H6a2 2 0 00-2 2v10a2 2 0 002 2h10a2 2 0 002-2v-4M14 4h6m0 0v6m0-6L10 14" /></svg>`;
}

// Globe: meridian/parallel grid outline.
export function iconGlobe(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M21 12a9 9 0 01-9 9m9-9a9 9 0 00-9-9m9 9H3m9 9a9 9 0 01-9-9m9 9c1.657 0 3-4.03 3-9s-1.343-9-3-9m0 18c-1.657 0-3-4.03-3-9s1.343-9 3-9m-9 9a9 9 0 019-9" /></svg>`;
}

// Error: X-in-circle outline (same path as errorAlert's inline icon).
export function iconError(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>`;
}

// Channel: hash/pound glyph.
export function iconChannel(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M7 20l4-16m2 16l4-16M6 9h14M4 15h14" /></svg>`;
}

// Success: check-in-circle outline.
export function iconSuccess(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>`;
}

// Lock: closed padlock outline.
export function iconLock(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 15v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2zm10-10V7a4 4 0 00-8 0v4h8z" /></svg>`;
}

// User: single-person outline.
export function iconUser(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z" /></svg>`;
}

// Email: envelope outline.
export function iconEmail(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M3 8l7.89 5.26a2 2 0 002.22 0L21 8M5 19h14a2 2 0 002-2V7a2 2 0 00-2-2H5a2 2 0 00-2 2v10a2 2 0 002 2z" /></svg>`;
}

// Tag: price-tag outline.
export function iconTag(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A2 2 0 013 12V7a4 4 0 014-4z" /></svg>`;
}

// Users: multi-person group outline.
export function iconUsers(cls = 'h-5 w-5') {
  return html`<svg xmlns="http://www.w3.org/2000/svg" class=${cls} fill="none" viewBox="0 0 24 24" stroke="currentColor"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" /></svg>`;
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user