Compare commits

246 Commits
v0.2.1 ... main

Author SHA1 Message Date
JingleManSweep
92ff1ab306 Merge pull request #146 from ipnet-mesh/chore/codecov-tests
Add Test upload
2026-03-18 12:05:17 +00:00
JingleManSweep
0e2a24caa6 Upgrade Codecov action and specify report type
Updated Codecov action to version 5 and added report type.
2026-03-18 12:03:05 +00:00
JingleManSweep
ff36a991af Update ci.yml 2026-03-18 11:57:23 +00:00
JingleManSweep
fa1a2ecc17 Add Codecov badge to README
Added Codecov badge to README for coverage tracking.
2026-03-18 11:55:06 +00:00
JingleManSweep
9099ffb0cb Merge pull request #145 from ipnet-mesh/fix/codecov-default
Add push trigger for CI on main branch
2026-03-18 11:42:37 +00:00
JingleManSweep
f8219b4626 Add push trigger for CI on main branch 2026-03-18 11:39:42 +00:00
JingleManSweep
27b78d6904 Merge pull request #144 from ipnet-mesh/chore/add-codecov
Add CODECOV_TOKEN to CI workflow
2026-03-18 11:32:45 +00:00
JingleManSweep
d4c3e127a2 Add CODECOV_TOKEN to CI workflow 2026-03-18 11:23:32 +00:00
JingleManSweep
92e9ccdbfa Merge pull request #143 from ipnet-mesh/feature/multibyte-support
feat: support multibyte path hashes for MeshCore firmware v1.14+
2026-03-17 23:02:51 +00:00
Louis King
29b5820ed1 feat: support multibyte path hashes for MeshCore firmware v1.14+
Update path hash handling to accept variable-length hex-encoded hashes
(e.g. "4a" for single-byte, "b3fa" for multibyte) instead of requiring
exactly 2-character hashes. Bump meshcore dependency to >=2.3.0.

- Update normalizer to accept even-length hex strings >= 2 chars
- Update schemas and model docstrings for variable-length hashes
- Add tests for multibyte and mixed-length path hash round-trips
- Fix web test flakiness from local .env datetime locale leaking
2026-03-17 22:59:29 +00:00
JingleManSweep
889aa32e3a Merge pull request #142 from ipnet-mesh/fix/security-fixes
fix: harden security across auth, XSS, and proxy trust
2026-03-09 22:57:33 +00:00
Louis King
3c3873951d chore: add agentmap and security fixes planning files 2026-03-09 22:54:53 +00:00
Louis King
4b58160f31 fix: harden security across auth, XSS, and proxy trust
- Use hmac.compare_digest for constant-time API key comparison in auth
  and metrics endpoints to prevent timing attacks
- Escape user-controlled data in admin JS templates (members, node-tags)
  to prevent XSS via innerHTML
- Escape </script> sequences in embedded JSON config to prevent XSS
  breakout from <script> blocks
- Add configurable WEB_TRUSTED_PROXY_HOSTS setting instead of trusting
  all proxy headers unconditionally
- Warn on startup when admin is enabled with default trust-all proxy
- Remove legacy HTML dashboard endpoint (unused, superseded by SPA)
- Add comprehensive auth and dashboard test coverage
2026-03-09 22:53:53 +00:00
JingleManSweep
a32255e110 fix: support monochrome custom logos via logo-invert.svg filename convention (#141)
Custom logos were hardcoded as full-color, making white/monochrome logos
invisible in light mode. Adds logo-invert.svg as a higher-priority
candidate that enables the brightness filter in light mode.
2026-03-09 17:30:40 +00:00
JingleManSweep
59a1898824 Merge pull request #139 from ipnet-mesh/renovate/docker-metadata-action-6.x
Update docker/metadata-action action to v6
2026-03-06 21:21:14 +00:00
renovate[bot]
9256f8375d Update docker/metadata-action action to v6 2026-03-06 19:03:36 +00:00
JingleManSweep
e9b25c1ca7 Merge pull request #140 from ipnet-mesh/renovate/docker-build-push-action-7.x
Update docker/build-push-action action to v7
2026-03-06 19:03:03 +00:00
renovate[bot]
749bed6d5b Update docker/build-push-action action to v7 2026-03-06 18:04:06 +00:00
JingleManSweep
97539cb960 Merge pull request #128 from yellowcooln/main
LetsMesh Compatibility Mode: Decoder-Backed Messaging, Admin Proxy Auth, and Branding/Timezone Improvements
2026-03-06 18:03:32 +00:00
yellowcooln
c418959e5d Use patch-package file for meshcore-decoder Docker compatibility patch 2026-03-06 12:35:47 -05:00
JingleManSweep
14fac89f49 Merge branch 'main' into main 2026-03-05 12:00:36 +00:00
JingleManSweep
8201be5a39 Merge pull request #137 from ipnet-mesh/renovate/docker-setup-buildx-action-4.x
Update docker/setup-buildx-action action to v4
2026-03-05 11:47:19 +00:00
renovate[bot]
17fa2f1005 Update docker/setup-buildx-action action to v4 2026-03-05 11:44:58 +00:00
JingleManSweep
535186efb1 Merge pull request #138 from ipnet-mesh/renovate/docker-setup-qemu-action-4.x
Update docker/setup-qemu-action action to v4
2026-03-05 11:44:36 +00:00
JingleManSweep
fa1db5e709 Merge branch 'main' into renovate/docker-setup-qemu-action-4.x 2026-03-05 11:42:14 +00:00
JingleManSweep
840b8636a2 Merge pull request #136 from ipnet-mesh/renovate/docker-login-action-4.x
Update docker/login-action action to v4
2026-03-05 11:42:00 +00:00
renovate[bot]
cb305083e7 Update docker/setup-qemu-action action to v4 2026-03-05 11:38:49 +00:00
renovate[bot]
d475a12292 Update docker/login-action action to v4 2026-03-05 11:38:41 +00:00
JingleManSweep
53f0ce7225 Merge pull request #135 from ipnet-mesh/chore/fix-actions
Updates
2026-03-05 11:36:55 +00:00
Louis King
90268e9b98 Updates 2026-03-05 11:34:18 +00:00
yellowcooln
18edcfe9bf Extract LetsMesh normalization from subscriber into dedicated module 2026-03-04 20:21:49 -05:00
yellowcooln
2a380f88b4 Fix review items 001/003/005 for decoder, channel labels, and node filters 2026-03-04 20:07:37 -05:00
yellowcooln
c22274c4e5 Add LetsMesh structured event parity mappings 2026-03-03 16:18:54 -05:00
Yellowcooln
54449aa5fb Delete agent.md 2026-03-03 16:18:54 -05:00
yellowcooln
15556c3eb9 Document NPM admin proxy configuration and auth troubleshooting 2026-03-03 16:18:54 -05:00
yellowcooln
6a66eab663 Refine LetsMesh status ingest and custom logo behavior 2026-03-03 16:18:54 -05:00
yellowcooln
2f40b4a730 Add LetsMesh compatibility ingest, decoder integration, and admin auth updates 2026-03-03 16:18:54 -05:00
JingleManSweep
3eff7f03db Merge pull request #130 from shiqual/main
Add Dutch localization file nl.json
2026-03-02 23:38:37 +00:00
JingleManSweep
905ea0190b Merge branch 'main' into main 2026-03-02 23:35:45 +00:00
JingleManSweep
86cc7edca3 Merge pull request #129 from ipnet-mesh/renovate/major-github-artifact-actions
Update actions/upload-artifact action to v7
2026-03-02 23:30:39 +00:00
shiqual
eb3f8508b7 Add Dutch localization file nl.json
Dutch translation
2026-03-02 00:13:46 +01:00
renovate[bot]
74a34fdcba Update actions/upload-artifact action to v7 2026-02-26 20:53:07 +00:00
JingleManSweep
175fc8c524 Merge pull request #127 from ipnet-mesh/chore/fix-metrics-labels
Add role label to node last seen metric and filter alerts by role
2026-02-19 00:05:08 +00:00
Louis King
2a153a5239 Add role label to node last seen metric and filter alerts by role
Joins NodeTag (key='role') to the node last seen Prometheus metric so
alert rules can target infrastructure nodes only (role="infra").

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-19 00:01:20 +00:00
JingleManSweep
de85e0cd7a Merge pull request #126 from ipnet-mesh/feat/prometheus
Add Prometheus metrics endpoint, Alertmanager, and 1h stats window
2026-02-18 23:09:22 +00:00
Louis King
5a20da3afa Add Prometheus metrics endpoint, Alertmanager, and 1h stats window
Add /metrics endpoint with Prometheus gauges for nodes, messages,
advertisements, telemetry, trace paths, events, and members. Include
per-node last_seen timestamps for alerting. Add Alertmanager service
to Docker Compose metrics profile with default blackhole receiver.
Add NodeNotSeen alert rule (48h threshold). Add 1h time window to
all windowed metrics alongside existing 24h/7d/30d windows.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 23:06:07 +00:00
JingleManSweep
dcd33711db Merge pull request #125 from ipnet-mesh/feat/auto-update-lists
Add configurable auto-refresh for list pages
2026-02-18 16:07:25 +00:00
Louis King
a8cb20fea5 Add configurable auto-refresh for list pages
Nodes, advertisements, and messages pages now auto-refresh on a
configurable interval (WEB_AUTO_REFRESH_SECONDS, default 30s). A
pause/play toggle in the page header lets users control it. Setting
the interval to 0 disables auto-refresh entirely.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 14:37:33 +00:00
JingleManSweep
3ac5667d7a Merge pull request #118 from ipnet-mesh/feat/node-list-tag-improvements
Fix clipboard copy error with null target
2026-02-14 01:49:09 +00:00
JingleManSweep
c8c53b25bd Merge branch 'main' into feat/node-list-tag-improvements 2026-02-14 01:46:45 +00:00
Louis King
e4a1b005dc Fix clipboard copy error with null target
Capture e.currentTarget synchronously before async operations
to prevent it from becoming null in async promise handlers.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 01:44:28 +00:00
JingleManSweep
27adc6e2de Merge pull request #117 from ipnet-mesh/feat/node-list-tag-improvements
Improve node list tag display with name, description, members, and emoji extraction
2026-02-14 01:37:11 +00:00
Louis King
835fb1c094 Respect FEATURE_MEMBERS flag in advertisements page
- Only fetch members data when feature is enabled
- Hide member filter when feature is disabled

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 01:32:18 +00:00
Louis King
d7a351a803 Respect FEATURE_MEMBERS flag in nodes list
- Only fetch members data when feature is enabled
- Hide member filter when feature is disabled
- Hide member column when feature is disabled
- Adjust table colspan dynamically based on feature

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 01:30:38 +00:00
JingleManSweep
317627833c Merge pull request #116 from ipnet-mesh/feat/node-list-tag-improvements
Improve node display with descriptions, members, and emoji extraction
2026-02-14 01:24:05 +00:00
Louis King
f4514d1150 Improve node display with descriptions, members, and emoji extraction
Enhances the web dashboard's node presentation to match official MeshCore
app behavior and provide better user experience:

- Extract emoji from node names (e.g., "🏠 Home Gateway" uses 🏠 icon)
- Display description tags under node names across all list pages
- Add Member column to show network member associations
- Add copyable public key columns on Nodes and Advertisements pages
- Create reusable renderNodeDisplay() component for consistency
- Improve node detail page layout with larger emoji and inline description
- Document standard node tags (name, description, member_id, etc.)
- Fix documentation: correct Python version requirement and tag examples

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 01:20:52 +00:00
JingleManSweep
7be5f6afdf Merge pull request #115 from ipnet-mesh/chore/http-caching
Add HTTP caching for web dashboard resources
2026-02-14 00:05:44 +00:00
Louis King
54695ab9e2 Add beautifulsoup4 to dev dependencies
Required for HTML parsing in web caching tests.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 00:03:28 +00:00
Louis King
189eb3a139 Add HTTP caching for web dashboard resources
Implement cache-control middleware to optimize browser caching and reduce
bandwidth usage. Static files are cached for 1 year when accessed with
version parameters, while dynamic content is never cached.

Changes:
- Add CacheControlMiddleware with path-based caching logic
- Register middleware in web app after ProxyHeadersMiddleware
- Add version query parameters to CSS, JS, and app.js references
- Create comprehensive test suite (20 tests) for all cache behaviors

Cache strategy:
- Static files with ?v=X.Y.Z: 1 year (immutable)
- Static files without version: 1 hour (fallback)
- SPA shell HTML: no-cache (dynamic config)
- Health endpoints: no-cache, no-store (always fresh)
- Map data: 5 minutes (location updates)
- Custom pages: 1 hour (stable markdown)
- API proxy: pass-through (backend controls)

All 458 tests passing, 95% middleware coverage.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-14 00:01:08 +00:00
JingleManSweep
96ca6190db Merge pull request #113 from ipnet-mesh/claude/add-i18n-support-1duUx
Fix translation key in node detail page: nodes.tags → entities.tags
2026-02-13 23:10:35 +00:00
Louis King
baf08a9545 Shorten translation call-to-action with GitHub alert
Replaced verbose translation section with concise GitHub alert notification.

- Uses [!IMPORTANT] alert style for better visibility
- Reduced from 16 lines to 4 lines
- Keeps essential information and link to Translation Guide
- More scannable and professional appearance

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 23:07:53 +00:00
JingleManSweep
1d3e649ce0 Merge branch 'main' into claude/add-i18n-support-1duUx 2026-02-13 23:03:43 +00:00
Louis King
45abc66816 Remove Claude Code review GitHub action
Removed the code-review.yml workflow that automatically runs Claude Code review on pull requests.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 23:03:22 +00:00
Louis King
9c8eb27455 Fix translation key in node detail page: nodes.tags → entities.tags
The Tags panel title was showing 'nodes.tags' as literal text instead of the translation.

Fixed: node-detail.js line 174 now uses entities.tags

Comprehensive review completed:
- Verified all 115 unique translation keys across all pages
- All keys properly resolve to valid translations in en.json
- All i18n tests passing

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 23:01:49 +00:00
JingleManSweep
e6c6d4aecc Merge pull request #112 from ipnet-mesh/claude/add-i18n-support-1duUx
Add i18n support for web dashboard
2026-02-13 22:38:49 +00:00
Louis King
19bb06953e Fix remaining translation key: common.all_nodes
Replaced non-existent common.all_nodes key with common.all_entity pattern.

- advertisements.js: Use common.all_entity with entities.nodes
- map.js: Use common.all_entity with entities.nodes

All translation keys now properly resolve across the entire dashboard.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 22:36:17 +00:00
Louis King
1f55d912ea Fix translation key references across all pages
Fixes critical issue where translation keys were displaying as literal text instead of translations.

Changes:
- home.js: Fix stat headers (home.* → entities.*)
- dashboard.js: Fix stat headers, chart labels, table columns
- nodes.js: Fix table columns and filter labels (common.* → entities.*)
- advertisements.js: Fix filter widgets and table headers
- messages.js: Fix table column header
- map.js: Fix filter label and dropdown
- admin/node-tags.js: Fix node label reference

All translation keys now correctly reference entities.* section.
Used common.all_entity pattern instead of non-existent common.all_members.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 22:31:59 +00:00
Louis King
5272a72647 Refactor i18n, add translation guide, and audit documentation
## i18n Refactoring

- Refactor admin translations to use common composable patterns
- Add common patterns: delete_entity_confirm, entity_added_success, move_entity_to_another_node, etc.
- Remove 18 duplicate keys from admin_members and admin_node_tags sections
- Update all admin JavaScript files to use new common patterns with dynamic entity composition
- Fix label consistency: rename first_seen to first_seen_label to match naming convention

## Translation Documentation

- Create comprehensive translation reference guide (languages.md) with 200+ documented keys
- Add translation architecture documentation to AGENTS.md with examples and best practices
- Add "Help Translate" call-to-action section in README with link to translation guide
- Add i18n feature to README features list

## Documentation Audit

- Add undocumented config options: API_KEY, WEB_LOCALE, WEB_DOMAIN to README and .env.example
- Fix outdated CLI syntax: interface --mode receiver → interface receiver
- Update database migration commands to use CLI wrapper (meshcore-hub db) instead of direct alembic
- Add static/locales/ directory to project structure section
- Add i18n configuration (WEB_LOCALE, WEB_THEME) to docker-compose.yml

## Testing

- All 438 tests passing
- All pre-commit checks passing (black, flake8, mypy)
- Added tests for new common translation patterns

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 22:19:37 +00:00
Louis King
b2f8e18f13 Fix admin translations to use entity references
- Update admin index page to use entities.members and entities.tags
- Rename admin.node_tags_description to admin.tags_description
- Remove redundant admin.*_title keys in favor of entities

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 21:33:35 +00:00
Louis King
a15e91c754 Further refine i18n structure
- Remove "nav" section, use "entities" references instead
- Remove composite strings like "Total Nodes", "Recent Advertisements"
  - Use composed patterns: t('common.total_entity', { entity: t('entities.nodes') })
  - Use common.recent_entity, common.edit_entity, common.add_entity patterns
- Hardcode MeshCore tagline (official trademark, not configurable)
- Update all page components and templates to use entity-based translations
- Update tests to reflect new structure
- Remove redundant page-specific composite keys

This maximizes reusability and reduces duplication across translations.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 21:32:36 +00:00
Louis King
85129e528e Refactor i18n translations for better maintainability
- Remove page_title section, compose titles dynamically as "{{entity}} - {{network_name}}"
- Add entities section for centralized entity names (nodes, members, tags, etc.)
- Replace specific action translations with composed patterns (add_entity, edit_entity, etc.)
- Create links section for common platform names (github, discord, youtube)
- Remove redundant page-specific title fields, use entity names instead
- Update all page components to use new translation structure
- Keep user-defined strings (network_name) separate from translatable content

This follows i18n best practices by using composition over duplication,
centralizing reusable terms, and making it easier to add new languages.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-13 21:19:02 +00:00
Claude
127cd7adf6 Add i18n support for web dashboard
Implement lightweight i18n infrastructure with shared JSON translation
files used by both server-side Jinja2 templates and client-side SPA.

- Add custom i18n module (Python + JS, ~80 lines total, zero deps)
- Create en.json with ~200 translation keys covering all web strings
- Add WEB_LOCALE config setting (default: 'en', with localStorage override)
- Translate all navigation labels, page titles, and footer in spa.html
- Translate all 13 SPA page modules (home, dashboard, nodes, etc.)
- Translate shared components (pagination, relative time, charts)
- Translate all 3 admin pages (index, members, node-tags)
- Fix Adverts/Advertisements inconsistency (standardize to Advertisements)
- Add i18n unit tests with 100% coverage

https://claude.ai/code/session_01FbnUnwYAwPrsQmAh5EuSkF
2026-02-13 18:49:06 +00:00
JingleManSweep
91b3f1926f Merge pull request #110 from ipnet-mesh/chore/testing-claude-github-actions
Testing Claude GitHub Actions integrations
2026-02-11 12:53:17 +00:00
Louis King
3ef94a21df Testing Claude GitHub Actions integrations 2026-02-11 12:49:17 +00:00
JingleManSweep
19e724fcc8 Merge pull request #109 from ipnet-mesh/chore/test-claude
Updates
2026-02-11 12:40:56 +00:00
Louis King
7b7910b42e Updates 2026-02-11 12:35:45 +00:00
JingleManSweep
c711a0eb9b Merge pull request #108 from ipnet-mesh/renovate/actions-checkout-6.x
Update actions/checkout action to v6
2026-02-11 12:25:52 +00:00
renovate[bot]
dcd7ed248d Update actions/checkout action to v6 2026-02-11 12:24:09 +00:00
JingleManSweep
b0ea6bcc0e Merge pull request #107 from ipnet-mesh/add-claude-github-actions-1770812503821
Add Claude Code GitHub Workflow
2026-02-11 12:23:36 +00:00
JingleManSweep
7ef41a3671 "Claude Code Review workflow" 2026-02-11 12:21:46 +00:00
JingleManSweep
a7611dd8d4 "Claude PR Assistant workflow" 2026-02-11 12:21:44 +00:00
JingleManSweep
8f907edce6 Merge pull request #106 from ipnet-mesh/chore/screenshot
Updated Screenshot
2026-02-11 12:09:05 +00:00
JingleManSweep
95d1b260ab Merge pull request #105 from ipnet-mesh/renovate/docker-build-push-action-6.x
Update docker/build-push-action action to v6
2026-02-11 12:08:35 +00:00
renovate[bot]
fba2656268 Update docker/build-push-action action to v6 2026-02-11 12:08:24 +00:00
JingleManSweep
69adca09e3 Merge pull request #102 from ipnet-mesh/renovate/major-github-artifact-actions
Update actions/upload-artifact action to v6
2026-02-11 12:06:28 +00:00
JingleManSweep
9c2a0527ff Merge pull request #101 from ipnet-mesh/renovate/actions-setup-python-6.x
Update actions/setup-python action to v6
2026-02-11 12:04:56 +00:00
JingleManSweep
c0db5b1da5 Merge pull request #103 from ipnet-mesh/renovate/codecov-codecov-action-5.x
Update codecov/codecov-action action to v5
2026-02-11 12:04:31 +00:00
Louis King
77dcbb77ba Push 2026-02-11 12:02:40 +00:00
renovate[bot]
5bf0265fd9 Update codecov/codecov-action action to v5 2026-02-11 12:01:49 +00:00
renovate[bot]
1adef40fdc Update actions/upload-artifact action to v6 2026-02-11 12:01:21 +00:00
renovate[bot]
c9beb7e801 Update actions/setup-python action to v6 2026-02-11 12:01:18 +00:00
JingleManSweep
cd14c23cf2 Merge pull request #104 from ipnet-mesh/chore/ci-fixes
CI Fixes
2026-02-11 11:54:10 +00:00
Louis King
708bfd1811 CI Fixes 2026-02-11 11:53:21 +00:00
JingleManSweep
afdc76e546 Merge pull request #97 from ipnet-mesh/renovate/python-3.x
Update python Docker tag to v3.14
2026-02-11 11:34:18 +00:00
renovate[bot]
e07b9ee2ab Update python Docker tag to v3.14 2026-02-11 11:33:31 +00:00
JingleManSweep
00851bfcaa Merge pull request #100 from ipnet-mesh/chore/fix-ci
Push
2026-02-11 11:30:44 +00:00
Louis King
6a035e41c0 Push 2026-02-11 11:30:25 +00:00
JingleManSweep
2ffc78fda2 Merge pull request #98 from ipnet-mesh/renovate/actions-checkout-6.x
Update actions/checkout action to v6
2026-02-11 11:26:25 +00:00
renovate[bot]
3f341a4031 Update actions/checkout action to v6 2026-02-11 11:24:17 +00:00
JingleManSweep
1ea729bd51 Merge pull request #96 from ipnet-mesh/renovate/configure
Configure Renovate
2026-02-11 11:23:03 +00:00
renovate[bot]
d329f67ba8 Add renovate.json 2026-02-11 11:22:03 +00:00
JingleManSweep
c42a2deffb Merge pull request #95 from ipnet-mesh/chore/add-sponsorship-badge
Add README badges and workflow path filters
2026-02-11 00:40:57 +00:00
Louis King
dfa4157c9c Fixed funding 2026-02-11 00:36:13 +00:00
Louis King
b52fd32106 Add path filters to CI and Docker workflows
Skip unnecessary workflow runs when only non-code files change (README,
docs, etc). Docker workflow always runs on version tags.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 00:32:46 +00:00
Louis King
4bbf43a078 Add CI, Docker, and sponsorship badges to README 2026-02-11 00:29:06 +00:00
JingleManSweep
deae9c67fe Add Buy Me a Coffee funding option
Added Buy Me a Coffee funding option.
2026-02-11 00:25:26 +00:00
JingleManSweep
ceee27a3af Merge pull request #94 from ipnet-mesh/chore/docs-update
Update docs and add Claude Code skills
2026-02-11 00:24:24 +00:00
Louis King
f478096bc2 Add Claude Code skills for git branching, PRs, and releases 2026-02-11 00:01:51 +00:00
Louis King
8ae94a7763 Add Claude Code skills for documentation and quality checks 2026-02-10 23:49:58 +00:00
Louis King
fb6cc6f5a9 Update docs to reflect recent features and config options
- Add contact cleanup, admin UI, content home, and webhook secret
  settings to .env.example and README
- Update AGENTS.md project structure with pages.py, example content
  dirs, and corrected receiver init steps
- Document new API endpoints (prefix lookup, members, dashboard
  activity, send-advertisement) in README
- Fix Docker Compose core profile to include db-migrate service
2026-02-10 23:49:31 +00:00
JingleManSweep
a98b295618 Merge pull request #93 from ipnet-mesh/feat/theme-improvements
Add radial glow and solid tint backgrounds to panels and filter bars
2026-02-10 20:26:50 +00:00
Louis King
da512c0d9f Add radial glow and solid tint backgrounds to panels and filter bars
- Add panel-glow CSS class with radial gradient using section colors
- Add panel-solid CSS class for neutral solid-tinted filter bars
- Apply colored glow to stat cards on home and dashboard pages
- Apply neutral grey glow to dashboard chart and data panels
- Apply neutral solid background to filter panels on list pages
- Add shadow-xl drop shadows to dashboard panels and home hero
- Limit dashboard recent adverts to 5 rows

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 20:23:19 +00:00
JingleManSweep
652486aa15 Merge pull request #92 from ipnet-mesh/fix/network-name-colours
Fix hero title to use black/white per theme
2026-02-10 18:24:16 +00:00
Louis King
947c12bfe1 Fix hero title to use black/white per theme 2026-02-10 18:23:46 +00:00
JingleManSweep
e80cd3a83c Merge pull request #91 from ipnet-mesh/feature/light-mode
Add light mode theme with dark/light toggle
2026-02-10 18:16:07 +00:00
Louis King
70ecb5e4da Add light mode theme with dark/light toggle
- Add sun/moon toggle in navbar (top-right) using DaisyUI swap component
- Store user theme preference in localStorage, default to server config
- Add WEB_THEME env var to configure default theme (dark/light)
- Add light mode color palette with adjusted section colors for contrast
- Use CSS filter to invert white SVG logos in light mode
- Add section-colored hover/active backgrounds for navbar items
- Style hero buttons with thicker outlines and white text on hover
- Soften hero heading color in light mode
- Change member callsign badges from green to neutral
- Update AGENTS.md, .env.example with WEB_THEME documentation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 18:11:11 +00:00
JingleManSweep
565e0ffc7b Merge pull request #90 from ipnet-mesh/feat/feature-flags
Add feature flags to control web dashboard page visibility
2026-02-10 16:52:31 +00:00
Louis King
bdc3b867ea Fix missing receiver tooltips on advertisements and messages pages
The multi-receiver table view used data-* attributes that were never
read instead of native title attributes. Replace with title= so the
browser shows the receiver node name on hover.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 16:23:40 +00:00
Louis King
48786a18f9 Fix missing profile and tx_power in radio config JSON
The radio_config_dict passed to the frontend was missing the profile
and tx_power fields, causing the Network Info panel to omit them.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 15:56:45 +00:00
Louis King
706c32ae01 Add feature flags to control web dashboard page visibility
Operators can now disable specific pages (Dashboard, Nodes, Advertisements,
Messages, Map, Members, Pages) via FEATURE_* environment variables. Disabled
features are fully hidden: removed from navigation, return 404 on routes,
and excluded from sitemap/robots.txt. Dashboard auto-disables when all of
Nodes/Advertisements/Messages are off. Map auto-disables when Nodes is off.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 15:43:23 +00:00
JingleManSweep
bafc16d746 Merge pull request #89 from ipnet-mesh/claude/fix-admin-auth-bypass-atTWJ
Enforce authentication for admin API proxy mutations
2026-02-10 08:51:40 +00:00
Claude
9b09e32d41 Fix admin authentication bypass in web dashboard
The admin pages only checked config.admin_enabled but not
config.is_authenticated, allowing unauthenticated users to access
admin functionality when WEB_ADMIN_ENABLED=true. Additionally, the
API proxy forwarded the service-level Bearer token on all requests
regardless of user authentication, granting full admin API access
to unauthenticated browsers.

Server-side: block POST/PUT/DELETE/PATCH through the API proxy when
admin is enabled and no X-Forwarded-User header is present.

Client-side: add is_authenticated check to all three admin pages,
showing a sign-in prompt instead of admin content.

https://claude.ai/code/session_01HYuz5XLjYZ6JaowWqz643A
2026-02-10 01:20:04 +00:00
JingleManSweep
2b9f83e55e Merge pull request #88 from ipnet-mesh/feat/spa
Initial SPA (Single Page App) Conversion
2026-02-10 00:43:53 +00:00
Louis King
75c1966385 Fix Map nav icon color to exact DaisyUI warning yellow
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 00:39:06 +00:00
Louis King
3089ff46a8 Clean up legacy templates, fix nav colors and QR code timing
Remove all old Jinja2 templates (only spa.html is used now). Fix Map
nav icon color to yellow (matching btn-warning) and Members to orange.
Fix QR code intermittently not rendering on node detail pages with GPS
coords by deferring init to requestAnimationFrame.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 00:36:24 +00:00
Louis King
f1bceb5780 Rewrite web dashboard as Single Page Application
Replace server-side rendered Jinja2 page routes with a client-side SPA
using ES modules, lit-html templating, and a custom History API router.
All page rendering now happens in the browser with efficient DOM diffing.

Key changes:
- Add SPA router, API client, shared components, and 14 page modules
- Serve single spa.html shell template with catch-all route
- Remove server-side page routes (web/routes/) and legacy JS files
- Add centralized OKLCH color palette in CSS custom properties
- Add colored nav icons, navbar spacing, and loading spinner
- Add canonical URL and SEO path exclusions to SPA router
- Update charts.js to read from shared color palette
- Update tests for SPA architecture (template-agnostic assertions)
- Update AGENTS.md and README.md with SPA documentation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 00:23:45 +00:00
JingleManSweep
caf88bdba1 Merge pull request #87 from ipnet-mesh/feat/timezones
Move timezone display to page headers instead of each timestamp
2026-02-09 00:52:28 +00:00
Louis King
9eb1acfc02 Add TZ variable to .env.example
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-09 00:47:58 +00:00
Louis King
62e0568646 Use timezone abbreviation (GMT, EST) instead of full name in headers
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-09 00:46:39 +00:00
Louis King
b4da93e4f0 Move timezone display to page headers instead of each timestamp
- Remove timezone abbreviation from datetime format strings
- Add timezone label to page headers (Nodes, Messages, Advertisements, Map)
- Only show timezone when not UTC to reduce clutter

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-09 00:43:51 +00:00
JingleManSweep
981402f7aa Merge pull request #86 from ipnet-mesh/feat/timezones
Add timezone support for web dashboard date/time display
2026-02-09 00:38:43 +00:00
Louis King
76717179c2 Add timezone support for web dashboard date/time display
- Add TZ environment variable support (standard Linux timezone)
- Create Jinja2 filters for timezone-aware formatting (localtime, localdate, etc.)
- Update all templates to use timezone filters with abbreviation suffix
- Pass TZ through docker-compose for web service
- Document TZ setting in README and AGENTS.md

Timestamps remain stored as UTC; only display is converted.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-09 00:34:57 +00:00
JingleManSweep
f42987347e Merge pull request #85 from ipnet-mesh/chore/tidy-map
Use colored dots for map markers instead of logo
2026-02-08 23:53:48 +00:00
Louis King
25831f14e6 Use colored dots for map markers instead of logo
Replace logo icons with colored circle markers:
- Red dots for infrastructure nodes
- Blue dots for public nodes

Update popup overlay to show type emoji (📡, 💬, etc.) on the left
and infra/public indicator dot on the right of the node name.
Update legend to match new marker style.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-08 23:50:44 +00:00
Louis King
0e6cbc8094 Optimised GitHub CI workflow triggers 2026-02-08 23:40:35 +00:00
JingleManSweep
76630f0bb0 Merge pull request #84 from ipnet-mesh/chore/youtube-link
Add NETWORK_CONTACT_YOUTUBE config for footer link
2026-02-08 23:39:35 +00:00
Louis King
8fbac2cbd6 Add NETWORK_CONTACT_YOUTUBE config for footer link
Add YouTube channel URL configuration option alongside existing
GitHub/Discord/Email contact links. Also crop logo SVG to content
bounds and pass YouTube env var through docker-compose.
2026-02-08 23:36:40 +00:00
Louis King
fcac5e01dc Use network welcome text for SEO meta description
Meta description now uses NETWORK_WELCOME_TEXT prefixed with network
name for better SEO, falling back to generic message if not set.
2026-02-08 23:21:17 +00:00
Louis King
b6f3b2d864 Redesign node detail page with hero map header
- Add hero panel with non-interactive map background when GPS coords exist
- Fix coordinate detection: check node model fields before falling back to tags
- Move node name to standard page header above hero panel
- QR code displayed in hero panel (right side, 140px)
- Map pans to show node at 1/3 horizontal position (avoiding QR overlap)
- Replace Telemetry section with Tags card in grid layout
- Consolidate First Seen, Last Seen, Location into single row
- Add configurable offset support to map-node.js (offsetX, offsetY)
- Add configurable size support to qrcode-init.js
2026-02-08 23:16:13 +00:00
JingleManSweep
7de6520ae7 Merge pull request #83 from ipnet-mesh/feat/js-filter-submit
Add auto-submit for filter controls on list pages
2026-02-08 22:11:10 +00:00
Louis King
5b8b2eda10 Fix mixed content blocking for static assets behind reverse proxy
Add ProxyHeadersMiddleware to trust X-Forwarded-Proto headers from
reverse proxies. This ensures url_for() generates HTTPS URLs when
the app is accessed via HTTPS through nginx or similar proxies.

Without this, static assets (CSS, JS) were blocked by browsers as
mixed content when the site was served over HTTPS.
2026-02-08 22:08:04 +00:00
Louis King
042a1b04fa Add auto-submit for filter controls on list pages
Filter forms now auto-submit when select dropdowns change or when
Enter is pressed in text inputs. Uses a data-auto-submit attribute
pattern for consistency with existing data attribute conventions.
2026-02-08 21:53:35 +00:00
JingleManSweep
5832cbf53a Merge pull request #82 from ipnet-mesh/chore/tidy-html-output
Refactored inline styles/SVG/scripts, improved SEO
2026-02-08 21:45:30 +00:00
Louis King
c540e15432 Improve HTML output and SEO title tags
- Add Jinja2 whitespace control (trim_blocks, lstrip_blocks) to
  eliminate excessive newlines in rendered HTML output
- Reverse title tag order to "Page - Brand" for better SEO (specific
  content first, brand name second to avoid truncation)
- Add dynamic titles for node detail pages using node name
- Standardize UI text: Dashboard, Advertisements, Map, Members
- Remove refresh button from dashboard page

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-08 21:40:19 +00:00
Louis King
6b1b277c6c Refactor HTML output: extract inline CSS, JS, and SVGs
Extract inline styles, JavaScript, and SVG icons from templates into
reusable external resources for improved maintainability and caching.

New static files:
- static/css/app.css: Custom CSS (scrollbar, prose, animations, Leaflet)
- static/js/charts.js: Chart.js helpers with shared colors/options
- static/js/map-main.js: Full map page functionality
- static/js/map-node.js: Node detail page map
- static/js/qrcode-init.js: QR code generation

New icon macros in macros/icons.html:
- icon_info, icon_alert, icon_chart, icon_refresh, icon_menu
- icon_github, icon_globe, icon_error, icon_channel
- icon_success, icon_lock, icon_user, icon_email, icon_tag, icon_users

Updated templates to use external resources and icon macros:
- base.html, home.html, dashboard.html, map.html, node_detail.html
- nodes.html, messages.html, advertisements.html, members.html
- errors/404.html, admin/*.html

Net reduction: ~700 lines of inline code removed from templates.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-08 21:23:06 +00:00
Louis King
470c374f11 Remove redundant Show Chat Nodes checkbox from map
The Node Type dropdown already provides chat node filtering,
making the separate checkbox unnecessary.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-07 21:06:25 +00:00
Louis King
71859b2168 Adjust map zoom levels for mobile devices
- Mobile portrait (< 480px): padding [50, 50] for wider view
- Mobile landscape (< 768px): padding [75, 75]
- Desktop: padding [100, 100]

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-07 21:01:29 +00:00
Louis King
3d7ed53df3 Improve map UI and add QR code to node detail page
Map improvements:
- Change non-infra nodes from emojis to subtle blue circles
- Add "Show Chat Nodes" checkbox (hidden by default)
- Fix z-index for hovered marker labels
- Increase zoom on mobile devices
- Simplify legend to show Infrastructure and Node icons

Node detail page:
- Add QR code for meshcore:// contact protocol
- Move activity (first/last seen) to title row
- QR code positioned under public key with white background
- Protocol: meshcore://contact/add?name=<name>&public_key=<key>&type=<n>
- Type mapping: chat=1, repeater=2, room=3, sensor=4

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-07 20:51:25 +00:00
Louis King
ceaef9178a Fixed map z-order 2026-02-07 20:13:24 +00:00
JingleManSweep
5ccb077188 Merge pull request #81 from ipnet-mesh/feat/public-node-map
Enhance map page with GPS fallback, infrastructure filter, and UI improvements
2026-02-07 20:09:13 +00:00
Louis King
8f660d6b94 Enhance map page with GPS fallback, infrastructure filter, and UI improvements
- Add GPS coordinate fallback: use tag coords, fall back to model coords
- Filter out nodes at (0, 0) coordinates (likely unset defaults)
- Add "Show" filter to toggle between All Nodes and Infrastructure Only
- Add "Show Labels" checkbox (labels hidden by default, appear on hover)
- Infrastructure nodes display network logo instead of emoji
- Add radius-based bounds filtering (20km) to prevent outlier zoom issues
- Position labels underneath pins, centered with transparent background
- Calculate and return infra_center for infrastructure node focus
- Initial map view focuses on infrastructure nodes when available
- Update popup button to outline style
- Add comprehensive tests for new functionality

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-07 20:05:56 +00:00
Louis King
6e40be6487 Updated home page buttons 2026-02-07 14:51:48 +00:00
Louis King
d79e29bc0a Updates 2026-02-07 14:47:12 +00:00
Louis King
2758cf4dd5 Fixed mobile menu 2026-02-07 14:40:17 +00:00
Louis King
f37e993ede Updates 2026-02-07 14:32:44 +00:00
Louis King
b18b3c9aa4 Refactor PAGES_HOME to CONTENT_HOME and add custom logo support
- Replace PAGES_HOME with CONTENT_HOME configuration (default: ./content)
- Content directory now contains pages/ and media/ subdirectories
- Add support for custom logo at $CONTENT_HOME/media/images/logo.svg
- Custom logo replaces favicon and navbar/home logos when present
- Mount media directory as /media for serving custom assets
- Simplify default logo to generic WiFi-style radiating arcs
- Update documentation and example directory structure
- Update tests for new CONTENT_HOME structure

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-07 13:45:42 +00:00
Louis King
9d99262401 Updates 2026-02-06 23:48:43 +00:00
Louis King
adfe5bc503 Updates 2026-02-06 23:38:08 +00:00
Louis King
deaab9b9de Rename /network to /dashboard and add reusable icon macros
- Renamed network route, template, and tests to dashboard
- Added logo.svg for favicon and navbar branding
- Created reusable Jinja2 icon macros for navigation and UI elements
- Updated home page hero layout with centered content and larger logo
- Added Map button alongside Dashboard button in hero section
- Navigation menu items now display icons before labels

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-06 22:53:36 +00:00
Louis King
95636ef580 Removed Claude Code workflow 2026-02-06 19:19:10 +00:00
JingleManSweep
5831592f88 Merge pull request #79 from ipnet-mesh/feat/custom-pages
Feat/custom pages
2026-02-06 19:14:53 +00:00
Louis King
bc7bff8b82 Updates 2026-02-06 19:14:19 +00:00
Louis King
9445d2150c Fix links and update join guide
- Fix T114 manufacturer (Heltec, not LilyGO) and link
- Fix T1000-E product link
- Fix Google Play and App Store links
- Add Amazon to where to buy options
- Add radio configuration step

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-06 19:10:00 +00:00
Louis King
3e9f478a65 Replace example about page with join guide
Add getting started guide covering:
- Node types (Companion, Repeater, Room Server)
- Frequency regulations (868MHz EU/UK, 915MHz US/AU)
- Recommended hardware (Heltec V3, T114, T1000-E, T-Deck Plus)
- Mobile apps (Android/iOS)
- Links to MeshCore docs and web flasher

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-06 19:04:56 +00:00
JingleManSweep
6656bd8214 Merge pull request #78 from ipnet-mesh/feat/custom-pages
Add custom markdown pages feature to web dashboard
2026-02-06 18:40:42 +00:00
Louis King
0f50bf4a41 Add custom markdown pages feature to web dashboard
Allows adding static content pages (About, FAQ, etc.) as markdown files
with YAML frontmatter. Pages are stored in PAGES_HOME directory (default:
./pages), automatically appear in navigation menu, and are included in
the sitemap.

- Add PageLoader class to parse markdown with frontmatter
- Add /pages/{slug} route for rendering custom pages
- Add PAGES_HOME config setting to WebSettings
- Add prose CSS styles for markdown content
- Add pages to navigation and sitemap
- Update docker-compose.yml with pages volume mount
- Add comprehensive tests for PageLoader and routes

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-06 18:36:23 +00:00
Louis King
99206f7467 Updated README 2026-02-06 17:53:02 +00:00
Louis King
3a89daa9c0 Use empty Disallow in robots.txt for broader compatibility
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-06 15:58:52 +00:00
Louis King
86c5ff8f1c SEO fixes 2026-02-06 14:38:26 +00:00
JingleManSweep
59d0edc96f Merge pull request #76 from ipnet-mesh/chore/add-dynamic-sitemap-xml
Added dynamic XML sitemap for SEO
2026-02-06 12:53:20 +00:00
Louis King
b01611e0e8 Added dynamic XML sitemap for SEO 2026-02-06 12:50:40 +00:00
JingleManSweep
1e077f50f7 Merge pull request #75 from ipnet-mesh/chore/add-meshcore-text-seo
Updated SEO descriptions
2026-02-06 12:34:25 +00:00
Louis King
09146a2e94 Updated SEO descriptions 2026-02-06 12:31:40 +00:00
JingleManSweep
56487597b7 Merge pull request #73 from ipnet-mesh/chore/improve-seo
Added SEO optimisations
2026-02-06 12:21:30 +00:00
Louis King
de968f397d Added SEO optimisations 2026-02-06 12:17:27 +00:00
JingleManSweep
3ca5284c11 Merge pull request #72 from ipnet-mesh/chore/add-permissive-robots-txt
Added permissive robots.txt route
2026-02-06 12:12:20 +00:00
Louis King
75d7e5bdfa Added permissive robots.txt route 2026-02-06 12:09:36 +00:00
Louis King
927fcd6efb Fixed README and Docker Compose 2026-02-03 22:58:58 +00:00
JingleManSweep
3132d296bb Merge pull request #71 from ipnet-mesh/chore/fix-compose-profile
Fixed Compose dependencies and switched to Docker managed volume
2026-01-28 21:56:32 +00:00
Louis King
96e4215c29 Fixed Compose dependencies and switched to Docker managed volume 2026-01-28 21:53:36 +00:00
Louis King
fd3c3171ce Fix FastAPI response model for union return type 2026-01-26 22:29:13 +00:00
Louis King
345ffd219b Separate API prefix search from exact match endpoint
- Add /api/v1/nodes/prefix/{prefix} for prefix-based node lookup
- Change /api/v1/nodes/{public_key} to exact match only
- /n/{prefix} now simply redirects to /nodes/{prefix}
- /nodes/{key} resolves prefixes via API and redirects to full key
2026-01-26 22:27:15 +00:00
Louis King
9661b22390 Fix node detail 404 to use custom error page 2026-01-26 22:11:48 +00:00
Louis King
31aa48c9a0 Return 404 page when node not found in detail view 2026-01-26 22:08:01 +00:00
Louis King
1a3649b3be Revert "Simplify 404 page design"
This reverts commit 33649a065b.
2026-01-26 22:07:29 +00:00
Louis King
33649a065b Simplify 404 page design 2026-01-26 22:05:31 +00:00
Louis King
fd582bda35 Add custom 404 error page 2026-01-26 22:01:00 +00:00
Louis King
c42b26c8f3 Make /n/ short link resolve prefix to full public key 2026-01-26 21:57:04 +00:00
Louis King
d52163949a Change /n/ short link to redirect to /nodes/ 2026-01-26 21:48:55 +00:00
Louis King
ca101583f0 Add /n/ short link alias and simplify CI lint job
- Add /n/{public_key} route as alias for /nodes/{public_key} for shorter URLs
- Replace individual lint tools in CI with pre-commit/action for consistency
2026-01-26 21:41:33 +00:00
JingleManSweep
4af0f2ea80 Merge pull request #70 from ipnet-mesh/chore/node-page-prefix-support
Add prefix matching support to node API endpoint
2026-01-26 21:28:43 +00:00
Louis King
0b3ac64845 Add prefix matching support to node API endpoint
Allow users to navigate to a node using any prefix of its public key
instead of requiring the full 64-character key. If multiple nodes match
the prefix, the first one alphabetically is returned.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-26 21:27:36 +00:00
Louis King
3c7a8981ee Increased dedup bucket window to 120s 2026-01-17 20:15:46 +00:00
JingleManSweep
238e28ae41 Merge pull request #67 from ipnet-mesh/chore/tidyup-message-filters-columns
Message Filter and Table Tidying
2026-01-15 18:19:30 +00:00
Louis King
68d5049963 Removed pointless channel number filter and tidied column headings/values 2026-01-15 18:16:31 +00:00
JingleManSweep
624fa458ac Merge pull request #66 from ipnet-mesh/chore/fix-sqlite-path-exists
Ensure SQLite database path/subdirectories exist before initialising …
2026-01-15 17:36:58 +00:00
Louis King
309d575fc0 Ensure SQLite database path/subdirectories exist before initialising database 2026-01-15 17:32:56 +00:00
Louis King
f7b4df13a7 Added more test coverage 2026-01-12 21:00:02 +00:00
Louis King
13bae5c8d7 Added more test coverage 2026-01-12 20:34:53 +00:00
Louis King
8a6b4d8e88 Tidying 2026-01-12 20:02:45 +00:00
JingleManSweep
b67e1b5b2b Merge pull request #65 from ipnet-mesh/claude/plan-member-editor-BwkcS
Plan Member Editor for Organization Management
2026-01-12 19:59:32 +00:00
Louis King
d4e3dc0399 Local tweaks 2026-01-12 19:59:14 +00:00
Claude
7f0adfa6a7 Implement Member Editor admin interface
Add a complete CRUD interface for managing network members at /a/members,
following the proven pattern established by the Tag Editor.

Changes:
- Add member routes to admin.py (GET, POST create/update/delete)
- Create admin/members.html template with member table, forms, and modals
- Add Members navigation card to admin index page
- Include proper authentication checks and flash message handling
- Fix mypy type hints for optional form fields

The Member Editor allows admins to:
- View all network members in a sortable table
- Create new members with all fields (member_id, name, callsign, role, contact, description)
- Edit existing members via modal dialog
- Delete members with confirmation
- Client-side validation for member_id format (alphanumeric + underscore)

All backend API infrastructure (models, schemas, routes) was already implemented.
This is purely a web UI layer built on top of the existing /api/v1/members endpoints.
2026-01-12 19:41:56 +00:00
Claude
94b03b49d9 Add comprehensive Member Editor implementation plan
Create detailed plan for building a Member Editor admin interface at /a/members.
The plan follows the proven Tag Editor pattern and includes:

- Complete route structure for CRUD operations
- Full HTML template layout with modals and forms
- JavaScript event handlers for edit/delete actions
- Integration with existing Member API endpoints
- Testing checklist and acceptance criteria

All backend infrastructure (API, models, schemas) already exists.
This is purely a web UI implementation task estimated at 2-3 hours.
2026-01-12 19:33:13 +00:00
Louis King
20d75fe041 Add bulk copy and delete all tags for node replacement workflow
When replacing a node device, users can now:
- Copy All: Copy all tags to a new node (skips existing tags)
- Delete All: Remove all tags from a node after migration

New API endpoints:
- POST /api/v1/nodes/{pk}/tags/copy-to/{dest_pk}
- DELETE /api/v1/nodes/{pk}/tags

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-11 14:46:51 +00:00
Louis King
307f3935e0 Add access denied page for unauthenticated admin access
When users try to access /a/ without valid OAuth2Proxy headers (e.g.,
GitHub account not in org), they now see a friendly 403 page instead
of a 500 error. Added authentication checks to all admin routes.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-11 13:34:03 +00:00
Louis King
6901bafb02 Tidying Tag Editor layout 2026-01-11 13:13:22 +00:00
JingleManSweep
e595dc2b27 Merge pull request #63 from ipnet-mesh/claude/admin-node-tags-interface-pHbKm
Add admin interface for managing node tags
2026-01-11 12:51:56 +00:00
Louis King
ed2cf09ff3 Improve admin UI and remove unused coordinate tag type
- Replace node type badge with icon in admin tag editor
- Add Edit/Add Tags button on node detail page (when admin enabled and authenticated)
- Remove automatic seed container startup to prevent overwriting user changes
- Remove unused 'coordinate' value type from node tags (only string, number, boolean remain)
2026-01-11 12:49:34 +00:00
Claude
bec736a894 Sort node dropdown alphabetically in admin interface
Nodes in the dropdown are now sorted alphabetically by name,
with unnamed nodes appearing at the end.
2026-01-11 12:01:11 +00:00
Claude
1457360703 Use API_ADMIN_KEY for web service to enable admin operations
The web admin interface needs write permissions to create, update,
move, and delete node tags. Changed to use API_ADMIN_KEY with
fallback to API_READ_KEY if admin key is not configured.
2026-01-11 11:55:15 +00:00
Claude
d8a0f2abb8 Fix security vulnerabilities and add validation
- Fix XSS vulnerability by using data attributes instead of inline
  onclick handlers in node_tags.html template
- Fix URL injection by using urlencode for all redirect URL parameters
- Add validation to reject moves where source and destination nodes
  are the same (returns 400 Bad Request)
- Add error handling for response.json() calls that may fail
- Add missing test coverage for update endpoint error scenarios
2026-01-11 11:51:57 +00:00
Claude
367f838371 Add admin interface for managing node tags
Implement CRUD operations for NodeTags in the admin interface:

- Add NodeTagMove schema for moving tags between nodes
- Add PUT /nodes/{public_key}/tags/{key}/move API endpoint
- Add web routes at /a/node-tags for tag management
- Create admin templates with node selector and tag management UI
- Support editing, adding, moving, and deleting tags via API calls
- Add comprehensive tests for new functionality

The interface allows selecting a node from a dropdown, viewing its
tags, and performing all CRUD operations including moving a tag
to a different node without having to delete and recreate it.
2026-01-11 01:34:07 +00:00
Louis King
741dd3ce84 Initial admin commit 2026-01-11 00:42:57 +00:00
JingleManSweep
0a12f389df Merge pull request #62 from ipnet-mesh/feature/contact-gps
Store Node GPS Coordinates
2026-01-09 20:17:40 +00:00
Louis King
8240c2fd57 Initial commit 2026-01-09 20:07:36 +00:00
Louis King
38f7fe291e Add member filtering to map page using member_id tag
Change the map filter from matching nodes by public_key to using the
member_id tag system. Now populates the member dropdown with all members
from the database and filters nodes based on their member_id tag value.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-09 19:16:15 +00:00
JingleManSweep
e4087efbf0 Merge pull request #61 from ipnet-mesh/feature/ui-improvements
Remove SNR column from messages and add last seen to members
2026-01-08 21:25:03 +00:00
Louis King
3051984fb9 Remove SNR column from messages and add last seen to members
- Remove SNR column from messages list (no longer provided by meshcore library)
- Add relative "last seen" time to nodes on members page with tooltip
- Add populateRelativeTimeElements() utility for time elements
2026-01-08 21:23:14 +00:00
JingleManSweep
eea2c90ea4 Merge pull request #58 from ipnet-mesh/feature/ui-improvements
Add member/node filters, mobile card views, and pagination macro
2026-01-08 20:15:54 +00:00
Louis King
d52c23fc29 Add member/node filters, mobile card views, and pagination macro
- Add member_id filter to nodes and advertisements API endpoints
- Add member and node dropdowns to web list pages
- Implement responsive mobile card view for nodes and advertisements
- Extract pagination into reusable Jinja2 macro (_macros.html)
- Fix Python version in README (3.11+ -> 3.13+)
2026-01-08 20:13:49 +00:00
Louis King
a1fb71ce65 Add responsive mobile card view for messages page 2026-01-08 16:50:29 +00:00
JingleManSweep
6a5549081f Merge pull request #56 from ipnet-mesh/fix/receiver-contact-cleanup
Add contact cleanup to interface RECEIVER mode
2026-01-08 10:28:26 +00:00
Louis King
68e24ee886 Fix 2026-01-08 10:26:31 +00:00
Louis King
61d6b6287e Add contact cleanup to interface RECEIVER mode
- Add CONTACT_CLEANUP_ENABLED and CONTACT_CLEANUP_DAYS settings
- Implement remove_contact and schedule_remove_contact on device classes
- During contact sync, remove stale contacts from companion node
- Stale contacts (not advertised for > N days) are not published to MQTT
- Update Python version to 3.13 across project config
- Remove brittle config tests that assumed default env values
2026-01-08 10:22:27 +00:00
Louis King
7007c84577 Updated screenshot 2025-12-08 23:45:22 +00:00
Louis King
fd928d9fea Updated diagrams 2025-12-08 23:40:52 +00:00
Louis King
68b6aa85cd Updated diagrams 2025-12-08 23:39:25 +00:00
Louis King
abbc07edb3 Updated diagrams 2025-12-08 23:37:13 +00:00
Louis King
b42add310e Updated diagrams 2025-12-08 23:36:13 +00:00
Louis King
98a5526e80 Updated diagrams 2025-12-08 23:34:28 +00:00
Louis King
db86b3198e Some minor UI improvements, updated env.example, and docs 2025-12-08 23:06:04 +00:00
Louis King
cd4f0b91dc Various UI improvements 2025-12-08 22:07:46 +00:00
Louis King
a290db0491 Updated chart stats 2025-12-08 19:37:45 +00:00
Louis King
92b0b883e6 More website improvements 2025-12-08 17:07:39 +00:00
Louis King
9e621c0029 Fixed test 2025-12-08 16:42:13 +00:00
Louis King
a251f3a09f Added map to node detail page, made title consistent with emoji 2025-12-08 16:37:53 +00:00
Louis King
0fdedfe5ba Tidied Advert/Node search 2025-12-08 16:22:08 +00:00
Louis King
243a3e8521 Added truncate CLI command 2025-12-08 15:54:32 +00:00
JingleManSweep
b24a6f0894 Merge pull request #54 from ipnet-mesh/feature/more-filters
Fixed Member model
2025-12-08 15:15:04 +00:00
Louis King
57f51c741c Fixed Member model 2025-12-08 15:13:24 +00:00
Louis King
65b8418af4 Fixed last seen issue 2025-12-08 00:15:25 +00:00
JingleManSweep
89ceee8741 Merge pull request #51 from ipnet-mesh/feat/sync-receiver-contacts-on-advert
Receiver nodes now sync contacts to MQTT on every advert received
2025-12-07 23:36:11 +00:00
Louis King
64ec1a7135 Receiver nodes now sync contacts to MQTT on every advert received 2025-12-07 23:34:33 +00:00
JingleManSweep
3d632a94b1 Merge pull request #50 from ipnet-mesh/feat/remove-friendly-name
Removed friendly name support and tidied tags
2025-12-07 23:03:39 +00:00
Louis King
fbd29ff78e Removed friendly name support and tidied tags 2025-12-07 23:02:19 +00:00
179 changed files with 20206 additions and 4082 deletions

103
.agentmap.yaml Normal file
View File

@@ -0,0 +1,103 @@
# MeshCore Hub — codebase orientation map
# See: https://github.com/anthropics/agentmap
meta:
project: meshcore-hub
version: 1
updated: "2026-02-27"
stack:
- python 3.13
- fastapi
- sqlalchemy (async)
- paho-mqtt
- click
- lit-html SPA
- tailwind + daisyui
- sqlite
tasks:
install: "pip install -e '.[dev]'"
test: "pytest"
run: "meshcore-hub api --reload"
lint: "pre-commit run --all-files"
tree:
src/meshcore_hub/:
__main__.py: "Click CLI entry point, registers subcommands"
common/:
config.py: "pydantic-settings, all env vars [config]"
database.py: "async SQLAlchemy session management"
mqtt.py: "MQTT client helpers"
i18n.py: "translation loader, t() function"
models/:
base.py: "Base, UUIDMixin, TimestampMixin"
node.py: null
member.py: null
advertisement.py: null
message.py: null
telemetry.py: null
node_tag.py: null
schemas/:
events.py: "inbound MQTT event schemas"
commands.py: "outbound command schemas"
nodes.py: "API request/response schemas"
members.py: null
messages.py: null
interface/:
receiver.py: "reads device events, publishes to MQTT"
sender.py: "subscribes MQTT commands, writes to device"
device.py: "meshcore library wrapper"
mock_device.py: "fake device for testing"
collector/:
subscriber.py: "MQTT subscriber, routes events to handlers"
handlers/: "per-event-type DB persistence"
cleanup.py: "data retention and node cleanup"
webhook.py: "forward events to HTTP endpoints"
tag_import.py: "seed node tags from YAML"
member_import.py: "seed members from YAML"
api/:
app.py: "FastAPI app factory"
auth.py: "API key authentication"
dependencies.py: "DI for db session and auth"
metrics.py: "Prometheus /metrics endpoint"
routes/: "REST endpoints per resource"
web/:
app.py: "FastAPI app factory, SPA shell"
pages.py: "custom markdown page loader"
middleware.py: null
templates/:
spa.html: "single Jinja2 shell template"
static/js/spa/:
app.js: "SPA entry, route registration"
router.js: "History API client-side router"
api.js: "fetch wrapper for API calls"
components.js: "shared lit-html helpers, t() re-export"
icons.js: "SVG icon functions"
pages/: "lazy-loaded page modules"
alembic/: "DB migrations"
etc/:
prometheus/: "Prometheus scrape + alert rules"
alertmanager/: null
seed/: "YAML seed data (node_tags, members)"
tests/:
key_symbols:
- src/meshcore_hub/__main__.py::cli — Click root group [entry-point]
- src/meshcore_hub/common/config.py::CommonSettings — shared env config base
- src/meshcore_hub/common/database.py::DatabaseManager — async session factory
- src/meshcore_hub/common/models/base.py::Base — declarative base for all models
- src/meshcore_hub/api/app.py::create_app — API FastAPI factory
- src/meshcore_hub/web/app.py::create_app — Web FastAPI factory
- src/meshcore_hub/api/auth.py::require_read — read-key auth dependency
- src/meshcore_hub/api/auth.py::require_admin — admin-key auth dependency
- src/meshcore_hub/collector/subscriber.py::MQTTSubscriber — event ingestion loop
- src/meshcore_hub/interface/receiver.py::Receiver — device→MQTT bridge
- src/meshcore_hub/interface/sender.py::Sender — MQTT→device bridge
conventions:
- four Click subcommands: interface, collector, api, web
- "MQTT topic pattern: {prefix}/{pubkey}/event/{name} and .../command/{name}"
- env config via pydantic-settings, no manual os.environ
- web SPA: ES modules + lit-html, pages export async render()
- i18n via t() with JSON locale files in static/locales/
- node tags are freeform key-value pairs, standard keys in AGENTS.md

View File

@@ -0,0 +1,60 @@
---
allowed-tools: Bash(gh label list:*),Bash(gh issue view:*),Bash(gh issue edit:*),Bash(gh search:*)
description: Apply labels to GitHub issues
---
You're an issue triage assistant for GitHub issues. Your task is to analyze the issue and select appropriate labels from the provided list.
IMPORTANT: Don't post any comments or messages to the issue. Your only action should be to apply labels.
Issue Information:
- REPO: ${{ github.repository }}
- ISSUE_NUMBER: ${{ github.event.issue.number }}
TASK OVERVIEW:
1. First, fetch the list of labels available in this repository by running: `gh label list`. Run exactly this command with nothing else.
2. Next, use gh commands to get context about the issue:
- Use `gh issue view ${{ github.event.issue.number }}` to retrieve the current issue's details
- Use `gh search issues` to find similar issues that might provide context for proper categorization
- You have access to these Bash commands:
- Bash(gh label list:\*) - to get available labels
- Bash(gh issue view:\*) - to view issue details
- Bash(gh issue edit:\*) - to apply labels to the issue
- Bash(gh search:\*) - to search for similar issues
3. Analyze the issue content, considering:
- The issue title and description
- The type of issue (bug report, feature request, question, etc.)
- Technical areas mentioned
- Severity or priority indicators
- User impact
- Components affected
4. Select appropriate labels from the available labels list provided above:
- Choose labels that accurately reflect the issue's nature
- Be specific but comprehensive
- IMPORTANT: Add a priority label (P1, P2, or P3) based on the label descriptions from gh label list
- Consider platform labels (android, ios) if applicable
- If you find similar issues using gh search, consider using a "duplicate" label if appropriate. Only do so if the issue is a duplicate of another OPEN issue.
5. Apply the selected labels:
- Use `gh issue edit` to apply your selected labels
- DO NOT post any comments explaining your decision
- DO NOT communicate directly with users
- If no labels are clearly applicable, do not apply any labels
IMPORTANT GUIDELINES:
- Be thorough in your analysis
- Only select labels from the provided list above
- DO NOT post any comments to the issue
- Your ONLY action should be to apply labels using gh issue edit
- It's okay to not add any labels if none are clearly applicable
---

View File

@@ -0,0 +1,44 @@
---
name: documentation
description: Audit and update project documentation to accurately reflect the current codebase. Use when documentation may be outdated, after significant code changes, or when the user asks to review or update docs.
---
# Documentation Audit
Audit and update all project documentation so it accurately reflects the current state of the codebase. Documentation must only describe features, options, configurations, and functionality that actually exist in the code.
## Files to Review
- **README.md** - Project overview, setup instructions, usage examples
- **AGENTS.md** - AI coding assistant guidelines, project structure, conventions
- **.env.example** - Example environment variables
Also check for substantial comments or inline instructions within the codebase that may be outdated.
## Process
1. **Read all documentation files** listed above in full.
2. **Cross-reference against the codebase.** For every documented item (features, env vars, CLI commands, routes, models, directory paths, conventions), search the code to verify:
- It actually exists.
- Its described behavior matches the implementation.
- File paths and directory structures are accurate.
3. **Identify and fix discrepancies:**
- **Version updates** — ensure documentation reflects any new/updated/removed versions. Check .python-version, pyproject.toml, etc.
- **Stale/legacy content** — documented but no longer in the code. Remove it.
- **Missing content** — exists in the code but not documented. Add it.
- **Inaccurate descriptions** — documented behavior doesn't match implementation. Correct it.
4. **Apply updates** to each file. Preserve existing style and structure.
5. **Verify consistency** across all documentation files — they must not contradict each other.
## Rules
- Do NOT invent features or options that don't exist in the code.
- Do NOT remove documentation for features that DO exist.
- Do NOT change the fundamental structure or style of the docs.
- Do NOT modify CLAUDE.md.
- Focus on accuracy, not cosmetic changes.
- When in doubt, check the source code.

View File

@@ -0,0 +1,49 @@
---
name: git-branch
description: Create a new branch from latest main with the project's naming convention (feat/fix/chore). Use when starting new work on a feature, bug fix, or chore.
---
# Git Branch
Create a new branch from the latest `main` branch using the project's naming convention.
## Arguments
The user may provide arguments in the format: `<type>/<description>`
- `type` — one of `feat`, `fix`, or `chore`
- `description` — short kebab-case description (e.g., `add-map-clustering`)
If not provided, ask the user for the branch type and description.
## Process
1. **Fetch latest main:**
```bash
git fetch origin main
```
2. **Determine branch name:**
- If the user provided arguments (e.g., `/git-branch feat/add-map-clustering`), use them directly.
- Otherwise, ask the user for:
- **Branch type**: `feat`, `fix`, or `chore`
- **Short description**: a brief kebab-case slug describing the work
- Construct the branch name as `{type}/{slug}` (e.g., `feat/add-map-clustering`).
3. **Create and switch to the new branch:**
```bash
git checkout -b {branch_name} origin/main
```
4. **Confirm** by reporting the new branch name to the user.
## Rules
- Branch names MUST follow the `{type}/{slug}` convention.
- Valid types are `feat`, `fix`, and `chore` only.
- The slug MUST be kebab-case (lowercase, hyphens, no spaces or underscores).
- Always branch from `origin/main`, never from the current branch.
- Do NOT push the branch — just create it locally.

View File

@@ -0,0 +1,94 @@
---
name: git-pr
description: Create a pull request to main from the current branch. Runs quality checks, commits changes, pushes, and opens a PR via gh CLI. Use when ready to submit work for review.
---
# Git PR
Create a pull request to `main` from the current feature branch.
## Process
### Phase 1: Pre-flight Checks
1. **Verify branch:**
```bash
git branch --show-current
```
- The current branch must NOT be `main`. If on `main`, tell the user to create a feature branch first (e.g., `/git-branch`).
2. **Check for uncommitted changes:**
```bash
git status
```
- If there are uncommitted changes, ask the user for a commit message and commit them using the `/git-commit` skill conventions (no Claude authoring details).
### Phase 2: Quality Checks
1. **Determine changed components** by comparing against `main`:
```bash
git diff --name-only main...HEAD
```
2. **Run targeted tests** based on changed files:
- `tests/test_web/` for web-only changes (templates, static JS, web routes)
- `tests/test_api/` for API changes
- `tests/test_collector/` for collector changes
- `tests/test_interface/` for interface/sender/receiver changes
- `tests/test_common/` for common models/schemas/config changes
- Run the full `pytest` if changes span multiple components
3. **Run pre-commit checks:**
```bash
pre-commit run --all-files
```
- If checks fail and auto-fix files, commit the fixes and re-run until clean.
4. If tests or checks fail and cannot be auto-fixed, report the issues to the user and stop.
### Phase 3: Push and Create PR
1. **Push the branch to origin:**
```bash
git push -u origin HEAD
```
2. **Generate PR content:**
- **Title**: Derive from the branch name. Convert `feat/add-map-clustering` to `Add map clustering`, `fix/login-error` to `Fix login error`, etc. Keep under 70 characters.
- **Body**: Generate a summary from the commit history:
```bash
git log main..HEAD --oneline
```
3. **Create the PR:**
```bash
gh pr create --title "{title}" --body "$(cat <<'EOF'
## Summary
{bullet points summarizing the changes}
## Test plan
{checklist of testing steps}
EOF
)"
```
4. **Return the PR URL** to the user.
## Rules
- Do NOT create a PR from `main`.
- Do NOT skip quality checks — tests and pre-commit must pass.
- Do NOT force-push.
- Always target `main` as the base branch.
- Keep the PR title concise (under 70 characters).
- If quality checks fail, fix issues or report to the user — do NOT create the PR with failing checks.

View File

@@ -0,0 +1,66 @@
---
name: quality
description: Run the full test suite, pre-commit checks, and re-run tests to ensure code quality. Fixes any issues found. Use after code changes, before commits, or when the user asks to check quality.
---
# Quality Check
Run the full quality pipeline: tests, pre-commit checks, and a verification test run. Fix any issues discovered at each stage.
## Prerequisites
Before running checks, ensure the environment is ready:
1. Check for `.venv` directory — create with `python -m venv .venv` if missing.
2. Activate the virtual environment: `source .venv/bin/activate`
3. Install dependencies: `pip install -e ".[dev]"`
## Process
### Phase 1: Initial Test Run
Run the full test suite to establish a baseline:
```bash
pytest
```
- If tests **pass**, proceed to Phase 2.
- If tests **fail**, investigate and fix the failures before continuing. Re-run the failing tests to confirm fixes. Then proceed to Phase 2.
### Phase 2: Pre-commit Checks
Run all pre-commit hooks against the entire codebase:
```bash
pre-commit run --all-files
```
- If all checks **pass**, proceed to Phase 3.
- If checks **fail**:
- Many hooks (black, trailing whitespace, end-of-file) auto-fix issues. Re-run `pre-commit run --all-files` to confirm auto-fixes resolved the issues.
- For remaining failures (flake8, mypy, etc.), investigate and fix manually.
- Re-run `pre-commit run --all-files` until all checks pass.
- Then proceed to Phase 3.
### Phase 3: Verification Test Run
Run the full test suite again to ensure pre-commit fixes (formatting, import sorting, etc.) haven't broken any functionality:
```bash
pytest
```
- If tests **pass**, the quality check is complete.
- If tests **fail**, the pre-commit fixes introduced a regression. Investigate and fix, then re-run both `pre-commit run --all-files` and `pytest` until both pass cleanly.
## Rules
- Always run the FULL test suite (`pytest`), not targeted tests.
- Always run pre-commit against ALL files (`--all-files`).
- Do NOT skip or ignore failing tests — investigate and fix them.
- Do NOT skip or ignore pre-commit failures — investigate and fix them.
- Do NOT modify test assertions to make tests pass unless the test is genuinely wrong.
- Do NOT disable pre-commit hooks or add noqa/type:ignore unless truly justified.
- Fix root causes, not symptoms.
- If a fix requires changes outside the scope of a simple quality fix (e.g., a design change), report it to the user rather than making the change silently.

View File

@@ -0,0 +1,114 @@
---
name: release
description: Full release workflow — quality gate, semantic version tag, push, and GitHub release. Use when ready to cut a new release from main.
---
# Release
Run the full release workflow: quality checks, version tagging, push, and GitHub release creation.
## Arguments
The user may optionally provide a version number (e.g., `/release 1.2.0`). If not provided, one will be suggested based on commit history.
## Process
### Phase 1: Pre-flight Checks
1. **Verify on `main` branch:**
```bash
git branch --show-current
```
- Must be on `main`. If not, tell the user to switch to `main` first.
2. **Verify working tree is clean:**
```bash
git status --porcelain
```
- If there are uncommitted changes, tell the user to commit or stash them first.
3. **Pull latest:**
```bash
git pull origin main
```
### Phase 2: Quality Gate
1. **Run full test suite:**
```bash
pytest
```
2. **Run pre-commit checks:**
```bash
pre-commit run --all-files
```
3. If either fails, report the issues and stop. Do NOT proceed with a release that has failing checks.
### Phase 3: Determine Version
1. **Get the latest tag:**
```bash
git describe --tags --abbrev=0 2>/dev/null || echo "none"
```
2. **List commits since last tag:**
```bash
git log {last_tag}..HEAD --oneline
```
If no previous tag exists, list the last 20 commits:
```bash
git log --oneline -20
```
3. **Determine next version:**
- If the user provided a version, use it.
- Otherwise, suggest a version based on commit prefixes:
- Any commit starting with `feat` or `Add` → **minor** bump
- Only `fix` or `Fix` commits → **patch** bump
- If no previous tag, suggest `0.1.0`
- Present the suggestion and ask the user to confirm or provide a different version.
### Phase 4: Tag and Release
1. **Create annotated tag:**
```bash
git tag -a v{version} -m "Release v{version}"
```
2. **Push tag to origin:**
```bash
git push origin v{version}
```
3. **Create GitHub release:**
```bash
gh release create v{version} --title "v{version}" --generate-notes
```
4. **Report** the release URL to the user.
## Rules
- MUST be on `main` branch with a clean working tree.
- MUST pass all quality checks before tagging.
- Tags MUST follow the `v{major}.{minor}.{patch}` format (e.g., `v1.2.0`).
- Always create an annotated tag, not a lightweight tag.
- Always confirm the version with the user before tagging.
- Do NOT skip quality checks under any circumstances.
- Do NOT force-push tags.

View File

@@ -1,17 +1,40 @@
# MeshCore Hub - Docker Compose Environment Configuration
# MeshCore Hub - Environment Configuration
# Copy this file to .env and customize values
#
# Configuration is grouped by service. Most deployments only need:
# - Common Settings (always required)
# - MQTT Settings (always required)
# - Interface Settings (for receiver/sender services)
#
# The Collector, API, and Web services typically run as a combined "core"
# profile and share the same data directory.
#
# -----------------------------------------------------------------------------
# QUICK START: Receiver/Sender Only
# -----------------------------------------------------------------------------
# For a minimal receiver or sender setup, you only need these settings:
#
# MQTT_HOST=your-mqtt-broker.example.com
# MQTT_PORT=1883
# MQTT_USERNAME=your_username
# MQTT_PASSWORD=your_password
# MQTT_TLS=false
# SERIAL_PORT=/dev/ttyUSB0
#
# Serial ports are typically /dev/ttyUSB[0-9] or /dev/ttyACM[0-9] on Linux.
# -----------------------------------------------------------------------------
# ===================
# Docker Image
# ===================
# =============================================================================
# COMMON SETTINGS
# =============================================================================
# These settings apply to all services
# Docker image version tag to use
# Options: latest, main, v1.0.0, etc.
IMAGE_VERSION=latest
# ===================
# Data & Seed Directories
# ===================
# Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
LOG_LEVEL=INFO
# Base directory for runtime data (database, etc.)
# Default: ./data (relative to docker-compose.yml location)
@@ -19,7 +42,8 @@ IMAGE_VERSION=latest
#
# Structure:
# ${DATA_HOME}/
# └── meshcore.db # SQLite database
# └── collector/
# └── meshcore.db # SQLite database
DATA_HOME=./data
# Directory containing seed data files for import
@@ -32,43 +56,51 @@ DATA_HOME=./data
# └── members.yaml # Network members for import
SEED_HOME=./seed
# ===================
# Common Settings
# ===================
# =============================================================================
# MQTT SETTINGS
# =============================================================================
# MQTT broker connection settings for interface, collector, and API services
# Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
LOG_LEVEL=INFO
# ===================
# MQTT Settings
# ===================
# MQTT Broker connection (for interface/collector/api services)
# When using the local MQTT broker (--profile mqtt), use "mqtt" as host
# MQTT Broker host
# When using the local MQTT broker (--profile mqtt), use "mqtt"
# When using an external broker, set the hostname/IP
MQTT_HOST=mqtt
# MQTT Broker port (default: 1883, or 8883 for TLS)
MQTT_PORT=1883
# MQTT authentication (optional)
MQTT_USERNAME=
MQTT_PASSWORD=
# MQTT topic prefix for all MeshCore messages
MQTT_PREFIX=meshcore
# Enable TLS/SSL for MQTT connection (default: false)
# Enable TLS/SSL for MQTT connection
# When enabled, uses TLS with system CA certificates (e.g., for Let's Encrypt)
# Set to true for secure MQTT connections (port 8883)
MQTT_TLS=false
# MQTT transport protocol
# Options: tcp, websockets
MQTT_TRANSPORT=tcp
# MQTT WebSocket path (used only when MQTT_TRANSPORT=websockets)
# Common values: /mqtt, /
MQTT_WS_PATH=/mqtt
# External port mappings for local MQTT broker (--profile mqtt only)
MQTT_EXTERNAL_PORT=1883
MQTT_WS_PORT=9001
# ===================
# Interface Settings
# ===================
# =============================================================================
# INTERFACE SETTINGS (Receiver/Sender)
# =============================================================================
# Settings for the MeshCore device interface services
# Serial port for receiver device
SERIAL_PORT=/dev/ttyUSB0
# Serial port for sender device (if separate)
# Serial port for sender device (if using separate device)
SERIAL_PORT_SENDER=/dev/ttyUSB1
# Baud rate for serial communication
@@ -83,55 +115,56 @@ MESHCORE_DEVICE_NAME=
NODE_ADDRESS=
NODE_ADDRESS_SENDER=
# ===================
# API Settings
# ===================
# -------------------
# Contact Cleanup Settings (RECEIVER mode only)
# -------------------
# Automatic removal of stale contacts from the MeshCore companion node
# External API port
API_PORT=8000
# Enable automatic removal of stale contacts from companion node
CONTACT_CLEANUP_ENABLED=true
# API Keys for authentication (generate secure keys for production!)
# Example: openssl rand -hex 32
API_READ_KEY=
API_ADMIN_KEY=
# Remove contacts not advertised for this many days
CONTACT_CLEANUP_DAYS=7
# ===================
# Web Dashboard Settings
# ===================
# =============================================================================
# COLLECTOR SETTINGS
# =============================================================================
# The collector subscribes to MQTT events and stores them in the database
# External web port
WEB_PORT=8080
# Collector MQTT ingest mode
# - native: expects <prefix>/<pubkey>/event/<event_name> topics
# - letsmesh_upload: expects LetsMesh observer uploads on
# <prefix>/<pubkey>/(packets|status|internal)
COLLECTOR_INGEST_MODE=native
# Network Information (displayed on web dashboard)
NETWORK_NAME=MeshCore Network
NETWORK_CITY=
NETWORK_COUNTRY=
# LetsMesh decoder support (used only when COLLECTOR_INGEST_MODE=letsmesh_upload)
# Set to false to disable external packet decoding
COLLECTOR_LETSMESH_DECODER_ENABLED=true
# Radio configuration (comma-delimited)
# Format: <profile>,<frequency>,<bandwidth>,<spreading_factor>,<coding_rate>,<tx_power>
# Example: EU/UK Narrow,869.618MHz,62.5kHz,8,8,22dBm
NETWORK_RADIO_CONFIG=
# Decoder command (must be available in container PATH)
# Examples: meshcore-decoder, /usr/local/bin/meshcore-decoder, npx meshcore-decoder
COLLECTOR_LETSMESH_DECODER_COMMAND=meshcore-decoder
# Contact information
NETWORK_CONTACT_EMAIL=
NETWORK_CONTACT_DISCORD=
NETWORK_CONTACT_GITHUB=
# Optional: channel secret keys (comma or space separated) used to decrypt GroupText
# packets. This supports unlimited keys.
# Note: Public + #test keys are built into the collector code by default.
# To show friendly channel names in the web feed, use label=hex (example: bot=ABCDEF...).
# Without keys, encrypted packets cannot be shown as plaintext.
# COLLECTOR_LETSMESH_DECODER_KEYS=
# Welcome text displayed on the homepage (plain text, optional)
# If not set, a default welcome message is shown
NETWORK_WELCOME_TEXT=
# Timeout in seconds per decode invocation
COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=2.0
# ===================
# -------------------
# Webhook Settings
# ===================
# -------------------
# Webhooks forward mesh events to external HTTP endpoints as POST requests
# Webhook for advertisement events (node discovery)
# Events are sent as POST requests with JSON payload
WEBHOOK_ADVERTISEMENT_URL=
WEBHOOK_ADVERTISEMENT_SECRET=
# Webhook for all message events (channel and direct messages)
# Use this for a single endpoint handling all messages
WEBHOOK_MESSAGE_URL=
WEBHOOK_MESSAGE_SECRET=
@@ -147,34 +180,161 @@ WEBHOOK_TIMEOUT=10.0
WEBHOOK_MAX_RETRIES=3
WEBHOOK_RETRY_BACKOFF=2.0
# ===================
# -------------------
# Data Retention Settings
# ===================
# -------------------
# Automatic cleanup of old event data (advertisements, messages, telemetry, etc.)
# Enable automatic cleanup of old event data
# When enabled, the collector runs periodic cleanup to delete old events
# Default: true
DATA_RETENTION_ENABLED=true
# Number of days to retain event data (advertisements, messages, telemetry, etc.)
# Number of days to retain event data
# Events older than this are deleted during cleanup
# Default: 30 days
DATA_RETENTION_DAYS=30
# Hours between automatic cleanup runs (applies to both events and nodes)
# Default: 24 hours (once per day)
# Hours between automatic cleanup runs
# Applies to both event data and node cleanup
DATA_RETENTION_INTERVAL_HOURS=24
# ===================
# -------------------
# Node Cleanup Settings
# ===================
# -------------------
# Automatic removal of inactive nodes
# Enable automatic cleanup of inactive nodes
# Nodes that haven't been seen (last_seen) for the specified period are removed
# Nodes with last_seen=NULL (never seen on network) are NOT removed
# Default: true
NODE_CLEANUP_ENABLED=true
# Remove nodes not seen for this many days (based on last_seen field)
# Default: 7 days
NODE_CLEANUP_DAYS=7
# =============================================================================
# API SETTINGS
# =============================================================================
# REST API for querying data and sending commands
# External API port
API_PORT=8000
# API Keys for authentication
# Generate secure keys for production: openssl rand -hex 32
# Leave empty to disable authentication (not recommended for production)
API_READ_KEY=
API_ADMIN_KEY=
# -------------------
# Prometheus Metrics
# -------------------
# Prometheus metrics endpoint exposed at /metrics on the API service
# Enable Prometheus metrics endpoint
# Default: true
METRICS_ENABLED=true
# Seconds to cache metrics output (reduces database load)
# Default: 60
METRICS_CACHE_TTL=60
# External Prometheus port (when using --profile metrics)
PROMETHEUS_PORT=9090
# External Alertmanager port (when using --profile metrics)
ALERTMANAGER_PORT=9093
# =============================================================================
# WEB DASHBOARD SETTINGS
# =============================================================================
# Web interface for visualizing network status
# External web port
WEB_PORT=8080
# API endpoint URL for the web dashboard
# Default: http://localhost:8000
# API_BASE_URL=http://localhost:8000
# API key for web dashboard queries (optional)
# If API_READ_KEY is set on the API, provide it here
# API_KEY=
# Default theme for the web dashboard (dark or light)
# Users can override via the theme toggle; their preference is saved in localStorage
# Default: dark
# WEB_THEME=dark
# Locale/language for the web dashboard
# Default: en
# Supported: en (see src/meshcore_hub/web/static/locales/ for available translations)
# WEB_LOCALE=en
# Locale used for date/time formatting in the web dashboard
# Controls date ordering only; 24-hour clock is still used by default
# Examples: en-US (MM/DD/YYYY), en-GB (DD/MM/YYYY)
# Default: en-US
# WEB_DATETIME_LOCALE=en-US
# Auto-refresh interval in seconds for list pages (nodes, advertisements, messages)
# Set to 0 to disable auto-refresh
# Default: 30
# WEB_AUTO_REFRESH_SECONDS=30
# Enable admin interface at /a/ (requires auth proxy in front)
# Default: false
# WEB_ADMIN_ENABLED=false
# Timezone for displaying dates/times on the web dashboard
# Uses standard IANA timezone names (e.g., America/New_York, Europe/London)
# Default: UTC
TZ=UTC
# Directory containing custom content (pages/, media/)
# Default: ./content
# CONTENT_HOME=./content
# -------------------
# Network Information
# -------------------
# Displayed on the web dashboard homepage
# Network domain name (optional)
# NETWORK_DOMAIN=
# Network display name
NETWORK_NAME=MeshCore Network
# Network location
NETWORK_CITY=
NETWORK_COUNTRY=
# Radio configuration (comma-delimited)
# Format: <profile>,<frequency>,<bandwidth>,<spreading_factor>,<coding_rate>,<tx_power>
# Example: EU/UK Narrow,869.618MHz,62.5kHz,SF8,CR8,22dBm
NETWORK_RADIO_CONFIG=
# Welcome text displayed on the homepage (optional, plain text)
# If not set, a default welcome message is shown
NETWORK_WELCOME_TEXT=
# -------------------
# Feature Flags
# -------------------
# Control which pages are visible in the web dashboard
# Set to false to completely hide a page (nav, routes, sitemap, robots.txt)
# FEATURE_DASHBOARD=true
# FEATURE_NODES=true
# FEATURE_ADVERTISEMENTS=true
# FEATURE_MESSAGES=true
# FEATURE_MAP=true
# FEATURE_MEMBERS=true
# FEATURE_PAGES=true
# -------------------
# Contact Information
# -------------------
# Contact links displayed in the footer
NETWORK_CONTACT_EMAIL=
NETWORK_CONTACT_DISCORD=
NETWORK_CONTACT_GITHUB=
NETWORK_CONTACT_YOUTUBE=

1
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1 @@
buy_me_a_coffee: jinglemansweep

View File

@@ -5,49 +5,40 @@ on:
branches: [main]
pull_request:
branches: [main]
paths:
- "src/**"
- "tests/**"
- "alembic/**"
- ".python-version"
- "pyproject.toml"
- ".pre-commit-config.yaml"
- ".github/workflows/**"
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.11"
python-version-file: ".python-version"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install black flake8 mypy
pip install -e ".[dev]"
- name: Check formatting with black
run: black --check src/ tests/
- name: Lint with flake8
run: flake8 src/ tests/
- name: Type check with mypy
run: mypy src/
- name: Run pre-commit
uses: pre-commit/action@v3.0.1
test:
name: Test (Python ${{ matrix.python-version }})
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.11"]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
python-version-file: ".python-version"
- name: Install dependencies
run: |
@@ -56,27 +47,35 @@ jobs:
- name: Run tests with pytest
run: |
pytest --cov=meshcore_hub --cov-report=xml --cov-report=term-missing
pytest --cov=meshcore_hub --cov-report=xml --cov-report=term-missing --junitxml=junit.xml -o junit_family=legacy
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
if: matrix.python-version == '3.11'
uses: codecov/codecov-action@v5
if: always()
with:
files: ./coverage.xml
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov
uses: codecov/codecov-action@v5
if: ${{ !cancelled() }}
with:
report_type: test_results
token: ${{ secrets.CODECOV_TOKEN }}
build:
name: Build Package
runs-on: ubuntu-latest
needs: [lint, test]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: "3.11"
python-version-file: ".python-version"
- name: Install build tools
run: |
@@ -87,7 +86,7 @@ jobs:
run: python -m build
- name: Upload artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: dist
path: dist/

View File

@@ -1,49 +0,0 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: read
issues: read
id-token: write
actions: read # Required for Claude to read CI results on PRs
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
# This is an optional setting that allows Claude to read CI results on PRs
additional_permissions: |
actions: read
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
# prompt: 'Update the pull request description to include a summary of changes.'
# Optional: Add claude_args to customize behavior and configuration
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
# claude_args: '--allowed-tools Bash(gh pr:*)'

View File

@@ -3,6 +3,15 @@ name: Docker
on:
push:
branches: [main]
paths:
- "src/**"
- "alembic/**"
- "alembic.ini"
- ".python-version"
- "pyproject.toml"
- "Dockerfile"
- "docker-compose.yml"
- ".github/workflows/**"
tags:
- "v*"
@@ -19,17 +28,17 @@ jobs:
packages: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v4
- name: Log in to Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
uses: docker/login-action@v4
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -37,7 +46,7 @@ jobs:
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v6
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
@@ -48,7 +57,7 @@ jobs:
type=sha
- name: Build and push Docker image
uses: docker/build-push-action@v5
uses: docker/build-push-action@v7
with:
context: .
file: Dockerfile

1
.gitignore vendored
View File

@@ -3,6 +3,7 @@
!example/data/
/seed/
!example/seed/
/content/
# Byte-compiled / optimized / DLL files
__pycache__/

View File

@@ -0,0 +1,100 @@
## TASK-001: Remove legacy HTML dashboard endpoint
**Status:** completed
### Files Modified
- `src/meshcore_hub/api/routes/dashboard.py`
- `tests/test_api/test_dashboard.py`
### Notes
Removed the `dashboard()` route handler and its `@router.get("")` decorator. Removed `HTMLResponse` and `Request` imports no longer used. Updated existing tests to verify the HTML endpoint returns 404/405. All JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) remain intact.
---
## TASK-002: Replace API key comparisons with constant-time comparison
**Status:** completed
### Files Modified
- `src/meshcore_hub/api/auth.py`
- `src/meshcore_hub/api/metrics.py`
### Notes
Added `import hmac` to both files. Replaced `==` comparisons with `hmac.compare_digest()` in `require_read`, `require_admin`, and `verify_basic_auth`. Added truthiness guards for `read_key`/`admin_key` in `require_read` since either can be `None` and `hmac.compare_digest()` raises `TypeError` on `None`.
---
## TASK-003: Add WEB_TRUSTED_PROXY_HOSTS configuration setting
**Status:** completed
### Files Modified
- `src/meshcore_hub/common/config.py`
### Notes
Added `web_trusted_proxy_hosts: str = Field(default="*", ...)` to `WebSettings` class. Automatically configurable via `WEB_TRUSTED_PROXY_HOSTS` env var through Pydantic Settings.
---
## TASK-004: Integrate trusted proxy hosts into web app middleware and add startup warning
**Status:** completed
### Files Modified
- `src/meshcore_hub/web/app.py`
### Notes
Replaced hardcoded `trusted_hosts="*"` in `ProxyHeadersMiddleware` with configured value. If value is `"*"`, passes string directly; otherwise splits on commas. Added startup warning when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS="*"`. `_is_authenticated_proxy_request` unchanged.
---
## TASK-005: Escape config JSON in template script block to prevent XSS breakout
**Status:** completed
### Files Modified
- `src/meshcore_hub/web/app.py`
### Notes
Added `.replace("</", "<\\/")` to `_build_config_json` return value. Prevents `</script>` breakout in the Jinja2 template's `<script>` block. `<\/` is valid JSON per spec and parsed correctly by `JSON.parse()`.
---
## TASK-006: Fix stored XSS in admin node-tags page
**Status:** completed
### Files Modified
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
### Notes
Added `escapeHtml` to imports. Escaped `nodeName` with `escapeHtml()` in copy-all and delete-all confirmation dialogs (2 `unsafeHTML()` calls). Escaped `activeTagKey` with `escapeHtml()` in single tag delete confirmation (`innerHTML` assignment). Translation template `<strong>` tags preserved.
---
## TASK-007: Fix stored XSS in admin members page
**Status:** completed
### Files Modified
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
### Notes
Added `escapeHtml` to imports. Escaped `memberName` with `escapeHtml()` before passing to `t()` in delete confirmation dialog. `innerHTML` retained for `<strong>` tag rendering from translation template.
---
## TASK-008: Write tests for legacy dashboard endpoint removal
**Status:** completed
### Files Modified
- `tests/test_api/test_dashboard.py`
### Notes
Added 5 new tests: 1 for trailing-slash 404/405 verification, 4 for authenticated JSON sub-route responses. Total 20 dashboard tests passing.
---
## TASK-009: Write tests for constant-time API key comparison
**Status:** completed
### Files Modified
- `tests/test_api/test_auth.py`
### Notes
Restructured from 10 tests (2 classes) to 22 tests (4 classes): `TestReadAuthentication` (9), `TestAdminAuthentication` (4), `TestMetricsAuthentication` (7), `TestHealthEndpoint` (2). Added coverage for multi-endpoint read/admin key acceptance, missing auth header rejection, and metrics credential validation.
---
## TASK-010: Write tests for trusted proxy hosts configuration and startup warning
**Status:** completed
### Files Modified
- `tests/test_common/test_config.py`
- `tests/test_web/test_app.py`
### Notes
Added 3 config tests (default value, specific IP, comma-separated list) and 5 web app tests (warning logged with wildcard+admin, no warning with specific hosts, no warning with admin disabled, comma list parsing, wildcard passed as string).
---
## TASK-011: Write tests for config JSON script block escaping
**Status:** completed
### Files Created
- `tests/test_web/test_app.py`
### Notes
Added 5 tests in `TestConfigJsonXssEscaping` class: rendered HTML escaping, normal values unaffected, escaped JSON parseable, direct `_build_config_json` escaping, direct no-escaping-needed.
---
## TASK-012: Update documentation for WEB_TRUSTED_PROXY_HOSTS setting
**Status:** completed
### Files Modified
- `README.md`
- `AGENTS.md`
- `PLAN.md`
### Notes
Added `WEB_TRUSTED_PROXY_HOSTS` to environment variables sections in all three docs. Documented default value (`*`), production recommendation, and startup warning behavior.
---

View File

@@ -0,0 +1,162 @@
# Product Requirements Document
> Source: `.plans/2026/03/09/01-security-fixes/prompt.md`
## Project Overview
This project addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes must be backward-compatible and preserve existing API contracts.
## Goals
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
- Harden API key comparison against timing side-channel attacks
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
## Functional Requirements
### REQ-001: Remove legacy HTML dashboard endpoint
**Description:** Remove the `GET /api/v1/dashboard/` route handler that renders a standalone HTML page with unescaped database content (stored XSS) and no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) must remain intact and unchanged.
**Acceptance Criteria:**
- [ ] The `dashboard()` route handler in `api/routes/dashboard.py` is removed
- [ ] The `HTMLResponse` import is removed (if no longer used)
- [ ] `GET /api/v1/dashboard/` returns 404 or Method Not Allowed
- [ ] `GET /api/v1/dashboard/stats` continues to return valid JSON with authentication
- [ ] `GET /api/v1/dashboard/activity` continues to return valid JSON with authentication
- [ ] `GET /api/v1/dashboard/message-activity` continues to return valid JSON with authentication
- [ ] `GET /api/v1/dashboard/node-count` continues to return valid JSON with authentication
- [ ] Existing API tests for JSON sub-routes still pass
### REQ-002: Use constant-time comparison for API key validation
**Description:** Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent timing side-channel attacks that could leak key material.
**Acceptance Criteria:**
- [ ] All API key comparisons in `api/auth.py` use `hmac.compare_digest()` instead of `==`
- [ ] All credential comparisons in `api/metrics.py` use `hmac.compare_digest()` instead of `==`
- [ ] `hmac` is imported in all files where secret comparison occurs
- [ ] The authentication behavior is unchanged — valid keys are accepted, invalid keys are rejected
- [ ] Tests confirm authentication still works correctly with valid and invalid keys
### REQ-003: Add configurable trusted proxy hosts for admin authentication
**Description:** Add a `WEB_TRUSTED_PROXY_HOSTS` configuration setting that controls which hosts are trusted for proxy authentication headers (`X-Forwarded-User`, `X-Auth-Request-User`, `Authorization: Basic`). The setting defaults to `*` for backward compatibility. A startup warning is emitted when admin is enabled with the wildcard default. The `Authorization: Basic` header check must be preserved for Nginx Proxy Manager compatibility.
**Acceptance Criteria:**
- [ ] A `WEB_TRUSTED_PROXY_HOSTS` setting is added to the configuration (Pydantic Settings)
- [ ] The setting defaults to `*` (backward compatible)
- [ ] `ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`
- [ ] A warning is logged at startup when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`
- [ ] The warning message recommends restricting trusted hosts to the operator's proxy IP
- [ ] The `_is_authenticated_proxy_request` function continues to accept `X-Forwarded-User`, `X-Auth-Request-User`, and `Authorization: Basic` headers
- [ ] OAuth2 proxy setups continue to function correctly
- [ ] Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust to that IP
### REQ-004: Escape config JSON in template script block
**Description:** Prevent XSS via `</script>` breakout in the `config_json|safe` template injection by escaping `</` sequences in the serialized JSON string before passing it to the Jinja2 template.
**Acceptance Criteria:**
- [ ] `config_json` is escaped by replacing `</` with `<\\/` before template rendering (in `web/app.py`)
- [ ] The `|safe` filter continues to be used (the escaping happens in Python, not Jinja2)
- [ ] A config value containing `</script><script>alert(1)</script>` does not execute JavaScript
- [ ] The SPA application correctly parses the escaped config JSON on the client side
- [ ] Normal config values (without special characters) render unchanged
### REQ-005: Fix stored XSS in admin page JavaScript
**Description:** Sanitize API-sourced data (node names, tag keys, member names) before rendering in admin pages. Replace `unsafeHTML()` and direct `innerHTML` assignment with safe alternatives — either `escapeHtml()` (already available in `components.js`) or lit-html safe templating (`${value}` interpolation without `unsafeHTML`).
**Acceptance Criteria:**
- [ ] Node names in `admin/node-tags.js` are escaped or safely templated before HTML rendering
- [ ] Tag keys in `admin/node-tags.js` are escaped or safely templated before HTML rendering
- [ ] Member names in `admin/members.js` are escaped or safely templated before HTML rendering
- [ ] All `unsafeHTML()` calls on API-sourced data in the identified files are replaced with safe alternatives
- [ ] All direct `innerHTML` assignments of API-sourced data in the identified files are replaced with safe alternatives
- [ ] A node name containing `<img src=x onerror=alert(1)>` renders as text, not as an HTML element
- [ ] A member name containing `<script>alert(1)</script>` renders as text, not as executable script
- [ ] Normal names (without special characters) continue to display correctly
## Non-Functional Requirements
### REQ-006: Backward compatibility
**Category:** Reliability
**Description:** All security fixes must maintain backward compatibility with existing deployments. No breaking changes to API contracts, configuration defaults, or deployment workflows.
**Acceptance Criteria:**
- [ ] All existing API endpoints (except the removed HTML dashboard) return the same response format
- [ ] Default configuration values preserve existing behavior without requiring operator action
- [ ] Docker Compose deployments continue to function without configuration changes
- [ ] All existing tests pass after the security fixes are applied
### REQ-007: No regression in authentication flows
**Category:** Security
**Description:** The security hardening must not introduce authentication regressions. Valid credentials must continue to be accepted, and invalid credentials must continue to be rejected, across all authentication methods.
**Acceptance Criteria:**
- [ ] API read key authentication accepts valid keys and rejects invalid keys
- [ ] API admin key authentication accepts valid keys and rejects invalid keys
- [ ] Metrics endpoint authentication (if configured) accepts valid credentials and rejects invalid ones
- [ ] Proxy header authentication continues to work with OAuth2 proxy setups
- [ ] Basic auth header forwarding from Nginx Proxy Manager continues to work
## Technical Constraints and Assumptions
### Constraints
- Python 3.13+ (specified by project `.python-version`)
- Must use `hmac.compare_digest()` from the Python standard library for constant-time comparison
- The `Authorization: Basic` header check in `_is_authenticated_proxy_request` must not be removed or modified to validate credentials server-side — credential validation is the proxy's responsibility
- Changes must not alter existing API response schemas or status codes (except removing the HTML dashboard endpoint)
### Assumptions
- The `escapeHtml()` utility in `components.js` correctly escapes `<`, `>`, `&`, `"`, and `'` characters
- The SPA client-side JavaScript can parse JSON containing escaped `<\/` sequences (standard behavior per JSON spec)
- Operators using proxy authentication have a reverse proxy (e.g., Nginx, Traefik, NPM) in front of MeshCore Hub
## Scope
### In Scope
- Removing the legacy HTML dashboard route handler (C1 + H2)
- Replacing `==` with `hmac.compare_digest()` for all secret comparisons (H1)
- Adding `WEB_TRUSTED_PROXY_HOSTS` configuration and startup warning (H3)
- Escaping `</` in config JSON template injection (H4)
- Fixing `unsafeHTML()`/`innerHTML` XSS in admin JavaScript pages (H5)
- Updating tests to cover the security fixes
- Updating documentation for the new `WEB_TRUSTED_PROXY_HOSTS` setting
### Out of Scope
- MEDIUM severity findings (CORS, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
- Adding rate limiting infrastructure
- Adding Content-Security-Policy or other security headers
- Dependency version pinning or lockfile generation
- Server-side credential validation for Basic auth (proxy responsibility)
## Suggested Tech Stack
| Layer | Technology | Rationale |
|-------|-----------|-----------|
| Secret comparison | `hmac.compare_digest()` (stdlib) | Specified by prompt; constant-time comparison prevents timing attacks |
| Template escaping | Python `str.replace()` | Minimal approach to escape `</` in JSON before Jinja2 rendering |
| Client-side escaping | `escapeHtml()` from `components.js` | Already available in the codebase; standard HTML entity escaping |
| Configuration | Pydantic Settings | Specified by project stack; used for `WEB_TRUSTED_PROXY_HOSTS` |
| Testing | pytest, pytest-asyncio | Specified by project stack |

View File

@@ -0,0 +1,65 @@
# Phase: 01-security-fixes
## Overview
Address CRITICAL and HIGH severity vulnerabilities identified in the MeshCore Hub security audit across API and Web components. These findings represent exploitable vulnerabilities including XSS, timing attacks, authentication bypasses, and insecure defaults.
## Goals
- Eliminate all CRITICAL and HIGH severity security vulnerabilities
- Harden authentication mechanisms against timing attacks and header forgery
- Prevent XSS vectors in both server-rendered HTML and client-side JavaScript
- Secure default MQTT configuration against unauthenticated access
## Requirements
### C1 + H2 — Remove legacy HTML dashboard endpoint
- **File:** `src/meshcore_hub/api/routes/dashboard.py:367-536`
- The `GET /api/v1/dashboard/` endpoint is a standalone HTML page with two CRITICAL/HIGH issues: stored XSS (unescaped DB content in f-string HTML) and missing authentication
- The SPA web dashboard provides a full-featured replacement, making this endpoint redundant
- **Fix:** Remove the `dashboard()` route handler and its `HTMLResponse` import. Keep all JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`) intact.
### H1 — Fix timing attack on API key comparison
- **Files:** `api/auth.py:82,127` | `api/metrics.py:57`
- All secret comparisons use Python `==`, which is not constant-time
- **Fix:** Replace with `hmac.compare_digest()` for all key/credential comparisons
### H3 — Harden admin auth against proxy header forgery
- **File:** `web/app.py:73-86,239`
- Admin access trusts `X-Forwarded-User`, `X-Auth-Request-User`, or `Authorization: Basic` header
- `ProxyHeadersMiddleware(trusted_hosts="*")` accepts forged headers from any client
- The `Authorization: Basic` check must be preserved — it is required by the Nginx Proxy Manager (NPM) Access List setup documented in README.md (NPM validates credentials and forwards the header)
- **Fix:** Add a `WEB_TRUSTED_PROXY_HOSTS` config setting (default `*` for backward compatibility). Pass it to `ProxyHeadersMiddleware(trusted_hosts=...)`. Add a startup warning when `WEB_ADMIN_ENABLED=true` and `trusted_hosts` is still `*`, recommending operators restrict it to their proxy IP. Do NOT remove the Basic auth header check or validate credentials server-side — that is the proxy's responsibility.
### H4 — Fix XSS via config_json|safe script block breakout
- **File:** `web/templates/spa.html:188` | `web/app.py:157-183`
- Operator config values injected into `<script>` block with `|safe` — a value containing `</script>` breaks out and executes arbitrary JS
- **Fix:** Escape `</` sequences in the JSON string: `config_json = json.dumps(config).replace("</", "<\\/")`
### H5 — Fix stored XSS via unsafeHTML/innerHTML with API-sourced data
- **Files:** `web/static/js/spa/pages/admin/node-tags.js:243,272,454` | `admin/members.js:309`
- Node names, tag keys, and member names from the API are interpolated into HTML via `unsafeHTML()` and direct `innerHTML` assignment
- **Fix:** Use `escapeHtml()` (already in `components.js`) on API data before HTML interpolation, or replace with lit-html safe templating
## Constraints
- Must not break existing functionality or API contracts
- Changes to docker-compose.yml and mosquitto.conf must remain backward-compatible (use env var defaults)
- The `_is_authenticated_proxy_request` function must continue to work with OAuth2 proxy setups — only add defense-in-depth, don't remove proxy header support entirely
## Out of Scope
- MEDIUM severity findings (CORS config, error detail leakage, rate limiting, security headers, CSRF, CDN SRI, markdown sanitization, input validation, channel key exposure)
- LOW severity findings (auth warnings, version disclosure, unbounded fields, credential logging, SecretStr, port exposure, cache safety, image pinning)
- INFO findings (OpenAPI docs, proxy IP logging, alertmanager comments, DOM XSS in error handler, locale path)
- Adding rate limiting infrastructure
- Adding Content-Security-Policy or other security headers
- Dependency version pinning or lockfile generation
## References
- Security audit performed in this conversation (2026-03-09)
- OWASP Top 10: XSS (A7:2017), Broken Authentication (A2:2017)
- Python `hmac.compare_digest` documentation
- FastAPI security best practices

View File

@@ -0,0 +1,54 @@
# Code review round 001
# Phase: .plans/2026/03/09/01-security-fixes
# Scope: full
# Generated by: /jp-codereview
issues:
- id: "ISSUE-001"
severity: "MINOR"
category: "integration"
file: "src/meshcore_hub/web/app.py"
line: 251
description: |
The startup warning for insecure trusted proxy hosts checks `settings.web_admin_enabled`
instead of the effective admin_enabled value that gets stored in `app.state.admin_enabled`.
The `create_app()` function accepts an `admin_enabled` parameter (line 193) that can override
the setting. If a caller passes `admin_enabled=True` but `settings.web_admin_enabled` is False,
the warning will not fire despite admin being enabled. In practice this does not affect production
deployments (CLI always uses the settings value), only programmatic/test usage.
suggestion: |
Consider computing the effective admin_enabled value before the warning check and using
that for both the warning and `app.state.admin_enabled`, e.g.:
`effective_admin = admin_enabled if admin_enabled is not None else settings.web_admin_enabled`
related_tasks:
- "TASK-004"
- id: "ISSUE-002"
severity: "MINOR"
category: "style"
file: "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
line: 3
description: |
The `unsafeHTML` import is retained and still used on lines 243 and 272. Although the
API-sourced data (`nodeName`) is now safely escaped via `escapeHtml()` before interpolation,
the continued use of `unsafeHTML()` may confuse future reviewers into thinking the XSS
fix is incomplete. The `unsafeHTML()` is needed to render the translation template's HTML
tags (e.g., `<strong>`), so this is functionally correct.
suggestion: |
Add a brief inline comment above each `unsafeHTML()` call explaining that the dynamic
values are pre-escaped and `unsafeHTML()` is only needed for the template's HTML formatting.
related_tasks:
- "TASK-006"
summary:
total_issues: 2
critical: 0
major: 0
minor: 2
by_category:
integration: 1
architecture: 0
security: 0
duplication: 0
error-handling: 0
style: 1

View File

@@ -0,0 +1,70 @@
# PRD Review
> Phase: `.plans/2026/03/09/01-security-fixes`
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
> Prompt: `.plans/2026/03/09/01-security-fixes/prompt.md`
## Verdict: PASS
The PRD fully covers all five security requirements from the prompt with clear, implementable, and testable acceptance criteria. No contradictions, blocking ambiguities, or feasibility concerns were found. One prompt goal ("Secure default MQTT configuration") has no corresponding requirement in either the prompt or the PRD, but since no prompt requirement addresses it, the PRD correctly does not fabricate one.
## Coverage Assessment
| Prompt Item | PRD Section | Covered? | Notes |
|---|---|---|---|
| C1+H2: Remove legacy HTML dashboard endpoint | REQ-001 | Yes | Route removal, import cleanup, sub-route preservation all specified |
| H1: Fix timing attack on API key comparison | REQ-002 | Yes | Files and `hmac.compare_digest()` approach match |
| H3: Harden admin auth / proxy header forgery | REQ-003 | Yes | Config setting, default, warning, Basic auth preservation all covered |
| H4: Fix XSS via config_json\|safe breakout | REQ-004 | Yes | Escape approach and XSS test payload specified |
| H5: Fix stored XSS via unsafeHTML/innerHTML | REQ-005 | Yes | Files, fix approach, and XSS test payloads specified |
| Constraint: No breaking changes to API contracts | REQ-006 | Yes | |
| Constraint: docker-compose.yml/mosquitto.conf backward-compatible | REQ-006 | Partial | REQ-006 covers Docker Compose but not mosquitto.conf; moot since no requirement changes mosquitto.conf |
| Constraint: _is_authenticated_proxy_request works with OAuth2 | REQ-003, REQ-007 | Yes | |
| Goal: Secure default MQTT configuration | -- | No | Goal stated in prompt but no prompt requirement addresses it; PRD correctly does not fabricate one |
| Out of scope items | Scope section | Yes | All exclusions match prompt |
**Coverage summary:** 5 of 5 prompt requirements fully covered, 1 constraint partially covered (moot), 1 prompt goal has no corresponding requirement in the prompt itself.
## Requirement Evaluation
All requirements passed evaluation. Minor observations noted below.
### REQ-003: Add configurable trusted proxy hosts
- **Implementability:** Pass -- A developer familiar with Pydantic Settings and `ProxyHeadersMiddleware` can implement this without ambiguity. The env var format (comma-separated list vs. single value) is not explicitly stated but follows standard Pydantic patterns.
- **Testability:** Pass
- **Completeness:** Pass
- **Consistency:** Pass
### REQ-006: Backward compatibility
- **Implementability:** Pass
- **Testability:** Pass
- **Completeness:** Pass -- The prompt constraint about mosquitto.conf backward compatibility is not explicitly mentioned, but no requirement modifies mosquitto.conf, making this moot.
- **Consistency:** Pass
## Structural Issues
### Contradictions
None found.
### Ambiguities
None that would block implementation. The `WEB_TRUSTED_PROXY_HOSTS` env var format is a minor detail resolvable by the developer from the `ProxyHeadersMiddleware` API and standard Pydantic Settings patterns.
### Missing Edge Cases
None significant. The `hmac.compare_digest()` change (REQ-002) assumes the existing code handles the "no key configured" case before reaching the comparison, which is standard practice and verifiable during implementation.
### Feasibility Concerns
None.
### Scope Inconsistencies
The prompt states a goal of "Secure default MQTT configuration against unauthenticated access" but provides no requirement for it. The PRD drops this goal without explanation. This is a prompt-level gap, not a PRD-level gap -- the PRD should not invent requirements that the prompt does not specify.
## Action Items
No action items. The PRD is ready for task breakdown.

View File

@@ -0,0 +1,90 @@
# Task Review
> Phase: `.plans/2026/03/09/01-security-fixes`
> Tasks: `.plans/2026/03/09/01-security-fixes/tasks.yaml`
> PRD: `.plans/2026/03/09/01-security-fixes/prd.md`
## Verdict: PASS
The task list is structurally sound, correctly ordered, and fully covers all 7 PRD requirements. The dependency graph is a valid DAG with no cycles or invalid references. No ordering issues, coverage gaps, vague tasks, or invalid fields were found. Two non-blocking warnings are noted: TASK-006 and TASK-007 (frontend XSS fixes) lack corresponding test tasks, and two pairs of independent tasks share output files but modify independent sections.
## Dependency Validation
### Reference Validity
All dependency references are valid. Every task ID referenced in a `dependencies` list corresponds to an existing task in the inventory.
### DAG Validation
The dependency graph is a valid directed acyclic graph. No cycles detected.
Topological layers:
- **Layer 0 (roots):** TASK-001, TASK-002, TASK-003, TASK-005, TASK-006, TASK-007
- **Layer 1:** TASK-004 (depends on TASK-003), TASK-008 (depends on TASK-001), TASK-009 (depends on TASK-002), TASK-011 (depends on TASK-005)
- **Layer 2:** TASK-010 (depends on TASK-003, TASK-004), TASK-012 (depends on TASK-003, TASK-004)
### Orphan Tasks
No orphan tasks detected. All non-root tasks with dependencies are either terminal test/docs tasks (TASK-008, TASK-009, TASK-010, TASK-011, TASK-012) or integration tasks (TASK-004). Root tasks without dependents (TASK-006, TASK-007) are excluded from orphan detection per the review protocol.
## Ordering Check
No blocking ordering issues detected.
**Observation (non-blocking):** Two pairs of independent tasks share output files:
1. **TASK-004 and TASK-005** both modify `src/meshcore_hub/web/app.py` without a dependency between them. TASK-004 modifies `ProxyHeadersMiddleware` (line ~239) and adds a startup warning, while TASK-005 modifies `_build_config_json` (line ~183). These are independent functions in the same file; no actual conflict exists.
2. **TASK-010 and TASK-011** both modify `tests/test_web/test_app.py` without a dependency between them. Both add new test functions to the same test file. No actual conflict exists.
These are not blocking because neither task creates the shared file — both modify existing files in independent sections. Adding artificial dependencies would unnecessarily serialize parallelizable work.
## Coverage Check
### Uncovered Requirements
All PRD requirements are covered.
### Phantom References
No phantom references detected.
**Coverage summary:** 7 of 7 PRD requirements covered by tasks.
| Requirement | Tasks |
|---|---|
| REQ-001 | TASK-001, TASK-008 |
| REQ-002 | TASK-002, TASK-009 |
| REQ-003 | TASK-003, TASK-004, TASK-010, TASK-012 |
| REQ-004 | TASK-005, TASK-011 |
| REQ-005 | TASK-006, TASK-007 |
| REQ-006 | TASK-001, TASK-003, TASK-004, TASK-005, TASK-006, TASK-007, TASK-008, TASK-010, TASK-011, TASK-012 |
| REQ-007 | TASK-002, TASK-004, TASK-009 |
## Scope Check
### Tasks Too Large
No tasks flagged as too large. No task has `estimated_complexity: large`.
### Tasks Too Vague
No tasks flagged as too vague. All tasks have detailed descriptions (>50 chars), multiple testable acceptance criteria, and specific file paths in `files_affected`.
### Missing Test Tasks
Two implementation tasks lack corresponding test tasks:
- **TASK-006** (Fix stored XSS in admin node-tags page) — modifies `admin/node-tags.js` but no test task verifies the XSS fix in this JavaScript file. The acceptance criteria include XSS payload testing, but no automated test is specified. This is a frontend JavaScript change where manual verification or browser-based testing may be appropriate.
- **TASK-007** (Fix stored XSS in admin members page) — modifies `admin/members.js` but no test task verifies the XSS fix in this JavaScript file. Same reasoning as TASK-006.
**Note:** These are warnings, not blocking issues. The project's test infrastructure (`tests/test_web/`) focuses on server-side rendering and API responses. Client-side JavaScript XSS fixes are typically verified through acceptance criteria rather than automated unit tests.
### Field Validation
All tasks have valid fields:
- **Roles:** All `suggested_role` values are valid (`python`, `frontend`, `docs`).
- **Complexity:** All `estimated_complexity` values are valid (`small`, `medium`).
- **Completeness:** All 12 tasks have all required fields (`id`, `title`, `description`, `requirements`, `dependencies`, `suggested_role`, `acceptance_criteria`, `estimated_complexity`, `files_affected`). All list fields have at least one entry.

View File

@@ -0,0 +1,22 @@
status: running
phase_path: .plans/2026/03/09/01-security-fixes
branch: fix/security-fixes
current_phase: summary
current_task: null
fix_round: 0
last_review_round: 1
review_loop_exit_reason: success
quality_gate: pass
tasks:
TASK-001: completed
TASK-002: completed
TASK-003: completed
TASK-004: completed
TASK-005: completed
TASK-006: completed
TASK-007: completed
TASK-008: completed
TASK-009: completed
TASK-010: completed
TASK-011: completed
TASK-012: completed

View File

@@ -0,0 +1,117 @@
# Phase Summary
> Phase: `.plans/2026/03/09/01-security-fixes`
> Generated by: `/jp-summary`
## Project Overview
This phase addresses CRITICAL and HIGH severity vulnerabilities identified in a security audit of MeshCore Hub. The fixes span stored XSS in server-rendered and client-side code, timing attacks on authentication, proxy header forgery, and a legacy endpoint with missing authentication. All changes are backward-compatible and preserve existing API contracts.
### Goals
- Eliminate all CRITICAL and HIGH severity security vulnerabilities found in the audit
- Harden API key comparison against timing side-channel attacks
- Prevent XSS vectors in both Jinja2 templates and client-side JavaScript
- Add configurable proxy trust to defend against header forgery while maintaining backward compatibility
- Remove the redundant legacy HTML dashboard endpoint that lacks authentication
## Task Execution
### Overview
| Metric | Value |
|---|---|
| Total tasks | 12 |
| Completed | 12 |
| Failed | 0 |
| Blocked | 0 |
| Skipped | 0 |
### Task Details
| ID | Title | Role | Complexity | Status |
|---|---|---|---|---|
| TASK-001 | Remove legacy HTML dashboard endpoint | python | small | completed |
| TASK-002 | Replace API key comparisons with constant-time comparison | python | small | completed |
| TASK-003 | Add WEB_TRUSTED_PROXY_HOSTS configuration setting | python | small | completed |
| TASK-004 | Integrate trusted proxy hosts into web app middleware and add startup warning | python | medium | completed |
| TASK-005 | Escape config JSON in template script block to prevent XSS breakout | python | small | completed |
| TASK-006 | Fix stored XSS in admin node-tags page | frontend | medium | completed |
| TASK-007 | Fix stored XSS in admin members page | frontend | small | completed |
| TASK-008 | Write tests for legacy dashboard endpoint removal | python | small | completed |
| TASK-009 | Write tests for constant-time API key comparison | python | small | completed |
| TASK-010 | Write tests for trusted proxy hosts configuration and startup warning | python | medium | completed |
| TASK-011 | Write tests for config JSON script block escaping | python | small | completed |
| TASK-012 | Update documentation for WEB_TRUSTED_PROXY_HOSTS setting | docs | small | completed |
### Requirement Coverage
| Metric | Value |
|---|---|
| Total PRD requirements | 7 |
| Requirements covered by completed tasks | 7 |
| Requirements with incomplete coverage | 0 |
All functional requirements (REQ-001 through REQ-005) and non-functional requirements (REQ-006, REQ-007) are fully covered by completed tasks.
## Files Created and Modified
### Created
- `tests/test_web/test_app.py`
### Modified
- `src/meshcore_hub/api/routes/dashboard.py`
- `src/meshcore_hub/api/auth.py`
- `src/meshcore_hub/api/metrics.py`
- `src/meshcore_hub/common/config.py`
- `src/meshcore_hub/web/app.py`
- `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js`
- `src/meshcore_hub/web/static/js/spa/pages/admin/members.js`
- `tests/test_api/test_dashboard.py`
- `tests/test_api/test_auth.py`
- `tests/test_common/test_config.py`
- `README.md`
- `AGENTS.md`
- `PLAN.md`
## Review Rounds
### Overview
| Metric | Value |
|---|---|
| Total review rounds | 1 |
| Total issues found | 2 |
| Issues fixed | 2 |
| Issues deferred | 0 |
| Issues remaining | 0 |
| Regressions introduced | 0 |
### Round Details
#### Round 1 (scope: full)
- **Issues found:** 2 (0 CRITICAL, 0 MAJOR, 2 MINOR)
- **Issues fixed:** 2 (both MINOR issues were addressed post-review)
- **Exit reason:** success (no CRITICAL or MAJOR issues)
## Known Issues and Deferred Items
No known issues. Both MINOR issues identified in the code review were addressed:
- **ISSUE-001** (MINOR, integration) -- Startup warning for proxy hosts used `settings.web_admin_enabled` instead of the effective admin_enabled value. Fixed by computing `effective_admin` before the warning check.
- **ISSUE-002** (MINOR, style) -- `unsafeHTML()` calls on pre-escaped data lacked explanatory comments. Fixed by adding inline comments explaining that dynamic values are pre-escaped.
## Decisions
- **Truthiness guards for `hmac.compare_digest()`** -- Added `read_key and ...` / `admin_key and ...` guards in `require_read` because either key can be `None` when only one is configured, and `hmac.compare_digest()` raises `TypeError` on `None` arguments. This ensures the existing behavior of accepting either key type when configured.
- **`unsafeHTML()` retained with `escapeHtml()` pre-processing** -- The `unsafeHTML()` calls in admin JS pages were retained because translation strings contain intentional HTML formatting tags (e.g., `<strong>`). API-sourced data is escaped before interpolation, making this pattern safe.
- **`innerHTML` retained for tag delete confirmation** -- The delete confirmation in `node-tags.js` uses `innerHTML` because the translation template includes `<span>` formatting. The dynamic tag key is escaped with `escapeHtml()` before interpolation.
## Suggested Next Steps
1. Run full manual testing of admin pages (node-tags, members) with XSS payloads to verify fixes in a browser environment.
2. Test `WEB_TRUSTED_PROXY_HOSTS` with a real reverse proxy (Traefik/Nginx) to verify proxy header trust restriction works as expected.
3. Push commits and create a pull request for merge into `main`.

View File

@@ -0,0 +1,401 @@
# Task list generated from PRD: .plans/2026/03/09/01-security-fixes/prd.md
# Generated by: /jp-task-list
tasks:
- id: "TASK-001"
title: "Remove legacy HTML dashboard endpoint"
description: |
Remove the `dashboard()` route handler from `src/meshcore_hub/api/routes/dashboard.py` (lines ~367-536).
This handler renders a standalone HTML page using f-string HTML with unescaped database content (stored XSS)
and has no authentication. The JSON sub-routes (`/stats`, `/activity`, `/message-activity`, `/node-count`)
must remain intact and unchanged.
Specifically:
1. Delete the `dashboard()` async function and its `@router.get("")` decorator (the handler that returns HTMLResponse).
2. Remove the `HTMLResponse` import from `fastapi.responses` if it is no longer used by any remaining route.
3. Verify that `GET /api/v1/dashboard/stats`, `/activity`, `/message-activity`, and `/node-count` still function.
requirements:
- "REQ-001"
- "REQ-006"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "The `dashboard()` route handler is removed from `api/routes/dashboard.py`"
- "`HTMLResponse` import is removed if no longer used"
- "`GET /api/v1/dashboard/` returns 404 or 405"
- "`GET /api/v1/dashboard/stats` returns valid JSON with authentication"
- "`GET /api/v1/dashboard/activity` returns valid JSON with authentication"
- "`GET /api/v1/dashboard/message-activity` returns valid JSON with authentication"
- "`GET /api/v1/dashboard/node-count` returns valid JSON with authentication"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/api/routes/dashboard.py"
- id: "TASK-002"
title: "Replace API key comparisons with constant-time comparison"
description: |
Replace all Python `==` comparisons of API keys and credentials with `hmac.compare_digest()` to prevent
timing side-channel attacks.
In `src/meshcore_hub/api/auth.py`:
1. Add `import hmac` at the top of the file.
2. Line ~82 in `require_read`: replace `if token == read_key or token == admin_key:` with
`if hmac.compare_digest(token, read_key) or hmac.compare_digest(token, admin_key):`.
3. Line ~127 in `require_admin`: replace `if token == admin_key:` with
`if hmac.compare_digest(token, admin_key):`.
In `src/meshcore_hub/api/metrics.py`:
1. Add `import hmac` at the top of the file.
2. Line ~57: replace `return username == "metrics" and password == read_key` with
`return hmac.compare_digest(username, "metrics") and hmac.compare_digest(password, read_key)`.
Note: `hmac.compare_digest()` requires both arguments to be strings (or both bytes). The existing code
already works with strings, so no type conversion is needed.
requirements:
- "REQ-002"
- "REQ-007"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "All API key comparisons in `api/auth.py` use `hmac.compare_digest()`"
- "All credential comparisons in `api/metrics.py` use `hmac.compare_digest()`"
- "`hmac` is imported in both files"
- "Valid API keys are accepted and invalid keys are rejected (no behavior change)"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/api/auth.py"
- "src/meshcore_hub/api/metrics.py"
- id: "TASK-003"
title: "Add WEB_TRUSTED_PROXY_HOSTS configuration setting"
description: |
Add a `web_trusted_proxy_hosts` field to the web settings in `src/meshcore_hub/common/config.py`.
1. In the `WebSettings` class (or the relevant settings class containing web config), add:
```python
web_trusted_proxy_hosts: str = Field(default="*", description="Comma-separated list of trusted proxy hosts or '*' for all")
```
2. The field should accept a string value. The `ProxyHeadersMiddleware` in uvicorn accepts either `"*"` or a list of strings.
If the value is `"*"`, pass it directly. Otherwise, split on commas and strip whitespace to produce a list.
This task only adds the configuration field. The middleware integration and startup warning are in TASK-004.
requirements:
- "REQ-003"
- "REQ-006"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "A `web_trusted_proxy_hosts` setting exists in the configuration with default value `*`"
- "The setting can be configured via the `WEB_TRUSTED_PROXY_HOSTS` environment variable"
- "The setting accepts `*` or a comma-separated list of hostnames/IPs"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/common/config.py"
- id: "TASK-004"
title: "Integrate trusted proxy hosts into web app middleware and add startup warning"
description: |
Update `src/meshcore_hub/web/app.py` to use the new `WEB_TRUSTED_PROXY_HOSTS` setting and emit a
startup warning when using the insecure default.
1. Find the `ProxyHeadersMiddleware` addition (line ~239):
```python
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*")
```
Replace the hardcoded `"*"` with the configured value. If the config value is `"*"`, pass `"*"`.
Otherwise, split the comma-separated string into a list of strings.
2. Add a startup warning (in the app factory or lifespan) when `WEB_ADMIN_ENABLED=true` and
`WEB_TRUSTED_PROXY_HOSTS` is `"*"`:
```python
import logging
logger = logging.getLogger(__name__)
if settings.web_admin_enabled and settings.web_trusted_proxy_hosts == "*":
logger.warning(
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
"Consider restricting to your reverse proxy IP for production deployments."
)
```
3. Verify that the `_is_authenticated_proxy_request` function still accepts `X-Forwarded-User`,
`X-Auth-Request-User`, and `Authorization: Basic` headers — do not modify that function.
requirements:
- "REQ-003"
- "REQ-006"
- "REQ-007"
dependencies:
- "TASK-003"
suggested_role: "python"
acceptance_criteria:
- "`ProxyHeadersMiddleware` uses the configured `trusted_hosts` value instead of hardcoded `*`"
- "A warning is logged at startup when admin is enabled and trusted hosts is `*`"
- "The warning recommends restricting trusted hosts to the proxy IP"
- "`_is_authenticated_proxy_request` still accepts all three header types"
- "Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP restricts proxy header trust"
estimated_complexity: "medium"
files_affected:
- "src/meshcore_hub/web/app.py"
- id: "TASK-005"
title: "Escape config JSON in template script block to prevent XSS breakout"
description: |
Prevent XSS via `</script>` breakout in the config JSON template injection in `src/meshcore_hub/web/app.py`.
In the `_build_config_json` function (or wherever `config_json` is prepared for the template, around
line 183), after calling `json.dumps(config)`, escape `</` sequences:
```python
config_json = json.dumps(config).replace("</", "<\\/")
```
This prevents a config value containing `</script><script>alert(1)</script>` from breaking out of the
`<script>` block in `spa.html` (line ~188: `window.__APP_CONFIG__ = {{ config_json|safe }};`).
The `|safe` filter in the template remains unchanged — the escaping happens in Python before the value
reaches Jinja2. The SPA client-side JavaScript can parse JSON containing `<\/` sequences because this
is valid JSON per the spec.
requirements:
- "REQ-004"
- "REQ-006"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "`config_json` is escaped by replacing `</` with `<\\/` before template rendering"
- "The `|safe` filter continues to be used in the template"
- "A config value containing `</script><script>alert(1)</script>` does not execute JavaScript"
- "The SPA application correctly parses the escaped config JSON"
- "Normal config values without special characters render unchanged"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/web/app.py"
- id: "TASK-006"
title: "Fix stored XSS in admin node-tags page"
description: |
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js` to prevent
stored XSS.
Three locations need fixing:
1. **Line ~243** — `unsafeHTML()` with nodeName in copy-all confirmation:
```javascript
<p class="mb-4">${unsafeHTML(t('common.copy_all_entity_description', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
```
Replace `unsafeHTML()` with safe rendering. Either escape `nodeName` with `escapeHtml()` before
passing to `t()`, or use `textContent`-based rendering.
2. **Line ~272** — `unsafeHTML()` with nodeName in delete-all confirmation:
```javascript
<p class="mb-4">${unsafeHTML(t('common.delete_all_entity_confirm', { count: tags.length, entity: t('entities.tags').toLowerCase(), name: nodeName }))}</p>
```
Same fix as above.
3. **Line ~454** — `innerHTML` with tag key in delete confirmation:
```javascript
container.querySelector('#delete_tag_confirm_message').innerHTML = confirmMsg;
```
where `confirmMsg` is built with `activeTagKey` interpolated into an HTML span. Replace `innerHTML`
with `textContent`, or escape `activeTagKey` with `escapeHtml()` before interpolation.
Import `escapeHtml` from `../components.js` if not already imported. The function escapes `<`, `>`,
`&`, `"`, and `'` characters using DOM textContent.
requirements:
- "REQ-005"
- "REQ-006"
dependencies: []
suggested_role: "frontend"
acceptance_criteria:
- "Node names in node-tags.js are escaped before HTML rendering"
- "Tag keys in node-tags.js are escaped before HTML rendering"
- "All `unsafeHTML()` calls on API-sourced data are replaced with safe alternatives"
- "All `innerHTML` assignments of API-sourced data are replaced with safe alternatives"
- "A node name containing `<img src=x onerror=alert(1)>` renders as text"
- "Normal names without special characters display correctly"
estimated_complexity: "medium"
files_affected:
- "src/meshcore_hub/web/static/js/spa/pages/admin/node-tags.js"
- id: "TASK-007"
title: "Fix stored XSS in admin members page"
description: |
Sanitize API-sourced data in `src/meshcore_hub/web/static/js/spa/pages/admin/members.js` to prevent
stored XSS.
**Line ~309** — `innerHTML` with memberName in delete confirmation:
```javascript
container.querySelector('#delete_confirm_message').innerHTML = confirmMsg;
```
where `confirmMsg` is built from `t('common.delete_entity_confirm', { entity: ..., name: memberName })`.
`memberName` comes from `row.dataset.memberName` which is API-sourced data.
Fix by escaping `memberName` with `escapeHtml()` before passing to `t()`, or replace `innerHTML` with
`textContent`.
Import `escapeHtml` from `../components.js` if not already imported.
requirements:
- "REQ-005"
- "REQ-006"
dependencies: []
suggested_role: "frontend"
acceptance_criteria:
- "Member names in members.js are escaped before HTML rendering"
- "The `innerHTML` assignment of API-sourced data is replaced with a safe alternative"
- "A member name containing `<script>alert(1)</script>` renders as text"
- "Normal member names display correctly"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/web/static/js/spa/pages/admin/members.js"
- id: "TASK-008"
title: "Write tests for legacy dashboard endpoint removal"
description: |
Add or update tests in `tests/test_api/` to verify that the legacy HTML dashboard endpoint is removed
while JSON sub-routes remain functional.
Tests to add/update:
1. `GET /api/v1/dashboard/` returns 404 or 405 (no longer serves HTML).
2. `GET /api/v1/dashboard/stats` returns 200 with valid JSON when authenticated.
3. `GET /api/v1/dashboard/activity` returns 200 with valid JSON when authenticated.
4. `GET /api/v1/dashboard/message-activity` returns 200 with valid JSON when authenticated.
5. `GET /api/v1/dashboard/node-count` returns 200 with valid JSON when authenticated.
Use the existing test fixtures and patterns from `tests/test_api/`. Check `tests/conftest.py` for
available fixtures (test client, db session, auth headers).
requirements:
- "REQ-001"
- "REQ-006"
dependencies:
- "TASK-001"
suggested_role: "python"
acceptance_criteria:
- "Test confirms `GET /api/v1/dashboard/` returns 404 or 405"
- "Tests confirm all four JSON sub-routes return valid JSON with authentication"
- "All tests pass"
estimated_complexity: "small"
files_affected:
- "tests/test_api/test_dashboard.py"
- id: "TASK-009"
title: "Write tests for constant-time API key comparison"
description: |
Add or update tests in `tests/test_api/` to verify that authentication still works correctly after
switching to `hmac.compare_digest()`.
Tests to add/update:
1. Valid read key is accepted by read-protected endpoints.
2. Valid admin key is accepted by admin-protected endpoints.
3. Invalid keys are rejected with 401/403.
4. Valid admin key also grants read access.
5. Metrics endpoint accepts valid credentials and rejects invalid ones (if metrics auth is testable).
These tests verify no behavioral regression from the `==` to `hmac.compare_digest()` change.
Use existing test patterns and fixtures from `tests/test_api/`.
requirements:
- "REQ-002"
- "REQ-007"
dependencies:
- "TASK-002"
suggested_role: "python"
acceptance_criteria:
- "Tests confirm valid read key is accepted"
- "Tests confirm valid admin key is accepted"
- "Tests confirm invalid keys are rejected"
- "Tests confirm metrics auth works correctly"
- "All tests pass"
estimated_complexity: "small"
files_affected:
- "tests/test_api/test_auth.py"
- id: "TASK-010"
title: "Write tests for trusted proxy hosts configuration and startup warning"
description: |
Add tests to verify the `WEB_TRUSTED_PROXY_HOSTS` configuration setting and the startup warning.
Tests to add:
1. Default value of `WEB_TRUSTED_PROXY_HOSTS` is `*`.
2. Setting `WEB_TRUSTED_PROXY_HOSTS` to a specific IP is correctly parsed.
3. Setting `WEB_TRUSTED_PROXY_HOSTS` to a comma-separated list is correctly parsed into a list.
4. A warning is logged when `WEB_ADMIN_ENABLED=true` and `WEB_TRUSTED_PROXY_HOSTS` is `*`.
5. No warning is logged when `WEB_TRUSTED_PROXY_HOSTS` is set to a specific value.
Place config tests in `tests/test_common/` and web app tests in `tests/test_web/`.
requirements:
- "REQ-003"
- "REQ-006"
dependencies:
- "TASK-003"
- "TASK-004"
suggested_role: "python"
acceptance_criteria:
- "Tests confirm default value is `*`"
- "Tests confirm specific IP/list parsing works"
- "Tests confirm startup warning is emitted with wildcard default"
- "Tests confirm no warning when specific hosts are configured"
- "All tests pass"
estimated_complexity: "medium"
files_affected:
- "tests/test_common/test_config.py"
- "tests/test_web/test_app.py"
- id: "TASK-011"
title: "Write tests for config JSON script block escaping"
description: |
Add tests in `tests/test_web/` to verify that the config JSON escaping prevents XSS breakout.
Tests to add:
1. A config value containing `</script><script>alert(1)</script>` is escaped to `<\/script>...` in
the rendered HTML.
2. A config value without special characters renders unchanged.
3. The escaped JSON is still valid and parseable by `json.loads()` (after un-escaping `<\/` back to `</`
if needed, though `json.loads` handles `<\/` fine).
Test by calling the config JSON builder function directly or by checking the rendered template output.
requirements:
- "REQ-004"
- "REQ-006"
dependencies:
- "TASK-005"
suggested_role: "python"
acceptance_criteria:
- "Test confirms `</script>` in config values is escaped to `<\\/script>`"
- "Test confirms normal config values are unaffected"
- "Test confirms escaped JSON is still valid and parseable"
- "All tests pass"
estimated_complexity: "small"
files_affected:
- "tests/test_web/test_app.py"
- id: "TASK-012"
title: "Update documentation for WEB_TRUSTED_PROXY_HOSTS setting"
description: |
Update project documentation to document the new `WEB_TRUSTED_PROXY_HOSTS` environment variable.
Files to update:
1. **README.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the environment variables table with description:
"Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts).
Recommended: set to your reverse proxy IP in production."
2. **AGENTS.md** — Add `WEB_TRUSTED_PROXY_HOSTS` to the Environment Variables section with the same description.
3. **PLAN.md** — If there is a configuration section, add the new variable there as well.
Ensure the documentation notes:
- Default is `*` for backward compatibility
- A startup warning is emitted when using the default with admin enabled
- Operators should set this to their reverse proxy IP in production
requirements:
- "REQ-003"
- "REQ-006"
dependencies:
- "TASK-003"
- "TASK-004"
suggested_role: "docs"
acceptance_criteria:
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in README.md"
- "`WEB_TRUSTED_PROXY_HOSTS` is documented in AGENTS.md"
- "Documentation notes the default value, startup warning, and production recommendation"
estimated_complexity: "small"
files_affected:
- "README.md"
- "AGENTS.md"
- "PLAN.md"

View File

@@ -0,0 +1,81 @@
## TASK-001: Verify meshcore_py v2.3.0+ backwards compatibility
**Status:** completed
### Files Created
_(none)_
### Files Modified
_(none)_
### Notes
Research-only task. meshcore_py v2.3.0 handles multibyte path hashes transparently at the protocol level. Path hash size is self-describing in the wire format (upper 2 bits of path length byte encode hash size). The interface receiver, sender, and device wrapper pass event payloads through without manipulation, so no code changes are needed. pyproject.toml dependency confirmed at meshcore>=2.3.0.
---
## TASK-002: Update _normalize_hash_list to accept variable-length hex strings
**Status:** completed
### Files Created
_(none)_
### Files Modified
- `src/meshcore_hub/collector/letsmesh_normalizer.py`
### Notes
Changed length validation from `if len(token) != 2` to `if len(token) < 2 or len(token) % 2 != 0`. Updated docstring to describe variable-length hex hash support. Existing hex validation and uppercase normalization unchanged. All 98 collector tests pass.
---
## TASK-003: Update Pydantic schema descriptions for path_hashes fields
**Status:** completed
### Files Created
_(none)_
### Files Modified
- `src/meshcore_hub/common/schemas/events.py`
- `src/meshcore_hub/common/schemas/messages.py`
- `src/meshcore_hub/common/models/trace_path.py`
### Notes
Updated TraceDataEvent.path_hashes, TracePathRead.path_hashes, and TracePath model docstring to reflect variable-length hex strings. No Pydantic validators needed changes - both schemas use Optional[list[str]] with no per-element length constraints.
---
## TASK-004: Update SCHEMAS.md documentation for multibyte path hashes
**Status:** completed
### Files Created
_(none)_
### Files Modified
- `SCHEMAS.md`
### Notes
Updated path_hashes field description from "2-character" to variable-length hex. Updated example to include mixed-length hashes ["4a", "b3fa", "02"]. Added firmware v1.14 compatibility note.
---
## TASK-008: Verify web dashboard trace path display handles variable-length hashes
**Status:** completed
### Files Created
_(none)_
### Files Modified
_(none)_
### Notes
Verification-only task. The web dashboard SPA has no trace path page and no JavaScript/CSS code referencing path_hash or pathHash. Trace path data is only served by the REST API which returns path_hashes as list[str] with no length constraints. No changes needed.
---
## TASK-005: Write tests for multibyte path hash normalizer
**Status:** completed
### Files Created
- `tests/test_collector/test_letsmesh_normalizer.py`
### Files Modified
- `tests/test_collector/test_subscriber.py`
### Notes
Created 12 unit tests for _normalize_hash_list covering all 7 required scenarios plus edge cases. Added 2 integration tests to test_subscriber.py verifying multibyte path hashes flow through the full collector pipeline. All 35 collector tests pass.
---
## TASK-006: Write tests for database round-trip of multibyte path hashes
**Status:** completed
### Files Created
_(none)_
### Files Modified
- `tests/test_common/test_models.py`
### Notes
Added 2 new test methods to TestTracePathModel: test_multibyte_path_hashes_round_trip and test_mixed_length_path_hashes_round_trip. Verified JSON column handles variable-length strings natively. All 10 model tests pass. No Alembic migration needed.
---
## TASK-007: Write tests for API trace path responses with multibyte hashes
**Status:** completed
### Files Created
_(none)_
### Files Modified
- `tests/test_api/test_trace_paths.py`
### Notes
Added TestMultibytePathHashes class with 2 tests: list endpoint with multibyte hashes and detail endpoint with mixed-length hashes. All 9 API trace path tests pass.
---

View File

@@ -0,0 +1,146 @@
# Product Requirements Document
> Source: `.plans/2026/03/17/01-multibyte-support/prompt.md`
## Project Overview
MeshCore Hub must be updated to support multibyte path hashes introduced in MeshCore firmware v1.14 and the meshcore_py v2.3.0 Python bindings. Path hashes — node identifiers embedded in trace and route data — were previously fixed at 1 byte (2 hex characters) per hop but can now be multiple bytes, allowing longer repeater IDs at the cost of reduced maximum hops. The update must maintain backwards compatibility with nodes running older single-byte firmware.
## Goals
- Support variable-length (multibyte) path hashes throughout the data pipeline: interface → MQTT → collector → database → API → web dashboard.
- Maintain backwards compatibility so single-byte path hashes from older firmware nodes continue to work without modification.
- Update documentation and schemas to accurately describe the new variable-length path hash format.
## Functional Requirements
### REQ-001: Accept Variable-Length Path Hashes in Collector
**Description:** The collector's event handlers and normalizer must accept path hash strings of any even length (not just 2-character strings). Path hashes arriving from both the meshcore_py interface and LetsMesh-compatible ingest must be processed correctly regardless of byte length.
**Acceptance Criteria:**
- [ ] Path hashes with 2-character values (legacy single-byte) are accepted and stored correctly
- [ ] Path hashes with 4+ character values (multibyte) are accepted and stored correctly
- [ ] Mixed-length path hash arrays (e.g. `["4a", "b3fa", "02"]`) are accepted when the mesh contains nodes with different firmware versions
- [ ] The LetsMesh normalizer handles multibyte `pathHashes` values from decoded payloads
### REQ-002: Update Pydantic Schema Validation for Path Hashes
**Description:** The `path_hashes` field in event and message Pydantic schemas currently describes values as "2-character node hash identifiers". The schema description and any validation constraints must be updated to permit variable-length hex strings.
**Acceptance Criteria:**
- [ ] `TraceDataEvent.path_hashes` field description reflects variable-length hex strings
- [ ] `MessageEventBase.path_hashes` field description reflects variable-length hex strings (if applicable)
- [ ] No schema validation rejects path hash strings longer than 2 characters
### REQ-003: Verify Database Storage Compatibility
**Description:** The `path_hashes` column on the `trace_paths` table uses a JSON column type. Confirm that variable-length path hash strings are stored and retrieved correctly without requiring a schema migration.
**Acceptance Criteria:**
- [ ] Multibyte path hash arrays are round-tripped correctly through SQLAlchemy JSON column (store and retrieve)
- [ ] No Alembic migration is required (JSON column already supports arbitrary string lengths)
### REQ-004: Update API Responses for Variable-Length Path Hashes
**Description:** The trace paths API must return multibyte path hashes faithfully. API response schemas and any serialization logic must not truncate or assume a fixed length.
**Acceptance Criteria:**
- [ ] `GET /trace-paths` returns multibyte path hash arrays as-is from the database
- [ ] `GET /trace-paths/{id}` returns multibyte path hash arrays as-is from the database
- [ ] API response examples in documentation reflect variable-length path hashes
### REQ-005: Update Web Dashboard Trace/Path Display
**Description:** If the web dashboard displays path hashes (e.g. in trace path views), the rendering must handle variable-length strings without layout breakage or truncation.
**Acceptance Criteria:**
- [ ] Trace path views display multibyte path hashes correctly
- [ ] No fixed-width formatting assumes 2-character hash strings
### REQ-006: Verify meshcore_py Library Compatibility
**Description:** Confirm that the meshcore_py v2.3.0+ library handles backwards compatibility with single-byte firmware nodes transparently, so that MeshCore Hub does not need to implement compatibility logic itself.
**Acceptance Criteria:**
- [ ] meshcore_py v2.3.0+ is confirmed to handle mixed single-byte and multibyte path hashes at the protocol level
- [ ] The interface receiver and sender components work with the updated library without code changes beyond the dependency version bump (or with minimal changes if the library API changed)
## Non-Functional Requirements
### REQ-007: Backwards Compatibility
**Category:** Reliability
**Description:** The system must continue to operate correctly when receiving events from nodes running older (single-byte) firmware. No data loss or processing errors may occur for legacy path hash formats.
**Acceptance Criteria:**
- [ ] Existing test cases with 2-character path hashes continue to pass without modification
- [ ] New test cases with multibyte path hashes pass alongside legacy test cases
- [ ] No database migration is required that would break rollback to the previous version
### REQ-008: Documentation Accuracy
**Category:** Maintainability
**Description:** All documentation referencing path hash format must be updated to reflect the variable-length nature of multibyte path hashes.
**Acceptance Criteria:**
- [ ] `SCHEMAS.md` path hash descriptions updated from "2-character" to "variable-length hex string"
- [ ] Code docstrings and field descriptions in models/schemas updated
- [ ] Example payloads in documentation include at least one multibyte path hash example
## Technical Constraints and Assumptions
### Constraints
- Python 3.13+ (specified by project)
- meshcore_py >= 2.3.0 (already set in `pyproject.toml`)
- SQLite with JSON column for path hash storage (existing schema)
- No breaking changes to the REST API response format
### Assumptions
- The meshcore_py library handles protocol-level backwards compatibility for multibyte path hashes, so MeshCore Hub only needs to ensure its data pipeline accepts variable-length strings
- Path hashes are always valid hex strings (even number of characters)
- The JSON column type in SQLite/SQLAlchemy does not impose length restrictions on individual array element strings
- The `pyproject.toml` dependency has already been bumped to `meshcore>=2.3.0`
## Scope
### In Scope
- Updating Pydantic schema descriptions and validation for variable-length path hashes
- Updating collector handlers and normalizer for multibyte path hashes
- Verifying database storage compatibility (no migration expected)
- Verifying API response compatibility
- Updating web dashboard path hash display if applicable
- Updating `SCHEMAS.md` and code documentation
- Adding/updating tests for multibyte path hashes
- Confirming meshcore_py library handles backwards compatibility
### Out of Scope
- MeshCore firmware changes or device-side configuration
- Adding UI controls for selecting single-byte vs. multibyte mode
- Performance optimization of path hash processing
- Changes to MQTT topic structure or message format
- LetsMesh ingest protocol changes (beyond accepting multibyte values that LetsMesh already provides)
## Suggested Tech Stack
| Layer | Technology | Rationale |
|-------|-----------|-----------|
| MeshCore bindings | meshcore_py >= 2.3.0 | Specified by prompt; provides multibyte path hash support |
| Validation | Pydantic v2 | Existing stack — schema descriptions updated |
| Database | SQLAlchemy 2.0 + SQLite JSON | Existing stack — no migration needed |
| API | FastAPI | Existing stack — no changes to framework |
| Testing | pytest + pytest-asyncio | Existing stack — new test cases for multibyte |

View File

@@ -0,0 +1,17 @@
# Phase: 01-multibyte-support
## Overview
The latest MeshCore firmware (v1.14) has introduced support for multibyte path hashes. The latest version of the MeshCore Python bindings (meshcore_py) has been updated to use this. This allows longer repeater IDs per hop, but reduces the maximum allowed hops. Nodes running older firmware only support 1-byte path hashes and will not receive messages if other nodes use multibyte path hashes.
## Goals
* Update Receiver/Sender component to use latest version of MeshCore Python bindings that support multibyte path hash handling.
## Requirements
* Must remain backwards compatible with previous version. Confirm whether this is handled by the Python library.
## References
* https://github.com/meshcore-dev/meshcore_py/releases/tag/v2.3.0

View File

@@ -0,0 +1,19 @@
# Code review round 001
# Phase: .plans/2026/03/17/01-multibyte-support
# Scope: full
# Generated by: /jp-codereview
issues: []
summary:
total_issues: 0
critical: 0
major: 0
minor: 0
by_category:
integration: 0
architecture: 0
security: 0
duplication: 0
error-handling: 0
style: 0

View File

@@ -0,0 +1,57 @@
# PRD Review
> Phase: `.plans/2026/03/17/01-multibyte-support`
> PRD: `.plans/2026/03/17/01-multibyte-support/prd.md`
> Prompt: `.plans/2026/03/17/01-multibyte-support/prompt.md`
## Verdict: PASS
The PRD comprehensively addresses the narrow scope of the original prompt. All prompt items are covered by specific requirements with testable acceptance criteria. The PRD appropriately expands the prompt's Receiver/Sender focus to cover the full data pipeline (collector, schemas, database, API, web), which is necessary for end-to-end multibyte support. No contradictions, feasibility concerns, or scope inconsistencies were found.
## Coverage Assessment
| Prompt Item | PRD Section | Covered? | Notes |
|---|---|---|---|
| Update Receiver/Sender to use latest meshcore_py with multibyte support | REQ-006 | Yes | Covered by library compatibility verification; receiver/sender work with updated bindings |
| Must remain backwards compatible with previous version | REQ-007 | Yes | Explicit non-functional requirement with 3 testable acceptance criteria |
| Confirm whether backwards compat is handled by the Python library | REQ-006 | Yes | First AC specifically calls for confirming library-level protocol compatibility |
| Reference to meshcore_py v2.3.0 release | Constraints, Tech Stack | Yes | Noted in constraints and suggested tech stack table |
**Coverage summary:** 4 of 4 prompt items fully covered, 0 partially covered, 0 not covered.
## Requirement Evaluation
All requirements passed evaluation. Minor observations:
### REQ-006: Verify meshcore_py Library Compatibility
- **Implementability:** Pass
- **Testability:** Pass -- though the first AC ("confirmed to handle...at the protocol level") is a verification/research task rather than an automated test, this is appropriate given the prompt explicitly asks to confirm library behavior
- **Completeness:** Pass
- **Consistency:** Pass
## Structural Issues
### Contradictions
None found.
### Ambiguities
None found. The PRD is appropriately specific for the scope of work.
### Missing Edge Cases
None significant. The PRD covers the key edge case of mixed-length path hash arrays from heterogeneous firmware networks (REQ-001 AC3).
### Feasibility Concerns
None. The changes are primarily documentation/description updates and verification tasks. The JSON column type inherently supports variable-length strings, and the meshcore_py dependency is already bumped.
### Scope Inconsistencies
None. The PRD's scope appropriately extends beyond the prompt's Receiver/Sender focus to cover downstream components (collector, API, web) that also handle path hashes. This is a necessary expansion, not scope creep.
## Action Items
No action items -- verdict is PASS.

View File

@@ -0,0 +1,89 @@
# Task Review
> Phase: `.plans/2026/03/17/01-multibyte-support`
> Tasks: `.plans/2026/03/17/01-multibyte-support/tasks.yaml`
> PRD: `.plans/2026/03/17/01-multibyte-support/prd.md`
## Verdict: PASS
The task list is structurally sound, correctly ordered, and fully covers all 8 PRD requirements. The dependency graph is a valid DAG with no cycles or invalid references. No ordering issues were found — no task references files that should be produced by a task outside its dependency chain. All tasks have valid roles, complexity values, and complete fields. The task breakdown is appropriate for the narrow scope of this phase.
## Dependency Validation
### Reference Validity
All dependency references are valid. Every task ID in every `dependencies` list corresponds to an existing task in the inventory.
### DAG Validation
The dependency graph is a valid DAG with no cycles. Maximum dependency depth is 1 (two test tasks depend on one implementation task each).
### Orphan Tasks
The following tasks are never referenced as dependencies by other tasks:
- **TASK-001** (Verify meshcore_py compatibility) — terminal verification task, expected
- **TASK-004** (Update SCHEMAS.md) — terminal documentation task, expected
- **TASK-005** (Tests for normalizer) — terminal test task, expected
- **TASK-006** (Tests for DB round-trip) — terminal test task, expected
- **TASK-007** (Tests for API responses) — terminal test task, expected
- **TASK-008** (Verify web dashboard) — terminal verification task, expected
All orphan tasks are leaf nodes (tests, docs, or verification tasks). No missing integration points.
## Ordering Check
No ordering issues detected. No task modifies a file that is also modified by another task outside its dependency chain. The `files_affected` sets across all tasks are disjoint except where proper dependency relationships exist.
## Coverage Check
### Uncovered Requirements
All PRD requirements are covered.
### Phantom References
No phantom references detected. Every requirement ID referenced in tasks exists in the PRD.
**Coverage summary:** 8 of 8 PRD requirements covered by tasks.
| Requirement | Covered By |
|---|---|
| REQ-001 | TASK-002, TASK-005 |
| REQ-002 | TASK-003 |
| REQ-003 | TASK-006 |
| REQ-004 | TASK-007 |
| REQ-005 | TASK-008 |
| REQ-006 | TASK-001 |
| REQ-007 | TASK-005, TASK-006, TASK-007 |
| REQ-008 | TASK-004 |
## Scope Check
### Tasks Too Large
No tasks flagged as too large. All tasks are `small` complexity except TASK-005 (`medium`), which is appropriately scoped for a test suite covering 7 unit test scenarios plus an integration test.
### Tasks Too Vague
No tasks flagged as too vague. All tasks have detailed descriptions (well over 50 characters), multiple testable acceptance criteria, and specific file paths.
### Missing Test Tasks
- **TASK-001** (Verify meshcore_py compatibility) — no associated test task. This is a research/verification task that does not produce source code, so a test task is not applicable. (Warning only)
- **TASK-004** (Update SCHEMAS.md) — no associated test task. This is a documentation-only task. (Warning only)
- **TASK-008** (Verify web dashboard) — no associated test task. This is a verification task that may result in no code changes. (Warning only)
All implementation tasks that modify source code (TASK-002, TASK-003) have corresponding test tasks (TASK-005, TASK-006, TASK-007).
### Field Validation
All tasks have valid fields:
- All `suggested_role` values are valid (`python`, `docs`, `frontend`)
- All `estimated_complexity` values are valid (`small`, `medium`)
- All tasks have at least one entry in `requirements`, `acceptance_criteria`, and `files_affected`
- All task IDs follow the `TASK-NNN` format with sequential numbering
## Action Items
No action items — verdict is PASS.

View File

@@ -0,0 +1,18 @@
status: completed
phase_path: .plans/2026/03/17/01-multibyte-support
branch: feature/multibyte-support
current_phase: completed
current_task: null
fix_round: 0
last_review_round: 1
review_loop_exit_reason: success
quality_gate: pass
tasks:
TASK-001: completed
TASK-002: completed
TASK-003: completed
TASK-004: completed
TASK-005: completed
TASK-006: completed
TASK-007: completed
TASK-008: completed

View File

@@ -0,0 +1,102 @@
# Phase Summary
> Phase: `.plans/2026/03/17/01-multibyte-support`
> Generated by: `/jp-summary`
## Project Overview
MeshCore Hub was updated to support multibyte path hashes introduced in MeshCore firmware v1.14 and meshcore_py v2.3.0. Path hashes — node identifiers embedded in trace and route data — were previously fixed at 1 byte (2 hex characters) per hop but can now be multiple bytes. The update maintains backwards compatibility with nodes running older single-byte firmware.
### Goals
- Support variable-length (multibyte) path hashes throughout the data pipeline: interface → MQTT → collector → database → API → web dashboard.
- Maintain backwards compatibility so single-byte path hashes from older firmware nodes continue to work without modification.
- Update documentation and schemas to accurately describe the new variable-length path hash format.
## Task Execution
### Overview
| Metric | Value |
|---|---|
| Total tasks | 8 |
| Completed | 8 |
| Failed | 0 |
| Blocked | 0 |
| Skipped | 0 |
### Task Details
| ID | Title | Role | Complexity | Status |
|---|---|---|---|---|
| TASK-001 | Verify meshcore_py v2.3.0+ backwards compatibility | python | small | completed |
| TASK-002 | Update _normalize_hash_list to accept variable-length hex strings | python | small | completed |
| TASK-003 | Update Pydantic schema descriptions for path_hashes fields | python | small | completed |
| TASK-004 | Update SCHEMAS.md documentation for multibyte path hashes | docs | small | completed |
| TASK-005 | Write tests for multibyte path hash normalizer | python | medium | completed |
| TASK-006 | Write tests for database round-trip of multibyte path hashes | python | small | completed |
| TASK-007 | Write tests for API trace path responses with multibyte hashes | python | small | completed |
| TASK-008 | Verify web dashboard trace path display handles variable-length hashes | frontend | small | completed |
### Requirement Coverage
| Metric | Value |
|---|---|
| Total PRD requirements | 8 |
| Requirements covered by completed tasks | 8 |
| Requirements with incomplete coverage | 0 |
## Files Created and Modified
### Created
- `tests/test_collector/test_letsmesh_normalizer.py`
### Modified
- `pyproject.toml`
- `SCHEMAS.md`
- `src/meshcore_hub/collector/letsmesh_normalizer.py`
- `src/meshcore_hub/common/schemas/events.py`
- `src/meshcore_hub/common/schemas/messages.py`
- `src/meshcore_hub/common/models/trace_path.py`
- `tests/test_collector/test_subscriber.py`
- `tests/test_common/test_models.py`
- `tests/test_api/test_trace_paths.py`
## Review Rounds
### Overview
| Metric | Value |
|---|---|
| Total review rounds | 1 |
| Total issues found | 0 |
| Issues fixed | 0 |
| Issues deferred | 0 |
| Issues remaining | 0 |
| Regressions introduced | 0 |
### Round Details
#### Round 1 (scope: full)
- **Issues found:** 0 (0 CRITICAL, 0 MAJOR, 0 MINOR)
- **Exit reason:** success (clean review, no fix rounds needed)
## Known Issues and Deferred Items
No known issues.
## Decisions
- **meshcore_py handles backwards compatibility transparently** -- Research (TASK-001) confirmed that meshcore_py v2.3.0 handles multibyte path hashes at the protocol level via self-describing wire format. No compatibility logic needed in MeshCore Hub's interface layer.
- **No database migration required** -- The existing JSON column type on `trace_paths.path_hashes` stores variable-length string arrays natively. Round-trip tests confirmed no data loss.
- **No web dashboard changes needed** -- The SPA has no trace path rendering page. Path hashes are only served via the REST API which uses `list[str]` with no length constraints.
- **Normalizer validation approach** -- Changed from exact length check (`len == 2`) to even-length minimum-2 check (`len >= 2 and len % 2 == 0`), preserving existing hex validation and uppercase normalization.
## Suggested Next Steps
1. Push the branch and create a pull request for review.
2. Perform manual integration testing with a MeshCore device running firmware v1.14+ to verify multibyte path hashes flow end-to-end.
3. Verify that mixed-firmware networks (some nodes v1.14+, some older) produce correct mixed-length path hash arrays in the database.

View File

@@ -0,0 +1,274 @@
# Task list generated from PRD: .plans/2026/03/17/01-multibyte-support/prd.md
# Generated by: /jp-task-list
tasks:
- id: "TASK-001"
title: "Verify meshcore_py v2.3.0+ backwards compatibility"
description: |
Research and confirm that meshcore_py v2.3.0+ handles backwards compatibility
with single-byte firmware nodes at the protocol level. Check the meshcore_py
v2.3.0 release notes and source code to determine whether the library
transparently handles mixed single-byte and multibyte path hashes, or whether
MeshCore Hub needs to implement any compatibility logic.
The pyproject.toml dependency is already set to meshcore>=2.3.0. Verify the
interface receiver (src/meshcore_hub/interface/receiver.py) and sender
(src/meshcore_hub/interface/sender.py) components work with the updated library
without code changes, or document any API changes that require updates.
Document findings as a comment block at the top of the PR description or in
the phase changelog.
requirements:
- "REQ-006"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "meshcore_py v2.3.0+ backwards compatibility behaviour is documented"
- "Any required interface code changes are identified (or confirmed unnecessary)"
- "pyproject.toml dependency version is confirmed correct at >=2.3.0"
estimated_complexity: "small"
files_affected:
- "pyproject.toml"
- id: "TASK-002"
title: "Update _normalize_hash_list to accept variable-length hex strings"
description: |
The LetsMesh normalizer method `_normalize_hash_list` in
src/meshcore_hub/collector/letsmesh_normalizer.py (line ~724) currently rejects
any path hash string that is not exactly 2 characters long:
if len(token) != 2:
continue
Update this method to accept variable-length hex strings (any even-length hex
string of 2+ characters). The validation should:
- Accept strings of length 2, 4, 6, etc. (even-length, minimum 2)
- Reject odd-length strings and empty strings
    - Continue to validate that all characters are valid hexadecimal (0-9, a-f/A-F, case-insensitive)
- Continue to uppercase-normalize the hex strings
Also update the method's docstring from "Normalize a list of one-byte hash
strings" to reflect variable-length support.
requirements:
- "REQ-001"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "_normalize_hash_list accepts 2-character hex strings (legacy single-byte)"
- "_normalize_hash_list accepts 4+ character hex strings (multibyte)"
- "_normalize_hash_list rejects odd-length strings"
- "_normalize_hash_list rejects non-hex characters"
- "_normalize_hash_list uppercases all hex strings"
- "Method docstring updated to describe variable-length support"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/collector/letsmesh_normalizer.py"
- id: "TASK-003"
done: true
title: "Update Pydantic schema descriptions for path_hashes fields"
description: |
Update the `path_hashes` field description in Pydantic schemas to reflect
variable-length hex strings instead of fixed 2-character strings.
Files and fields to update:
1. src/meshcore_hub/common/schemas/events.py - TraceDataEvent.path_hashes
(line ~134): Change description from "Array of 2-character node hash
identifiers" to "Array of hex-encoded node hash identifiers (variable
length, e.g. '4a' for single-byte or 'b3fa' for multibyte)"
2. src/meshcore_hub/common/schemas/messages.py - MessageEventBase.path_hashes
or TracePathRead.path_hashes (line ~157): Update description similarly
if it references fixed-length hashes.
3. src/meshcore_hub/common/models/trace_path.py - TracePath.path_hashes
docstring (line ~23): Change "JSON array of node hash identifiers" to
"JSON array of hex-encoded node hash identifiers (variable length)"
Ensure no Pydantic validators or Field constraints reject strings longer
than 2 characters. The current schemas use Optional[list[str]] with no
per-element length validation, so no validator changes should be needed.
requirements:
- "REQ-002"
dependencies: []
suggested_role: "python"
acceptance_criteria:
- "TraceDataEvent.path_hashes description reflects variable-length hex strings"
- "TracePathRead.path_hashes description reflects variable-length hex strings"
- "TracePath model docstring updated for variable-length path hashes"
- "No Pydantic validation rejects path hash strings longer than 2 characters"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/common/schemas/events.py"
- "src/meshcore_hub/common/schemas/messages.py"
- "src/meshcore_hub/common/models/trace_path.py"
- id: "TASK-004"
title: "Update SCHEMAS.md documentation for multibyte path hashes"
description: |
Update SCHEMAS.md to reflect the new variable-length path hash format
introduced in MeshCore firmware v1.14.
Changes needed:
1. Line ~228: Change "Array of 2-character node hash identifiers (ordered
by hops)" to "Array of hex-encoded node hash identifiers, variable length
(e.g. '4a' for single-byte, 'b3fa' for multibyte), ordered by hops"
2. Line ~239: Update the example path_hashes array to include at least one
multibyte hash, e.g.:
"path_hashes": ["4a", "b3fa", "02"]
This demonstrates mixed single-byte and multibyte hashes in the same trace.
3. Add a brief note explaining that firmware v1.14+ supports multibyte path
hashes and that older nodes use single-byte (2-character) hashes, so
mixed-length arrays are expected in heterogeneous networks.
requirements:
- "REQ-008"
dependencies: []
suggested_role: "docs"
acceptance_criteria:
- "path_hashes field description updated from '2-character' to 'variable-length hex'"
- "Example payload includes at least one multibyte path hash"
- "Note about firmware version compatibility is present"
estimated_complexity: "small"
files_affected:
- "SCHEMAS.md"
- id: "TASK-005"
done: true
title: "Write tests for multibyte path hash normalizer"
description: |
Add tests for the updated _normalize_hash_list method in the LetsMesh
normalizer to verify it handles variable-length hex strings correctly.
Add test cases in tests/test_collector/ (either in an existing normalizer
test file or a new test_letsmesh_normalizer.py if one doesn't exist):
1. Single-byte (2-char) hashes: ["4a", "b3", "fa"] -> accepted, uppercased
2. Multibyte (4-char) hashes: ["4a2b", "b3fa"] -> accepted, uppercased
3. Mixed-length hashes: ["4a", "b3fa", "02"] -> all accepted
4. Odd-length strings: ["4a", "b3f", "02"] -> "b3f" filtered out
5. Invalid hex characters: ["4a", "zz", "02"] -> "zz" filtered out
6. Empty list: [] -> returns None
7. Non-string items: [42, "4a"] -> 42 filtered out
Also add/update integration-level tests in tests/test_collector/test_subscriber.py
to verify that multibyte path hashes flow through the full collector pipeline
(subscriber -> handler -> database) correctly. The existing test cases at
lines ~607 and ~662 use 2-character hashes; add a parallel test case with
multibyte hashes.
requirements:
- "REQ-001"
- "REQ-007"
dependencies:
- "TASK-002"
suggested_role: "python"
acceptance_criteria:
- "Unit tests for _normalize_hash_list cover all 7 scenarios listed"
- "Integration test verifies multibyte path hashes stored correctly in database"
- "All existing 2-character path hash tests continue to pass"
- "All new tests pass"
estimated_complexity: "medium"
files_affected:
- "tests/test_collector/test_letsmesh_normalizer.py"
- "tests/test_collector/test_subscriber.py"
- id: "TASK-006"
title: "Write tests for database round-trip of multibyte path hashes"
description: |
Verify that the SQLAlchemy JSON column on the TracePath model correctly
stores and retrieves variable-length path hash arrays without data loss
or truncation.
Add test cases in tests/test_common/test_models.py (where existing
TracePath tests are at line ~129):
1. Store and retrieve a TracePath with multibyte path_hashes:
["4a2b", "b3fa", "02cd"] -> verify round-trip equality
2. Store and retrieve a TracePath with mixed-length path_hashes:
["4a", "b3fa", "02"] -> verify round-trip equality
3. Verify existing test with 2-character hashes still passes
These tests confirm REQ-003 (no migration needed) and contribute to
REQ-007 (backwards compatibility).
requirements:
- "REQ-003"
- "REQ-007"
dependencies:
- "TASK-003"
suggested_role: "python"
acceptance_criteria:
- "Test verifies multibyte path_hashes round-trip through JSON column correctly"
- "Test verifies mixed-length path_hashes round-trip correctly"
- "Existing 2-character path hash test continues to pass"
- "No Alembic migration is created or required"
estimated_complexity: "small"
files_affected:
- "tests/test_common/test_models.py"
- id: "TASK-007"
title: "Write tests for API trace path responses with multibyte hashes"
description: |
Add test cases in tests/test_api/test_trace_paths.py to verify that the
trace paths API returns multibyte path hashes faithfully.
The existing test fixtures in tests/test_api/conftest.py create
sample_trace_path objects with path_hashes like ["abc123", "def456",
"ghi789"] (line ~275). Note these are already 6-character strings, so
the API serialization likely already works. Add explicit test cases:
1. Create a trace path with multibyte path_hashes (e.g. ["4a2b", "b3fa"])
via the fixture, then GET /trace-paths and verify the response contains
the exact same array.
2. Create a trace path with mixed-length path_hashes (e.g. ["4a", "b3fa",
"02"]), then GET /trace-paths/{id} and verify the response.
3. Verify existing API tests with current path_hashes continue to pass.
These tests confirm REQ-004.
requirements:
- "REQ-004"
- "REQ-007"
dependencies:
- "TASK-003"
suggested_role: "python"
acceptance_criteria:
- "Test verifies GET /trace-paths returns multibyte path hashes correctly"
- "Test verifies GET /trace-paths/{id} returns mixed-length path hashes correctly"
- "Existing API trace path tests continue to pass"
estimated_complexity: "small"
files_affected:
- "tests/test_api/test_trace_paths.py"
- "tests/test_api/conftest.py"
- id: "TASK-008"
done: true
title: "Verify web dashboard trace path display handles variable-length hashes"
description: |
Verify that the web dashboard does not have any hardcoded assumptions about
2-character path hash strings. A grep of src/meshcore_hub/web/static/js/spa/
for "path_hash" and "trace" shows no direct references to path hashes in the
SPA JavaScript code, meaning path hashes are likely rendered generically
through the API data display.
Confirm this by:
1. Checking all web template and JavaScript files that render trace path data
2. Verifying no CSS or JS applies fixed-width formatting to path hash elements
3. If any fixed-width or truncation logic exists, update it to handle
variable-length strings
If no web code references path hashes directly (as initial grep suggests),
document that the web dashboard requires no changes for multibyte support.
This satisfies REQ-005.
requirements:
- "REQ-005"
dependencies: []
suggested_role: "frontend"
acceptance_criteria:
- "Web dashboard trace/path display verified to handle variable-length hashes"
- "No fixed-width formatting assumes 2-character hash strings"
- "Any necessary changes applied, or no-change finding documented"
estimated_complexity: "small"
files_affected:
- "src/meshcore_hub/web/static/js/spa/pages/trace-paths.js"

View File

@@ -1,3 +1,6 @@
default_language_version:
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
@@ -14,7 +17,6 @@ repos:
rev: 24.3.0
hooks:
- id: black
language_version: python3.11
args: ["--line-length=88"]
- repo: https://github.com/pycqa/flake8

View File

@@ -1 +1 @@
3.11
3.14

289
AGENTS.md
View File

@@ -12,13 +12,20 @@ This document provides context and guidelines for AI coding assistants working o
- `source .venv/bin/activate`
* You MUST install all project dependencies using the `pip install -e ".[dev]"` command
* You MUST install `pre-commit` for quality checks
* You MUST keep project documentation in sync with behavior/config/schema changes made in code (at minimum update relevant sections in `README.md`, `SCHEMAS.md`, `PLAN.md`, and/or `TASKS.md` when applicable)
* Before committing:
- Run tests with `pytest` to ensure recent changes haven't broken anything
- Run **targeted tests** for the components you changed, not the full suite:
- `pytest tests/test_web/` for web-only changes (templates, static JS, web routes)
- `pytest tests/test_api/` for API changes
- `pytest tests/test_collector/` for collector changes
- `pytest tests/test_interface/` for interface/sender/receiver changes
- `pytest tests/test_common/` for common models/schemas/config changes
- Only run the full `pytest` if changes span multiple components
- Run `pre-commit run --all-files` to perform all quality checks
## Project Overview
MeshCore Hub is a Python 3.11+ monorepo for managing and orchestrating MeshCore mesh networks. It consists of five main components:
MeshCore Hub is a Python 3.13+ monorepo for managing and orchestrating MeshCore mesh networks. It consists of five main components:
- **meshcore_interface**: Serial/USB interface to MeshCore companion nodes, publishes/subscribes to MQTT
- **meshcore_collector**: Collects MeshCore events from MQTT and stores them in a database
@@ -37,7 +44,7 @@ MeshCore Hub is a Python 3.11+ monorepo for managing and orchestrating MeshCore
| Category | Technology |
|----------|------------|
| Language | Python 3.11+ |
| Language | Python 3.13+ |
| Package Management | pip with pyproject.toml |
| CLI Framework | Click |
| Configuration | Pydantic Settings |
@@ -46,7 +53,8 @@ MeshCore Hub is a Python 3.11+ monorepo for managing and orchestrating MeshCore
| REST API | FastAPI |
| MQTT Client | paho-mqtt |
| MeshCore Interface | meshcore |
| Templates | Jinja2 |
| Templates | Jinja2 (server), lit-html (SPA) |
| Frontend | ES Modules SPA with client-side routing |
| CSS Framework | Tailwind CSS + DaisyUI |
| Testing | pytest, pytest-asyncio |
| Formatting | black |
@@ -274,18 +282,26 @@ meshcore-hub/
│ │ ├── app.py # FastAPI app
│ │ ├── auth.py # Authentication
│ │ ├── dependencies.py
│ │ ├── routes/ # API routes
│ │ │ ├── members.py # Member CRUD endpoints
│ │ ── ...
│ │ └── templates/ # Dashboard HTML
│ │ ├── metrics.py # Prometheus metrics endpoint
│ │ └── routes/ # API routes
│ │ ── members.py # Member CRUD endpoints
│ │ └── ...
│ └── web/
│ ├── cli.py
│ ├── app.py # FastAPI app
│ ├── routes/ # Page routes
├── members.py # Members page
│ └── ...
├── templates/ # Jinja2 templates
└── static/ # CSS, JS
│ ├── pages.py # Custom markdown page loader
│ ├── templates/ # Jinja2 templates (spa.html shell)
└── static/
├── css/app.css # Custom styles
└── js/spa/ # SPA frontend (ES modules)
│ ├── app.js # Entry point, route registration
│ ├── router.js # Client-side History API router
│ ├── api.js # API fetch helper
│ ├── components.js # Shared UI components (lit-html)
│ ├── icons.js # SVG icon functions (lit-html)
│ └── pages/ # Page modules (lazy-loaded)
│ ├── home.js, dashboard.js, nodes.js, ...
│ └── admin/ # Admin page modules
├── tests/
│ ├── conftest.py
│ ├── test_common/
@@ -297,11 +313,19 @@ meshcore-hub/
│ ├── env.py
│ └── versions/
├── etc/
── mosquitto.conf # MQTT broker configuration
── mosquitto.conf # MQTT broker configuration
│ ├── prometheus/ # Prometheus configuration
│ │ ├── prometheus.yml # Scrape and alerting config
│ │ └── alerts.yml # Alert rules
│ └── alertmanager/ # Alertmanager configuration
│ └── alertmanager.yml # Routing and receiver config
├── example/
── seed/ # Example seed data files
├── node_tags.yaml # Example node tags
└── members.yaml # Example network members
── seed/ # Example seed data files
├── node_tags.yaml # Example node tags
└── members.yaml # Example network members
│ └── content/ # Example custom content
│ ├── pages/ # Example custom pages
│ └── media/ # Example media files
├── seed/ # Seed data directory (SEED_HOME)
│ ├── node_tags.yaml # Node tags for import
│ └── members.yaml # Network members for import
@@ -342,6 +366,25 @@ Examples:
- JSON columns for flexible data (path_hashes, parsed_data, etc.)
- Foreign keys reference nodes by UUID, not public_key
## Standard Node Tags
Node tags are flexible key-value pairs that allow custom metadata to be attached to nodes. While tags are completely optional and freeform, the following standard tag keys are recommended for consistent use across the web dashboard:
| Tag Key | Description | Usage |
|---------|-------------|-------|
| `name` | Node display name | Used as the primary display name throughout the UI (overrides the advertised name) |
| `description` | Short description | Displayed as supplementary text under the node name |
| `member_id` | Member identifier reference | Links the node to a network member (matches `member_id` in Members table) |
| `lat` | GPS latitude override | Overrides node-reported latitude for map display |
| `lon` | GPS longitude override | Overrides node-reported longitude for map display |
| `elevation` | GPS elevation override | Overrides node-reported elevation |
| `role` | Node role/purpose | Used for website presentation and filtering (e.g., "gateway", "repeater", "sensor") |
**Important Notes:**
- All tags are optional - nodes can function without any tags
- Tag keys are case-sensitive
- The `member_id` tag should reference a valid `member_id` from the Members table
## Testing Guidelines
### Unit Tests
@@ -418,13 +461,121 @@ async def client(db_session):
5. Add Alembic migration if schema changed
6. Add tests in `tests/test_collector/`
### Adding a New SPA Page
The web dashboard is a Single Page Application. Pages are ES modules loaded by the client-side router.
1. Create a page module in `web/static/js/spa/pages/` (e.g., `my-page.js`)
2. Export an `async function render(container, params, router)` that renders into `container` using `litRender(html\`...\`, container)`
3. Register the route in `web/static/js/spa/app.js` with `router.addRoute('/my-page', pageHandler(pages.myPage))`
4. Add the page title to `updatePageTitle()` in `app.js`
5. Add a nav link in `web/templates/spa.html` (both mobile and desktop menus)
**Key patterns:**
- Import `html`, `litRender`, `nothing` from `../components.js` (re-exports lit-html)
- Use `apiGet()` from `../api.js` for API calls
- For list pages with filters, use the `renderPage()` pattern: render the page header immediately, then re-render with the filter form + results after fetch (keeps the form out of the shell to avoid layout shift from data-dependent filter selects)
- Old page content stays visible until data is ready (navbar spinner indicates loading)
- Use `pageColors` from `components.js` for section-specific colors (reads CSS custom properties from `app.css`)
- Return a cleanup function if the page creates resources (e.g., Leaflet maps, Chart.js instances)
### Internationalization (i18n)
The web dashboard supports internationalization via JSON translation files. The default language is English.
**Translation files location:** `src/meshcore_hub/web/static/locales/`
**Key files:**
- `en.json` - English translations (reference implementation)
- `languages.md` - Comprehensive translation reference guide for translators
**Using translations in JavaScript:**
Import the `t()` function from `components.js`:
```javascript
import { t } from '../components.js';
// Simple translation
const label = t('common.save'); // "Save"
// Translation with variable interpolation
const title = t('common.add_entity', { entity: t('entities.node') }); // "Add Node"
// Composed patterns for consistency
const emptyMsg = t('common.no_entity_found', { entity: t('entities.nodes').toLowerCase() }); // "No nodes found"
```
**Translation architecture:**
1. **Entity-based composition:** Core entity names (`entities.*`) are referenced by composite patterns for consistency
2. **Reusable patterns:** Common UI patterns (`common.*`) use `{{variable}}` interpolation for dynamic content
3. **Separation of concerns:**
- Keys without `_label` suffix = table headers (title case, no colon)
- Keys with `_label` suffix = inline labels (sentence case, with colon)
**When adding/modifying translations:**
1. **Add new keys** to `en.json` following existing patterns:
- Use composition when possible (reference `entities.*` in `common.*` patterns)
- Group related keys by section (e.g., `admin_members.*`, `admin_node_tags.*`)
- Use `{{variable}}` syntax for dynamic content
2. **Update `languages.md`** with:
- Key name, English value, and usage context
- Variable descriptions if using interpolation
- Notes about HTML content or special formatting
3. **Add tests** in `tests/test_common/test_i18n.py`:
- Test new interpolation patterns
- Test required sections if adding new top-level sections
- Test composed patterns with entity references
4. **Run i18n tests:**
```bash
pytest tests/test_common/test_i18n.py -v
```
**Best practices:**
- **Avoid duplication:** Use `common.*` patterns instead of duplicating similar strings
- **Compose with entities:** Reference `entities.*` keys in patterns rather than hardcoding entity names
- **Preserve variables:** Keep `{{variable}}` placeholders unchanged when translating
- **Test composition:** Verify patterns work with all entity types (singular/plural, lowercase/uppercase)
- **Document context:** Always update `languages.md` so translators understand usage
**Example - adding a new entity and patterns:**
```javascript
// 1. Add entity to en.json
"entities": {
"sensor": "Sensor"
}
// 2. Use with existing common patterns
t('common.add_entity', { entity: t('entities.sensor') }) // "Add Sensor"
t('common.no_entity_found', { entity: t('entities.sensors').toLowerCase() }) // "No sensors found"
// 3. Update languages.md with context
// 4. Add test to test_i18n.py
```
**Translation loading:**
The i18n system (`src/meshcore_hub/common/i18n.py`) loads translations on startup:
- Defaults to English (`en`)
- Falls back to English for missing keys
- Returns the key itself if translation not found
For full translation guidelines, see `src/meshcore_hub/web/static/locales/languages.md`.
### Adding a New Database Model
1. Create model in `common/models/`
2. Export in `common/models/__init__.py`
3. Create Alembic migration: `alembic revision --autogenerate -m "description"`
3. Create Alembic migration: `meshcore-hub db revision --autogenerate -m "description"`
4. Review and adjust migration file
5. Test migration: `alembic upgrade head`
5. Test migration: `meshcore-hub db upgrade`
### Running the Development Environment
@@ -446,7 +597,7 @@ pytest
# Run specific component
meshcore-hub api --reload
meshcore-hub collector
meshcore-hub interface --mode receiver --mock
meshcore-hub interface receiver --mock
```
## Environment Variables
@@ -456,11 +607,22 @@ See [PLAN.md](PLAN.md#configuration-environment-variables) for complete list.
Key variables:
- `DATA_HOME` - Base directory for runtime data (default: `./data`)
- `SEED_HOME` - Directory containing seed data files (default: `./seed`)
- `CONTENT_HOME` - Directory containing custom content (pages, media) (default: `./content`)
- `MQTT_HOST`, `MQTT_PORT`, `MQTT_PREFIX` - MQTT broker connection
- `DATABASE_URL` - SQLAlchemy database URL (default: `sqlite:///{DATA_HOME}/collector/meshcore.db`)
- `MQTT_TLS` - Enable TLS/SSL for MQTT (default: `false`)
- `API_READ_KEY`, `API_ADMIN_KEY` - API authentication keys
- `WEB_ADMIN_ENABLED` - Enable admin interface at /a/ (default: `false`, requires auth proxy)
- `WEB_TRUSTED_PROXY_HOSTS` - Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled.
- `WEB_THEME` - Default theme for the web dashboard (default: `dark`, options: `dark`, `light`). Users can override via the theme toggle in the navbar, which persists their preference in browser localStorage.
- `WEB_AUTO_REFRESH_SECONDS` - Auto-refresh interval in seconds for list pages (default: `30`, `0` to disable)
- `TZ` - Timezone for web dashboard date/time display (default: `UTC`, e.g., `America/New_York`, `Europe/London`)
- `FEATURE_DASHBOARD`, `FEATURE_NODES`, `FEATURE_ADVERTISEMENTS`, `FEATURE_MESSAGES`, `FEATURE_MAP`, `FEATURE_MEMBERS`, `FEATURE_PAGES` - Feature flags to enable/disable specific web dashboard pages (default: all `true`). Dependencies: Dashboard auto-disables when all of Nodes/Advertisements/Messages are disabled. Map auto-disables when Nodes is disabled.
- `METRICS_ENABLED` - Enable Prometheus metrics endpoint at /metrics (default: `true`)
- `METRICS_CACHE_TTL` - Seconds to cache metrics output (default: `60`)
- `LOG_LEVEL` - Logging verbosity
The database defaults to `sqlite:///{DATA_HOME}/collector/meshcore.db` and does not typically need to be configured.
### Directory Structure
**Seed Data (`SEED_HOME`)** - Contains initial data files for database seeding:
@@ -470,6 +632,32 @@ ${SEED_HOME}/
└── members.yaml # Network members list
```
**Custom Content (`CONTENT_HOME`)** - Contains custom pages and media for the web dashboard:
```
${CONTENT_HOME}/
├── pages/ # Custom markdown pages
│ ├── about.md # Example: About page (/pages/about)
│ ├── faq.md # Example: FAQ page (/pages/faq)
│ └── getting-started.md # Example: Getting Started (/pages/getting-started)
└── media/ # Custom media files
└── images/
├── logo.svg # Full-color custom logo (default)
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
```
Pages use YAML frontmatter for metadata:
```markdown
---
title: About Us # Browser tab title and nav link (not rendered on page)
slug: about # URL path (default: filename without .md)
menu_order: 10 # Nav sort order (default: 100, lower = earlier)
---
# About Our Network
Markdown content here (include your own heading)...
```
**Runtime Data (`DATA_HOME`)** - Contains runtime data (gitignored):
```
${DATA_HOME}/
@@ -479,13 +667,23 @@ ${DATA_HOME}/
Services automatically create their subdirectories if they don't exist.
### Automatic Seeding
### Seeding
The collector automatically imports seed data on startup if YAML files exist in `SEED_HOME`:
The database can be seeded with node tags and network members from YAML files in `SEED_HOME`:
- `node_tags.yaml` - Node tag definitions (keyed by public_key)
- `members.yaml` - Network member definitions
Manual seeding can be triggered with: `meshcore-hub collector seed`
**Important:** Seeding is NOT automatic and must be run explicitly. This prevents seed files from overwriting user changes made via the admin UI.
```bash
# Native CLI
meshcore-hub collector seed
# With Docker Compose
docker compose --profile seed up
```
**Note:** Once the admin UI is enabled (`WEB_ADMIN_ENABLED=true`), tags should be managed through the web interface rather than seed files.
### Webhook Configuration
@@ -536,6 +734,22 @@ When enabled, the collector automatically removes nodes where:
**Note:** Both event data and node cleanup run on the same schedule (DATA_RETENTION_INTERVAL_HOURS).
**Contact Cleanup (Interface RECEIVER):**
The interface RECEIVER mode can automatically remove stale contacts from the MeshCore companion node's contact database. This prevents the companion node from resyncing old/dead contacts back to the collector, freeing up memory on the device (typically limited to ~100 contacts).
| Variable | Description |
|----------|-------------|
| `CONTACT_CLEANUP_ENABLED` | Enable automatic removal of stale contacts (default: true) |
| `CONTACT_CLEANUP_DAYS` | Remove contacts not advertised for this many days (default: 7) |
When enabled, during each contact sync the receiver checks each contact's `last_advert` timestamp:
- Contacts with `last_advert` older than `CONTACT_CLEANUP_DAYS` are removed from the device
- Stale contacts are not published to MQTT (preventing collector database pollution)
- Contacts without a `last_advert` timestamp are preserved (no removal without data)
This cleanup runs automatically whenever the receiver syncs contacts (on startup and after each advertisement event).
Manual cleanup can be triggered at any time with:
```bash
# Dry run to see what would be deleted
@@ -559,9 +773,13 @@ Webhook payload structure:
### Common Issues
1. **MQTT Connection Failed**: Check broker is running and `MQTT_HOST`/`MQTT_PORT` are correct
2. **Database Migration Errors**: Ensure `DATABASE_URL` is correct, run `alembic upgrade head`
2. **Database Migration Errors**: Ensure `DATA_HOME` is writable, run `meshcore-hub db upgrade`
3. **Import Errors**: Ensure package is installed with `pip install -e .`
4. **Type Errors**: Run `mypy src/` to check type annotations
4. **Type Errors**: Run `pre-commit run --all-files` to check type annotations and other issues
5. **NixOS greenlet errors**: On NixOS, the pre-built greenlet wheel may fail with `libstdc++.so.6` errors. Rebuild from source:
```bash
pip install --no-binary greenlet greenlet
```
### Debugging
@@ -622,8 +840,23 @@ await mc.start_auto_message_fetching()
On startup, the receiver performs these initialization steps:
1. Set device clock to current Unix timestamp
2. Send a local (non-flood) advertisement
3. Start automatic message fetching
2. Optionally set the device name (if `MESHCORE_DEVICE_NAME` is configured)
3. Send a flood advertisement (broadcasts device name to the mesh)
4. Start automatic message fetching
5. Sync the device's contact database
### Contact Sync Behavior
The receiver syncs the device's contact database in two scenarios:
1. **Startup**: Initial sync when receiver starts
2. **Advertisement Events**: Automatic sync triggered whenever an advertisement is received from the mesh
Since advertisements are typically received every ~20 minutes, contact sync happens automatically without manual intervention. Each contact from the device is published individually to MQTT:
- Topic: `{prefix}/{device_public_key}/event/contact`
- Payload: `{public_key, adv_name, type}`
This ensures the collector's database stays current with all nodes discovered on the mesh network.
## References

View File

@@ -4,7 +4,7 @@
# =============================================================================
# Stage 1: Builder - Install dependencies and build package
# =============================================================================
FROM python:3.11-slim AS builder
FROM python:3.14-slim AS builder
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
@@ -39,7 +39,7 @@ RUN sed -i "s|__version__ = \"dev\"|__version__ = \"${BUILD_VERSION}\"|" src/mes
# =============================================================================
# Stage 2: Runtime - Final production image
# =============================================================================
FROM python:3.11-slim AS runtime
FROM python:3.14-slim AS runtime
# Labels
LABEL org.opencontainers.image.title="MeshCore Hub" \
@@ -65,9 +65,26 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
RUN apt-get update && apt-get install -y --no-install-recommends \
# For serial port access
udev \
# LetsMesh decoder runtime
nodejs \
npm \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p /data
# Install meshcore-decoder CLI.
RUN mkdir -p /opt/letsmesh-decoder \
&& cd /opt/letsmesh-decoder \
&& npm init -y >/dev/null 2>&1 \
&& npm install --omit=dev @michaelhart/meshcore-decoder@0.2.7 patch-package
# Apply maintained meshcore-decoder compatibility patch.
COPY patches/@michaelhart+meshcore-decoder+0.2.7.patch /opt/letsmesh-decoder/patches/@michaelhart+meshcore-decoder+0.2.7.patch
RUN cd /opt/letsmesh-decoder \
&& npx patch-package --error-on-fail \
&& npm uninstall patch-package \
&& npm prune --omit=dev
RUN ln -s /opt/letsmesh-decoder/node_modules/.bin/meshcore-decoder /usr/local/bin/meshcore-decoder
# Copy virtual environment from builder
COPY --from=builder /opt/venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

14
PLAN.md
View File

@@ -489,6 +489,16 @@ ${DATA_HOME}/
|----------|---------|-------------|
| DATABASE_URL | sqlite:///{DATA_HOME}/collector/meshcore.db | SQLAlchemy URL |
| TAGS_FILE | {DATA_HOME}/collector/tags.json | Path to tags JSON file |
| COLLECTOR_INGEST_MODE | native | Ingest mode (`native` or `letsmesh_upload`) |
| COLLECTOR_LETSMESH_DECODER_ENABLED | true | Enable external packet decoding in LetsMesh mode |
LetsMesh compatibility parity note:
- `status` feed packets are stored as informational `letsmesh_status` events and do not create advertisement rows.
- Advertisement rows in LetsMesh mode are created from decoded payload type `4` only.
- Decoded payload type `11` is normalized to native `contact` updates.
- Decoded payload type `9` is normalized to native `trace_data`.
- Decoded payload type `8` is normalized to informational `path_updated`.
- Decoded payload type `1` can map to native response-style events when decrypted structured content is available.
### API
| Variable | Default | Description |
@@ -506,6 +516,10 @@ ${DATA_HOME}/
| WEB_PORT | 8080 | Web bind port |
| API_BASE_URL | http://localhost:8000 | API endpoint |
| API_KEY | | API key for queries |
| WEB_TRUSTED_PROXY_HOSTS | * | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. |
| WEB_LOCALE | en | UI translation locale |
| WEB_DATETIME_LOCALE | en-US | Date formatting locale for UI timestamps |
| TZ | UTC | Timezone used for UI timestamp rendering |
| NETWORK_DOMAIN | | Network domain |
| NETWORK_NAME | MeshCore Network | Network name |
| NETWORK_CITY | | City location |

624
README.md
View File

@@ -1,9 +1,19 @@
# MeshCore Hub
Python 3.11+ platform for managing and orchestrating MeshCore mesh networks.
[![CI](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/ci.yml/badge.svg)](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/ci.yml)
[![Docker](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/docker.yml/badge.svg)](https://github.com/ipnet-mesh/meshcore-hub/actions/workflows/docker.yml)
[![codecov](https://codecov.io/github/ipnet-mesh/meshcore-hub/graph/badge.svg?token=DO4F82DLKS)](https://codecov.io/github/ipnet-mesh/meshcore-hub)
[![BuyMeACoffee](https://raw.githubusercontent.com/pachadotdev/buymeacoffee-badges/main/bmc-donate-yellow.svg)](https://www.buymeacoffee.com/jinglemansweep)
Python 3.13+ platform for managing and orchestrating MeshCore mesh networks.
![MeshCore Hub Web Dashboard](docs/images/web.png)
> [!IMPORTANT]
> **Help Translate MeshCore Hub** 🌍
>
> We need volunteers to translate the web dashboard! Currently only English is available. Check out the [Translation Guide](src/meshcore_hub/web/static/locales/languages.md) to contribute a language pack. Partial translations welcome!
## Overview
MeshCore Hub provides a complete solution for monitoring, collecting, and interacting with MeshCore mesh networks. It consists of multiple components that work together:
@@ -13,45 +23,49 @@ MeshCore Hub provides a complete solution for monitoring, collecting, and intera
| **Interface** | Connects to MeshCore companion nodes via Serial/USB, bridges events to/from MQTT |
| **Collector** | Subscribes to MQTT events and persists them to a database |
| **API** | REST API for querying data and sending commands to the network |
| **Web Dashboard** | User-friendly web interface for visualizing network status |
| **Web Dashboard** | Single Page Application (SPA) for visualizing network status |
## Architecture
```
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
│    MeshCore     │ │    MeshCore     │ │    MeshCore     │
│    Device 1     │ │    Device 2     │ │    Device 3     │
└────────┬────────┘ └────────┬────────┘ └────────┬────────┘
│ │ │
│ Serial/USB │ Serial/USB │ Serial/USB
│ │ │
┌────────▼────────┐ ┌────────▼────────┐ ┌────────▼────────┐
│    Interface    │ │    Interface    │ │    Interface    │
│   (RECEIVER)    │ │   (RECEIVER)    │ │    (SENDER)     │
└────────┬────────┘ └────────┬────────┘ └────────▲────────┘
│ │ │
│ Publish │ Publish │ Subscribe
│ │ │
└───────────┬───────────┴───────────────────────┘
┌──────▼──────┐
│ MQTT │
│ Broker │
└──────┬──────┘
┌──────▼──────┐
│ Collector │
└──────┬──────┘
┌──────▼──────┐
│ Database │
└──────┬──────┘
┌───────────┴───────────┐
│ │
┌──────▼──────┐ ┌───────▼───────┐
│ API │◄──────│ Web Dashboard │
└─────────────┘ └───────────────┘
```mermaid
flowchart LR
subgraph Devices["MeshCore Devices"]
D1["Device 1"]
D2["Device 2"]
D3["Device 3"]
end
subgraph Interfaces["Interface Layer"]
I1["RECEIVER"]
I2["RECEIVER"]
I3["SENDER"]
end
D1 -->|Serial| I1
D2 -->|Serial| I2
D3 -->|Serial| I3
I1 -->|Publish| MQTT
I2 -->|Publish| MQTT
MQTT -->|Subscribe| I3
MQTT["MQTT Broker"]
subgraph Backend["Backend Services"]
Collector --> Database --> API
end
MQTT --> Collector
API --> Web["Web Dashboard"]
style Devices fill:none,stroke:#0288d1,stroke-width:2px
style Interfaces fill:none,stroke:#f57c00,stroke-width:2px
style Backend fill:none,stroke:#388e3c,stroke-width:2px
style MQTT fill:none,stroke:#7b1fa2,stroke-width:3px
style Collector fill:none,stroke:#388e3c,stroke-width:2px
style Database fill:none,stroke:#c2185b,stroke-width:2px
style API fill:none,stroke:#1976d2,stroke-width:2px
style Web fill:none,stroke:#ffa000,stroke-width:2px
```
## Features
@@ -62,6 +76,7 @@ MeshCore Hub provides a complete solution for monitoring, collecting, and intera
- **Command Dispatch**: Send messages and advertisements via the API
- **Node Tagging**: Add custom metadata to nodes for organization
- **Web Dashboard**: Visualize network status, node locations, and message history
- **Internationalization**: Full i18n support with composable translation patterns
- **Docker Ready**: Single image with all components, easy deployment
## Getting Started
@@ -76,9 +91,13 @@ The quickest way to get started is running the entire stack on a single machine
**Steps:**
```bash
# Clone the repository
git clone https://github.com/ipnet-mesh/meshcore-hub.git
# Create a directory, download the Docker Compose file and
# example environment configuration file
mkdir meshcore-hub
cd meshcore-hub
wget https://raw.githubusercontent.com/ipnet-mesh/meshcore-hub/refs/heads/main/docker-compose.yml
wget https://raw.githubusercontent.com/ipnet-mesh/meshcore-hub/refs/heads/main/.env.example
# Copy and configure environment
cp .env.example .env
@@ -97,33 +116,34 @@ This starts all services: MQTT broker, collector, API, web dashboard, and the in
For larger deployments, you can separate receiver nodes from the central infrastructure. This allows multiple community members to contribute receiver coverage while hosting the backend centrally.
```
┌─────────────────────────────────────────────────────────────────────┐
│ Community Members │
│ │
┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
│ Raspberry Pi │ │ Raspberry Pi │ │ Any Linux │ │
│ + MeshCore │ │ + MeshCore │ │ + MeshCore │ │
│ │ Device │ │ Device │ │ Device │ │
└──────┬───────┘ └──────┬───────┘ └──────┬───────┘ │
│ │ │ │
│ receiver profile only │ │
└──────────────────┼──────────────────┘ │
│ │
│ MQTT (port 1883) │
│ │ │
└────────────────────────────┼─────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────────────┐
Community VPS / Server │
│ │
│ ┌──────────┐ ┌───────────┐ ┌─────────┐ ┌──────────────┐ │
MQTT │──▶│ Collector │──▶│ API │◀──│ Web Dashboard│ │
Broker │ │ │ │ │ │ (public) │ │
└──────────┘ └───────────┘ └─────────┘ └──────────────┘ │
└─────────────────────────────────────────────────────────────────────┘
```mermaid
flowchart TB
subgraph Community["Community Members"]
R1["Raspberry Pi + MeshCore"]
R2["Raspberry Pi + MeshCore"]
R3["Any Linux + MeshCore"]
end
subgraph Server["Community VPS / Server"]
MQTT["MQTT Broker"]
Collector
API
Web["Web Dashboard (public)"]
MQTT --> Collector --> API
API <--- Web
end
R1 -->|MQTT port 1883| MQTT
R2 -->|MQTT port 1883| MQTT
R3 -->|MQTT port 1883| MQTT
style Community fill:none,stroke:#0288d1,stroke-width:2px
style Server fill:none,stroke:#388e3c,stroke-width:2px
style MQTT fill:none,stroke:#7b1fa2,stroke-width:3px
style Collector fill:none,stroke:#388e3c,stroke-width:2px
style API fill:none,stroke:#1976d2,stroke-width:2px
style Web fill:none,stroke:#ffa000,stroke-width:2px
```
**On each receiver node (Raspberry Pi, etc.):**
@@ -151,33 +171,26 @@ This architecture allows:
- Community members to contribute coverage with minimal setup
- The central server to be hosted anywhere with internet access
## Quick Start
## Deployment
### Using Docker Compose (Recommended)
### Docker Compose Profiles
Docker Compose uses **profiles** to select which services to run:
| Profile | Services | Use Case |
|---------|----------|----------|
| `core` | collector, api, web | Central server infrastructure |
| `core` | db-migrate, collector, api, web | Central server infrastructure |
| `receiver` | interface-receiver | Receiver node (events to MQTT) |
| `sender` | interface-sender | Sender node (MQTT to device) |
| `mqtt` | mosquitto broker | Local MQTT broker (optional) |
| `mock` | interface-mock-receiver | Testing without hardware |
| `migrate` | db-migrate | One-time database migration |
| `seed` | seed | One-time seed data import |
| `metrics` | prometheus, alertmanager | Prometheus metrics and alerting |
**Note:** Most deployments connect to an external MQTT broker. Add `--profile mqtt` only if you need a local broker.
```bash
# Clone the repository
git clone https://github.com/ipnet-mesh/meshcore-hub.git
cd meshcore-hub
# Copy and configure environment
cp .env.example .env
# Edit .env with your settings (API keys, serial port, network info)
# Create database schema
docker compose --profile migrate run --rm db-migrate
@@ -200,7 +213,7 @@ docker compose logs -f
docker compose down
```
#### Serial Device Access
### Serial Device Access
For production with real MeshCore devices, ensure the serial port is accessible:
@@ -216,13 +229,25 @@ SERIAL_PORT=/dev/ttyUSB0
SERIAL_PORT_SENDER=/dev/ttyUSB1 # If using separate sender device
```
**Tip:** If USB devices reconnect as different numeric IDs (e.g., `/dev/ttyUSB0` becomes `/dev/ttyUSB1`), use the stable `/dev/serial/by-id/` path instead:
```bash
# List available devices by ID
ls -la /dev/serial/by-id/
# Example output:
# usb-Silicon_Labs_CP2102N_USB_to_UART_Bridge_abc123-if00-port0 -> ../../ttyUSB0
# Configure using the stable ID
SERIAL_PORT=/dev/serial/by-id/usb-Silicon_Labs_CP2102N_USB_to_UART_Bridge_abc123-if00-port0
```
### Manual Installation
```bash
# Create virtual environment
python -m venv .venv
source .venv/bin/activate # Linux/macOS
# .venv\Scripts\activate # Windows
source .venv/bin/activate
# Install the package
pip install -e ".[dev]"
@@ -231,63 +256,12 @@ pip install -e ".[dev]"
meshcore-hub db upgrade
# Start components (in separate terminals)
meshcore-hub interface --mode receiver --port /dev/ttyUSB0
meshcore-hub interface receiver --port /dev/ttyUSB0
meshcore-hub collector
meshcore-hub api
meshcore-hub web
```
## Updating an Existing Installation
To update MeshCore Hub to the latest version:
```bash
# Navigate to your installation directory
cd meshcore-hub
# Pull the latest code
git pull
# Pull latest Docker images
docker compose --profile all pull
# Recreate and restart services
# For receiver/sender only installs:
docker compose --profile receiver up -d --force-recreate
# For core services with MQTT:
docker compose --profile mqtt --profile core up -d --force-recreate
# For core services without local MQTT:
docker compose --profile core up -d --force-recreate
# For complete stack (all services):
docker compose --profile mqtt --profile core --profile receiver up -d --force-recreate
# View logs to verify update
docker compose logs -f
```
**Note:** Database migrations run automatically on collector startup, so no manual migration step is needed when using Docker.
For manual installations:
```bash
# Pull latest code
git pull
# Activate virtual environment
source .venv/bin/activate
# Update dependencies
pip install -e ".[dev]"
# Run database migrations
meshcore-hub db upgrade
# Restart your services
```
## Configuration
All components are configured via environment variables. Create a `.env` file or export variables:
@@ -297,28 +271,68 @@ All components are configured via environment variables. Create a `.env` file or
| Variable | Default | Description |
|----------|---------|-------------|
| `LOG_LEVEL` | `INFO` | Logging level (DEBUG, INFO, WARNING, ERROR) |
| `DATA_HOME` | `./data` | Base directory for runtime data |
| `SEED_HOME` | `./seed` | Directory containing seed data files |
| `MQTT_HOST` | `localhost` | MQTT broker hostname |
| `MQTT_PORT` | `1883` | MQTT broker port |
| `MQTT_USERNAME` | *(none)* | MQTT username (optional) |
| `MQTT_PASSWORD` | *(none)* | MQTT password (optional) |
| `MQTT_PREFIX` | `meshcore` | Topic prefix for all MQTT messages |
| `MQTT_TLS` | `false` | Enable TLS/SSL for MQTT connection |
| `MQTT_TRANSPORT` | `tcp` | MQTT transport (`tcp` or `websockets`) |
| `MQTT_WS_PATH` | `/mqtt` | MQTT WebSocket path (used when `MQTT_TRANSPORT=websockets`) |
### Interface Settings
| Variable | Default | Description |
|----------|---------|-------------|
| `INTERFACE_MODE` | `RECEIVER` | Operating mode (RECEIVER or SENDER) |
| `SERIAL_PORT` | `/dev/ttyUSB0` | Serial port for MeshCore device |
| `SERIAL_BAUD` | `115200` | Serial baud rate |
| `MESHCORE_DEVICE_NAME` | *(none)* | Device/node name set on startup (broadcast in advertisements) |
| `MOCK_DEVICE` | `false` | Use mock device for testing |
| `NODE_ADDRESS` | *(none)* | Override for device public key (64-char hex string) |
| `NODE_ADDRESS_SENDER` | *(none)* | Override for sender device public key |
| `CONTACT_CLEANUP_ENABLED` | `true` | Enable automatic removal of stale contacts from companion node |
| `CONTACT_CLEANUP_DAYS` | `7` | Remove contacts not advertised for this many days |
### Collector Settings
| Variable | Default | Description |
|----------|---------|-------------|
| `DATABASE_URL` | `sqlite:///{data_home}/collector/meshcore.db` | SQLAlchemy database URL |
| `SEED_HOME` | `./seed` | Directory containing seed data files (node_tags.yaml, members.yaml) |
| `COLLECTOR_INGEST_MODE` | `native` | Ingest mode (`native` or `letsmesh_upload`) |
| `COLLECTOR_LETSMESH_DECODER_ENABLED` | `true` | Enable external LetsMesh packet decoding |
| `COLLECTOR_LETSMESH_DECODER_COMMAND` | `meshcore-decoder` | Decoder CLI command |
| `COLLECTOR_LETSMESH_DECODER_KEYS` | *(none)* | Additional decoder channel keys (`label=hex`, `label:hex`, or `hex`) |
| `COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS` | `2.0` | Timeout per decoder invocation |
#### Webhook Configuration
#### LetsMesh Upload Compatibility Mode
When `COLLECTOR_INGEST_MODE=letsmesh_upload`, the collector subscribes to:
- `<prefix>/+/packets`
- `<prefix>/+/status`
- `<prefix>/+/internal`
Normalization behavior:
- `status` packets are stored as informational `letsmesh_status` events and are not mapped to `advertisement` rows.
- Decoder payload type `4` is mapped to `advertisement` when node identity metadata is present.
- Decoder payload type `11` (control discover response) is mapped to `contact`.
- Decoder payload type `9` is mapped to `trace_data`.
- Decoder payload type `8` is mapped to informational `path_updated` events.
- Decoder payload type `1` can map to native response events (`telemetry_response`, `battery`, `path_updated`, `status_response`) when decrypted structured content is available.
- `packet_type=5` packets are mapped to `channel_msg_recv`.
- `packet_type=1`, `2`, and `7` packets are mapped to `contact_msg_recv` when decryptable text is available.
- For channel packets, if a channel key is available, a channel label is attached (for example `Public` or `#test`) for UI display.
- In the messages feed and dashboard channel sections, known channel indexes are preferred for labels (`17 -> Public`, `217 -> #test`) to avoid stale channel-name mismatches.
- Additional channel names are loaded from `COLLECTOR_LETSMESH_DECODER_KEYS` when entries are provided as `label=hex` (for example `bot=<key>`).
- Decoder-advertisement packets with location metadata update node GPS (`lat/lon`) for map display.
- This keeps advertisement listings closer to native mode behavior (node advert traffic only, not observer status telemetry).
- Packets without decryptable message text are kept as informational `letsmesh_packet` events and are not shown in the messages feed; when decode succeeds the decoded JSON is attached to those packet log events.
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message` before storage; receiver/observer names are never used as sender fallback.
- The collector keeps built-in keys for `Public` and `#test`, and merges any additional keys from `COLLECTOR_LETSMESH_DECODER_KEYS`.
- Docker runtime installs `@michaelhart/meshcore-decoder@0.2.7` and applies `patches/@michaelhart+meshcore-decoder+0.2.7.patch` via `patch-package` for Node compatibility.
### Webhooks
The collector can forward certain events to external HTTP endpoints:
@@ -329,7 +343,9 @@ The collector can forward certain events to external HTTP endpoints:
| `WEBHOOK_MESSAGE_URL` | *(none)* | Webhook URL for all message events |
| `WEBHOOK_MESSAGE_SECRET` | *(none)* | Secret for message webhook |
| `WEBHOOK_CHANNEL_MESSAGE_URL` | *(none)* | Override URL for channel messages only |
| `WEBHOOK_CHANNEL_MESSAGE_SECRET` | *(none)* | Secret for channel message webhook |
| `WEBHOOK_DIRECT_MESSAGE_URL` | *(none)* | Override URL for direct messages only |
| `WEBHOOK_DIRECT_MESSAGE_SECRET` | *(none)* | Secret for direct message webhook |
| `WEBHOOK_TIMEOUT` | `10.0` | Request timeout in seconds |
| `WEBHOOK_MAX_RETRIES` | `3` | Max retry attempts on failure |
| `WEBHOOK_RETRY_BACKOFF` | `2.0` | Exponential backoff multiplier |
@@ -343,6 +359,18 @@ Webhook payload format:
}
```
### Data Retention
The collector automatically cleans up old event data and inactive nodes:
| Variable | Default | Description |
|----------|---------|-------------|
| `DATA_RETENTION_ENABLED` | `true` | Enable automatic cleanup of old events |
| `DATA_RETENTION_DAYS` | `30` | Days to retain event data |
| `DATA_RETENTION_INTERVAL_HOURS` | `24` | Hours between cleanup runs |
| `NODE_CLEANUP_ENABLED` | `true` | Enable removal of inactive nodes |
| `NODE_CLEANUP_DAYS` | `7` | Remove nodes not seen for this many days |
### API Settings
| Variable | Default | Description |
@@ -351,6 +379,8 @@ Webhook payload format:
| `API_PORT` | `8000` | API port |
| `API_READ_KEY` | *(none)* | Read-only API key |
| `API_ADMIN_KEY` | *(none)* | Admin API key (required for commands) |
| `METRICS_ENABLED` | `true` | Enable Prometheus metrics endpoint at `/metrics` |
| `METRICS_CACHE_TTL` | `60` | Seconds to cache metrics output (reduces database load) |
### Web Dashboard Settings
@@ -359,62 +389,173 @@ Webhook payload format:
| `WEB_HOST` | `0.0.0.0` | Web server bind address |
| `WEB_PORT` | `8080` | Web server port |
| `API_BASE_URL` | `http://localhost:8000` | API endpoint URL |
| `API_KEY` | *(none)* | API key for web dashboard queries (optional) |
| `WEB_THEME` | `dark` | Default theme (`dark` or `light`). Users can override via theme toggle in navbar. |
| `WEB_LOCALE` | `en` | Locale/language for the web dashboard (e.g., `en`, `es`, `fr`) |
| `WEB_DATETIME_LOCALE` | `en-US` | Locale used for date formatting in the web dashboard (e.g., `en-US` for MM/DD/YYYY, `en-GB` for DD/MM/YYYY). |
| `WEB_AUTO_REFRESH_SECONDS` | `30` | Auto-refresh interval in seconds for list pages (0 to disable) |
| `WEB_ADMIN_ENABLED` | `false` | Enable admin interface at /a/ (requires auth proxy: `X-Forwarded-User`/`X-Auth-Request-User` or forwarded `Authorization: Basic ...`) |
| `WEB_TRUSTED_PROXY_HOSTS` | `*` | Comma-separated list of trusted proxy hosts for admin authentication headers. Default: `*` (all hosts). Recommended: set to your reverse proxy IP in production. A startup warning is emitted when using the default `*` with admin enabled. |
| `TZ` | `UTC` | Timezone for displaying dates/times (e.g., `America/New_York`, `Europe/London`) |
| `NETWORK_DOMAIN` | *(none)* | Network domain name (optional) |
| `NETWORK_NAME` | `MeshCore Network` | Display name for the network |
| `NETWORK_CITY` | *(none)* | City where network is located |
| `NETWORK_COUNTRY` | *(none)* | Country code (ISO 3166-1 alpha-2) |
| `NETWORK_RADIO_CONFIG` | *(none)* | Radio config (comma-delimited: profile,freq,bw,sf,cr,power) |
| `NETWORK_WELCOME_TEXT` | *(none)* | Custom welcome text for homepage |
| `NETWORK_CONTACT_EMAIL` | *(none)* | Contact email address |
| `NETWORK_CONTACT_DISCORD` | *(none)* | Discord server link |
| `NETWORK_CONTACT_GITHUB` | *(none)* | GitHub repository URL |
| `NETWORK_CONTACT_YOUTUBE` | *(none)* | YouTube channel URL |
| `CONTENT_HOME` | `./content` | Directory containing custom content (pages/, media/) |
## CLI Reference
Timezone handling note:
- API timestamps that omit an explicit timezone suffix are treated as UTC before rendering in the configured `TZ`.
#### Nginx Proxy Manager (NPM) Admin Setup
Use two hostnames so the public map/site stays open while admin stays protected:
1. Public host: no Access List (normal users).
2. Admin host: Access List enabled (operators only).
Both proxy hosts should forward to the same web container:
- Scheme: `http`
- Forward Hostname/IP: your MeshCore Hub host
- Forward Port: `18080` (or your mapped web port)
- Websockets Support: `ON`
- Block Common Exploits: `ON`
Important:
- Do not host this app under a subpath (for example `/meshcore`); proxy it at `/`.
- `WEB_ADMIN_ENABLED` must be `true`.
In NPM, for the **admin host**, paste this in the `Advanced` field:
```nginx
# Forward authenticated identity for MeshCore Hub admin checks
proxy_set_header Authorization $http_authorization;
proxy_set_header X-Forwarded-User $remote_user;
proxy_set_header X-Auth-Request-User $remote_user;
proxy_set_header X-Forwarded-Email "";
proxy_set_header X-Forwarded-Groups "";
```
Then attach your NPM Access List (Basic auth users) to that admin host.
Verify auth forwarding:
```bash
# Show help
meshcore-hub --help
curl -s -u 'admin:password' "https://admin.example.com/config.js?t=$(date +%s)" \
| grep -o '"is_authenticated":[^,]*'
```
# Interface component
meshcore-hub interface --mode receiver --port /dev/ttyUSB0
meshcore-hub interface --mode receiver --device-name "Gateway Node" # Set device name
meshcore-hub interface --mode sender --mock # Use mock device
Expected:
# Collector component
meshcore-hub collector # Run collector (auto-seeds on startup)
meshcore-hub collector seed # Import all seed data from SEED_HOME
meshcore-hub collector import-tags # Import node tags from SEED_HOME/node_tags.yaml
meshcore-hub collector import-tags /path/to/file.yaml # Import from specific file
meshcore-hub collector import-members # Import members from SEED_HOME/members.yaml
meshcore-hub collector import-members /path/to/file.yaml # Import from specific file
```text
"is_authenticated": true
```
# API component
meshcore-hub api --host 0.0.0.0 --port 8000
If it still shows `false`, check:
1. You are using the admin hostname, not the public hostname.
2. The Access List is attached to that admin host.
3. The `Advanced` block above is present exactly.
4. `WEB_ADMIN_ENABLED=true` is loaded in the running web container.
# Web dashboard
meshcore-hub web --port 8080 --network-name "My Network"
#### Feature Flags
# Database management
meshcore-hub db upgrade # Run migrations
meshcore-hub db downgrade # Rollback one migration
meshcore-hub db current # Show current revision
Control which pages are visible in the web dashboard. Disabled features are fully hidden: removed from navigation, return 404 on their routes, and excluded from sitemap/robots.txt.
| Variable | Default | Description |
|----------|---------|-------------|
| `FEATURE_DASHBOARD` | `true` | Enable the `/dashboard` page |
| `FEATURE_NODES` | `true` | Enable the `/nodes` pages (list, detail, short links) |
| `FEATURE_ADVERTISEMENTS` | `true` | Enable the `/advertisements` page |
| `FEATURE_MESSAGES` | `true` | Enable the `/messages` page |
| `FEATURE_MAP` | `true` | Enable the `/map` page and `/map/data` endpoint |
| `FEATURE_MEMBERS` | `true` | Enable the `/members` page |
| `FEATURE_PAGES` | `true` | Enable custom markdown pages |
**Dependencies:** Dashboard auto-disables when all of Nodes/Advertisements/Messages are disabled. Map auto-disables when Nodes is disabled.
### Custom Content
The web dashboard supports custom content including markdown pages and media files. Content is organized in subdirectories:
Custom logo options:
- `logo.svg` — full-color logo, displayed as-is in both themes (no automatic darkening)
- `logo-invert.svg` — monochrome/two-tone logo, automatically darkened in light mode for visibility
```
content/
├── pages/ # Custom markdown pages
│ └── about.md
└── media/ # Custom media files
└── images/
├── logo.svg # Full-color custom logo (default)
└── logo-invert.svg # Monochrome custom logo (darkened in light mode)
```
**Setup:**
```bash
# Create content directory structure
mkdir -p content/pages content/media
# Create a custom page
cat > content/pages/about.md << 'EOF'
---
title: About Us
slug: about
menu_order: 10
---
# About Our Network
Welcome to our MeshCore mesh network!
## Getting Started
1. Get a compatible LoRa device
2. Flash MeshCore firmware
3. Configure your radio settings
EOF
```
**Frontmatter fields:**
| Field | Default | Description |
|-------|---------|-------------|
| `title` | Filename titlecased | Browser tab title and navigation link text (not rendered on page) |
| `slug` | Filename without `.md` | URL path (e.g., `about` → `/pages/about`) |
| `menu_order` | `100` | Sort order in navigation (lower = earlier) |
The markdown content is rendered as-is, so include your own `# Heading` if desired.
Pages automatically appear in the navigation menu and sitemap. With Docker, mount the content directory:
```yaml
# docker-compose.yml (already configured)
volumes:
- ${CONTENT_HOME:-./content}:/content:ro
environment:
- CONTENT_HOME=/content
```
## Seed Data
The collector supports seeding the database with node tags and network members on startup. Seed files are read from the `SEED_HOME` directory (default: `./seed`).
The database can be seeded with node tags and network members from YAML files in the `SEED_HOME` directory (default: `./seed`).
### Automatic Seeding
#### Running the Seed Process
When the collector starts, it automatically imports seed data from YAML files if they exist:
- `{SEED_HOME}/node_tags.yaml` - Node tag definitions
- `{SEED_HOME}/members.yaml` - Network member definitions
### Manual Seeding
Seeding is a separate process and must be run explicitly:
```bash
# Native CLI
meshcore-hub collector seed
# With Docker Compose
docker compose --profile seed up
```
### Directory Structure
This imports data from the following files (if they exist):
- `{SEED_HOME}/node_tags.yaml` - Node tag definitions
- `{SEED_HOME}/members.yaml` - Network member definitions
#### Directory Structure
```
seed/ # SEED_HOME (seed data files)
@@ -428,73 +569,62 @@ data/ # DATA_HOME (runtime data)
Example seed files are provided in `example/seed/`.
## Node Tags
### Node Tags
Node tags allow you to attach custom metadata to nodes (e.g., location, role, owner). Tags are stored in the database and returned with node data via the API.
### Node Tags YAML Format
#### Node Tags YAML Format
Tags are keyed by public key in YAML format:
```yaml
# Each key is a 64-character hex public key
0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef:
friendly_name: Gateway Node
name: Gateway Node
description: Main network gateway
role: gateway
lat: 37.7749
lon: -122.4194
is_online: true
member_id: alice
fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210:
friendly_name: Oakland Repeater
altitude: 150
location:
value: "37.8044,-122.2712"
type: coordinate
name: Oakland Repeater
elevation: 150
```
Tag values can be:
- **YAML primitives** (auto-detected type): strings, numbers, booleans
- **Explicit type** (for special types like coordinate):
- **Explicit type** (when you need to force a specific type):
```yaml
location:
value: "37.7749,-122.4194"
type: coordinate
altitude:
value: "150"
type: number
```
Supported types: `string`, `number`, `boolean`, `coordinate`
Supported types: `string`, `number`, `boolean`
### Import Tags Manually
```bash
# Import from default location ({SEED_HOME}/node_tags.yaml)
meshcore-hub collector import-tags
# Import from specific file
meshcore-hub collector import-tags /path/to/node_tags.yaml
# Skip tags for nodes that don't exist
meshcore-hub collector import-tags --no-create-nodes
```
## Network Members
### Network Members
Network members represent the people operating nodes in your network. Members can optionally be linked to nodes via their public key.
### Members YAML Format
#### Members YAML Format
```yaml
members:
- name: John Doe
callsign: N0CALL
role: Network Operator
description: Example member entry
contact: john@example.com
public_key: 0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
- member_id: walshie86
name: Walshie
callsign: Walshie86
role: member
description: IPNet Member
- member_id: craig
name: Craig
callsign: M7XCN
role: member
description: IPNet Member
```
| Field | Required | Description |
|-------|----------|-------------|
| `member_id` | Yes | Unique identifier for the member |
| `name` | Yes | Member's display name |
| `callsign` | No | Amateur radio callsign |
| `role` | No | Member's role in the network |
@@ -502,44 +632,6 @@ members:
| `contact` | No | Contact information |
| `public_key` | No | Associated node public key (64-char hex) |
### Import Members Manually
```bash
# Import from default location ({SEED_HOME}/members.yaml)
meshcore-hub collector import-members
# Import from specific file
meshcore-hub collector import-members /path/to/members.yaml
```
### Managing Tags via API
Tags can also be managed via the REST API:
```bash
# List tags for a node
curl http://localhost:8000/api/v1/nodes/{public_key}/tags
# Create a tag (requires admin key)
curl -X POST \
-H "Authorization: Bearer <API_ADMIN_KEY>" \
-H "Content-Type: application/json" \
-d '{"key": "location", "value": "Building A"}' \
http://localhost:8000/api/v1/nodes/{public_key}/tags
# Update a tag
curl -X PUT \
-H "Authorization: Bearer <API_ADMIN_KEY>" \
-H "Content-Type: application/json" \
-d '{"value": "Building B"}' \
http://localhost:8000/api/v1/nodes/{public_key}/tags/location
# Delete a tag
curl -X DELETE \
-H "Authorization: Bearer <API_ADMIN_KEY>" \
http://localhost:8000/api/v1/nodes/{public_key}/tags/location
```
## API Documentation
When running, the API provides interactive documentation at:
@@ -552,6 +644,7 @@ Health check endpoints are also available:
- **Health**: http://localhost:8000/health
- **Ready**: http://localhost:8000/health/ready (includes database check)
- **Metrics**: http://localhost:8000/metrics (Prometheus format)
### Authentication
@@ -575,15 +668,21 @@ curl -X POST \
|--------|----------|-------------|
| GET | `/api/v1/nodes` | List all known nodes |
| GET | `/api/v1/nodes/{public_key}` | Get node details |
| GET | `/api/v1/nodes/prefix/{prefix}` | Get node by public key prefix |
| GET | `/api/v1/nodes/{public_key}/tags` | Get node tags |
| POST | `/api/v1/nodes/{public_key}/tags` | Create node tag |
| GET | `/api/v1/messages` | List messages with filters |
| GET | `/api/v1/advertisements` | List advertisements |
| GET | `/api/v1/telemetry` | List telemetry data |
| GET | `/api/v1/trace-paths` | List trace paths |
| GET | `/api/v1/members` | List network members |
| POST | `/api/v1/commands/send-message` | Send direct message |
| POST | `/api/v1/commands/send-channel-message` | Send channel message |
| POST | `/api/v1/commands/send-advertisement` | Send advertisement |
| GET | `/api/v1/dashboard/stats` | Get network statistics |
| GET | `/api/v1/dashboard/activity` | Get daily advertisement activity |
| GET | `/api/v1/dashboard/message-activity` | Get daily message activity |
| GET | `/api/v1/dashboard/node-count` | Get cumulative node count history |
## Development
@@ -620,14 +719,8 @@ pytest -k "test_list"
### Code Quality
```bash
# Format code
black src/ tests/
# Lint
flake8 src/ tests/
# Type check
mypy src/
# Run all code quality checks (formatting, linting, type checking)
pre-commit run --all-files
```
### Creating Database Migrations
@@ -653,14 +746,27 @@ meshcore-hub/
│ ├── collector/ # MQTT event collector
│ ├── api/ # REST API
│ └── web/ # Web dashboard
│ ├── templates/ # Jinja2 templates (SPA shell)
│ └── static/
│ ├── js/spa/ # SPA frontend (ES modules, lit-html)
│ └── locales/ # Translation files (en.json, languages.md)
├── tests/ # Test suite
├── alembic/ # Database migrations
├── etc/ # Configuration files (mosquitto.conf)
├── example/ # Example files for testing
── seed/ # Example seed data files
├── node_tags.yaml # Example node tags
└── members.yaml # Example network members
├── etc/ # Configuration files (MQTT, Prometheus, Alertmanager)
├── example/ # Example files for reference
── seed/ # Example seed data files
├── node_tags.yaml # Example node tags
└── members.yaml # Example network members
│ └── content/ # Example custom content
│ ├── pages/ # Example custom pages
│ │ └── join.md # Example join page
│ └── media/ # Example media files
│ └── images/ # Custom images
├── seed/ # Seed data directory (SEED_HOME, copy from example/seed/)
├── content/ # Custom content directory (CONTENT_HOME, optional)
│ ├── pages/ # Custom markdown pages
│ └── media/ # Custom media files
│ └── images/ # Custom images (logo.svg/png/jpg/jpeg/webp replace default logo)
├── data/ # Runtime data directory (DATA_HOME, created at runtime)
├── Dockerfile # Docker build configuration
├── docker-compose.yml # Docker Compose services
@@ -684,7 +790,7 @@ meshcore-hub/
1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Make your changes
4. Run tests and linting (`pytest && black . && flake8`)
4. Run tests and quality checks (`pytest && pre-commit run --all-files`)
5. Commit your changes (`git commit -m 'Add amazing feature'`)
6. Push to the branch (`git push origin feature/amazing-feature`)
7. Open a Pull Request

View File

@@ -45,15 +45,19 @@ Node advertisements announcing presence and metadata.
"public_key": "string (64 hex chars)",
"name": "string (optional)",
"adv_type": "string (optional)",
"flags": "integer (optional)"
"flags": "integer (optional)",
"lat": "number (optional)",
"lon": "number (optional)"
}
```
**Field Descriptions**:
- `public_key`: Node's full 64-character hexadecimal public key (required)
- `name`: Node name/alias (e.g., "Gateway-01", "Alice")
- `adv_type`: Node type - one of: `"chat"`, `"repeater"`, `"room"`, `"none"`
- `adv_type`: Node type - common values: `"chat"`, `"repeater"`, `"room"`, `"companion"` (other values may appear from upstream feeds and are normalized by the collector when possible)
- `flags`: Node capability/status flags (bitmask)
- `lat`: GPS latitude when provided by decoder metadata
- `lon`: GPS longitude when provided by decoder metadata
**Example**:
```json
@@ -61,7 +65,9 @@ Node advertisements announcing presence and metadata.
"public_key": "4767c2897c256df8d85a5fa090574284bfd15b92d47359741b0abd5098ed30c4",
"name": "Gateway-01",
"adv_type": "repeater",
"flags": 218
"flags": 218,
"lat": 42.470001,
"lon": -71.330001
}
```
@@ -90,7 +96,7 @@ Direct/private messages between two nodes.
```
**Field Descriptions**:
- `pubkey_prefix`: First 12 characters of sender's public key
- `pubkey_prefix`: First 12 characters of sender's public key (or source hash prefix in compatibility ingest modes)
- `path_len`: Number of hops message traveled
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
- `signature`: Message signature (8 hex chars) when `txt_type=2`
@@ -128,7 +134,9 @@ Group/broadcast messages on specific channels.
**Payload Schema**:
```json
{
"channel_idx": "integer",
"channel_idx": "integer (optional)",
"channel_name": "string (optional)",
"pubkey_prefix": "string (12 chars, optional)",
"path_len": "integer (optional)",
"txt_type": "integer (optional)",
"signature": "string (optional)",
@@ -139,7 +147,9 @@ Group/broadcast messages on specific channels.
```
**Field Descriptions**:
- `channel_idx`: Channel number (0-255)
- `channel_idx`: Channel number (0-255) when available
- `channel_name`: Channel display label (e.g., `"Public"`, `"#test"`) when available
- `pubkey_prefix`: First 12 characters of sender's public key when available
- `path_len`: Number of hops message traveled
- `txt_type`: Message type indicator (0=plain, 2=signed, etc.)
- `signature`: Message signature (8 hex chars) when `txt_type=2`
@@ -166,6 +176,25 @@ Group/broadcast messages on specific channels.
- Send only text: `$.data.text`
- Send channel + text: `$.data.[channel_idx,text]`
**Compatibility ingest note**:
- In LetsMesh upload compatibility mode, packet type `5` is normalized to `CHANNEL_MSG_RECV` and packet types `1`, `2`, and `7` are normalized to `CONTACT_MSG_RECV` when decryptable text is available.
- LetsMesh packets without decryptable message text are treated as informational `letsmesh_packet` events instead of message events.
- For UI labels, known channel indexes are mapped (`17 -> Public`, `217 -> #test`) and preferred over ambiguous/stale channel-name hints.
- Additional channel labels can be provided through `COLLECTOR_LETSMESH_DECODER_KEYS` using `label=hex` entries.
- When decoder output includes a human sender (`payload.decoded.decrypted.sender`), message text is normalized to `Name: Message`; sender identity remains unknown when only hash/prefix metadata is available.
**Compatibility ingest note (advertisements)**:
- In LetsMesh upload compatibility mode, `status` feed payloads are persisted as informational `letsmesh_status` events and are not normalized to `ADVERTISEMENT`.
- In LetsMesh upload compatibility mode, decoded payload type `4` is normalized to `ADVERTISEMENT` when node identity metadata is present.
- Payload type `4` location metadata (`appData.location.latitude/longitude`) is mapped to node `lat/lon` for map rendering.
- This keeps advertisement persistence aligned with native mode expectations (advertisement traffic only).
**Compatibility ingest note (non-message structured events)**:
- Decoded payload type `9` is normalized to `TRACE_DATA` (`traceTag`, flags, auth, path hashes, and SNR values).
- Decoded payload type `11` (`Control/NodeDiscoverResp`) is normalized to `contact` events for node upsert parity.
- Decoded payload type `8` is normalized to informational `PATH_UPDATED` events (`hop_count` + path hashes).
- Decoded payload type `1` can be normalized to `TELEMETRY_RESPONSE`, `BATTERY`, `PATH_UPDATED`, or `STATUS_RESPONSE` when decrypted response content is structured and parseable.
---
## Persisted Events (Non-Webhook)
@@ -196,7 +225,7 @@ Network trace path results showing route and signal strength.
- `path_len`: Length of the path
- `flags`: Trace flags/options
- `auth`: Authentication/validation data
- `path_hashes`: Array of 2-character node hash identifiers (ordered by hops)
- `path_hashes`: Array of hex-encoded node hash identifiers, variable length (e.g., `"4a"` for single-byte, `"b3fa"` for multibyte), ordered by hops
- `snr_values`: Array of SNR values corresponding to each hop
- `hop_count`: Total number of hops
@@ -207,12 +236,14 @@ Network trace path results showing route and signal strength.
"path_len": 3,
"flags": 0,
"auth": 1,
"path_hashes": ["4a", "b3", "fa"],
"path_hashes": ["4a", "b3fa", "02"],
"snr_values": [25.3, 18.7, 12.4],
"hop_count": 3
}
```
**Note**: MeshCore firmware v1.14+ supports multibyte path hashes. Older nodes use single-byte (2-character) hashes. Mixed-length hash arrays are expected in heterogeneous networks where nodes run different firmware versions.
**Webhook Trigger**: No
**REST API**: `GET /api/v1/trace-paths`

View File

@@ -753,6 +753,9 @@ This document tracks implementation progress for the MeshCore Hub project. Each
### Decisions Made
*(Record architectural decisions and answers to clarifying questions here)*
- [x] LetsMesh/native advertisement parity: in `letsmesh_upload` mode, observer `status` feed stays informational (`letsmesh_status`) and does not populate `advertisements`.
- [x] LetsMesh advertisement persistence source: decoded packet payload type `4` maps to `advertisement`; payload type `11` maps to `contact` parity updates.
- [x] LetsMesh native-event parity extensions: payload type `9` maps to `trace_data`, payload type `8` maps to informational `path_updated`, and payload type `1` can map to response-style native events when decryptable structured content exists.
- [ ] Q1 (MQTT Broker):
- [ ] Q2 (Database):
- [ ] Q3 (Web Dashboard Separation):

View File

@@ -0,0 +1,39 @@
"""Make Node.last_seen nullable
Revision ID: 0b944542ccd8
Revises: 005
Create Date: 2025-12-08 00:07:49.891245+00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "0b944542ccd8"
down_revision: Union[str, None] = "005"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Relax ``nodes.last_seen`` to allow NULL.

    Nodes imported through contact sync have never actually been
    observed on the mesh, so they legitimately carry no last-seen
    timestamp.
    """
    with op.batch_alter_table("nodes", schema=None) as nodes_batch:
        nodes_batch.alter_column(
            "last_seen",
            existing_type=sa.DATETIME(),
            nullable=True,
        )
def downgrade() -> None:
    """Restore the NOT NULL constraint on ``nodes.last_seen``.

    WARNING: this fails if any row currently stores NULL in
    ``last_seen``.
    """
    with op.batch_alter_table("nodes", schema=None) as nodes_batch:
        nodes_batch.alter_column(
            "last_seen",
            existing_type=sa.DATETIME(),
            nullable=False,
        )

View File

@@ -0,0 +1,111 @@
"""Add member_id field to members table
Revision ID: 03b9b2451bd9
Revises: 0b944542ccd8
Create Date: 2025-12-08 14:34:30.337799+00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "03b9b2451bd9"
down_revision: Union[str, None] = "0b944542ccd8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add ``members.member_id`` and convert event-hash indexes to constraints.

    Step order matters: ``member_id`` is added as nullable first, existing
    rows are backfilled with a name-derived identifier via raw SQL, and
    only then is the column made non-nullable and uniquely indexed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("advertisements", schema=None) as batch_op:
        # Replace the unique *index* on event_hash with a named unique
        # *constraint*.
        batch_op.drop_index(batch_op.f("ix_advertisements_event_hash_unique"))
        batch_op.create_unique_constraint(
            "uq_advertisements_event_hash", ["event_hash"]
        )
    with op.batch_alter_table("members", schema=None) as batch_op:
        # Add member_id as nullable first to handle existing data
        batch_op.add_column(
            sa.Column("member_id", sa.String(length=100), nullable=True)
        )
    # Generate member_id for existing members based on their name
    # Convert name to lowercase and replace spaces with underscores
    connection = op.get_bind()
    connection.execute(
        sa.text(
            "UPDATE members SET member_id = LOWER(REPLACE(name, ' ', '_')) WHERE member_id IS NULL"
        )
    )
    with op.batch_alter_table("members", schema=None) as batch_op:
        # Now make it non-nullable and add unique index
        batch_op.alter_column("member_id", nullable=False)
        batch_op.drop_index(batch_op.f("ix_members_name"))
        batch_op.create_index(
            batch_op.f("ix_members_member_id"), ["member_id"], unique=True
        )
    with op.batch_alter_table("messages", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_messages_event_hash_unique"))
        batch_op.create_unique_constraint("uq_messages_event_hash", ["event_hash"])
    with op.batch_alter_table("nodes", schema=None) as batch_op:
        # public_key lookups are now expected to be unique per node.
        batch_op.drop_index(batch_op.f("ix_nodes_public_key"))
        batch_op.create_index(
            batch_op.f("ix_nodes_public_key"), ["public_key"], unique=True
        )
    with op.batch_alter_table("telemetry", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_telemetry_event_hash_unique"))
        batch_op.create_unique_constraint("uq_telemetry_event_hash", ["event_hash"])
    with op.batch_alter_table("trace_paths", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_trace_paths_event_hash_unique"))
        batch_op.create_unique_constraint("uq_trace_paths_event_hash", ["event_hash"])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert the member_id addition and restore event-hash unique indexes.

    Mirrors upgrade() in reverse: the named unique constraints on
    ``event_hash`` are replaced by the original unique indexes, the
    ``nodes.public_key`` index goes back to non-unique, and
    ``members.member_id`` is dropped (the generated identifiers are lost
    on downgrade).

    Fix: ``create_index`` takes a boolean ``unique`` flag; the
    autogenerated ``unique=1`` only worked because it is truthy, and is
    inconsistent with the ``unique=True``/``unique=False`` literals used
    elsewhere in this file.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("trace_paths", schema=None) as batch_op:
        batch_op.drop_constraint("uq_trace_paths_event_hash", type_="unique")
        batch_op.create_index(
            batch_op.f("ix_trace_paths_event_hash_unique"),
            ["event_hash"],
            unique=True,
        )
    with op.batch_alter_table("telemetry", schema=None) as batch_op:
        batch_op.drop_constraint("uq_telemetry_event_hash", type_="unique")
        batch_op.create_index(
            batch_op.f("ix_telemetry_event_hash_unique"),
            ["event_hash"],
            unique=True,
        )
    with op.batch_alter_table("nodes", schema=None) as batch_op:
        # Restore the original non-unique index on public_key.
        batch_op.drop_index(batch_op.f("ix_nodes_public_key"))
        batch_op.create_index(
            batch_op.f("ix_nodes_public_key"), ["public_key"], unique=False
        )
    with op.batch_alter_table("messages", schema=None) as batch_op:
        batch_op.drop_constraint("uq_messages_event_hash", type_="unique")
        batch_op.create_index(
            batch_op.f("ix_messages_event_hash_unique"),
            ["event_hash"],
            unique=True,
        )
    with op.batch_alter_table("members", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_members_member_id"))
        batch_op.create_index(batch_op.f("ix_members_name"), ["name"], unique=False)
        batch_op.drop_column("member_id")
    with op.batch_alter_table("advertisements", schema=None) as batch_op:
        batch_op.drop_constraint("uq_advertisements_event_hash", type_="unique")
        batch_op.create_index(
            batch_op.f("ix_advertisements_event_hash_unique"),
            ["event_hash"],
            unique=True,
        )
    # ### end Alembic commands ###

View File

@@ -0,0 +1,57 @@
"""Remove member_nodes table
Revision ID: aa1162502616
Revises: 03b9b2451bd9
Create Date: 2025-12-08 15:04:37.260923+00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "aa1162502616"
down_revision: Union[str, None] = "03b9b2451bd9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Drop the ``member_nodes`` association table.

    Node-to-member association is now expressed as a ``member_id`` tag
    on the node itself, so the join table is no longer needed.
    """
    # Drop the member_nodes table
    # Nodes are now associated with members via a 'member_id' tag on the node
    op.drop_table("member_nodes")
def downgrade() -> None:
    """Recreate the ``member_nodes`` association table for rollback.

    Restores the table definition and its three indexes only; row data
    dropped by :func:`upgrade` is not reconstructed.
    """
    # Recreate the member_nodes table if needed for rollback
    op.create_table(
        "member_nodes",
        sa.Column("id", sa.String(length=36), nullable=False),
        sa.Column("member_id", sa.String(length=36), nullable=False),
        sa.Column("public_key", sa.String(length=64), nullable=False),
        sa.Column("node_role", sa.String(length=50), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        # Deleting a member cascades to its node associations.
        sa.ForeignKeyConstraint(
            ["member_id"],
            ["members.id"],
            name=op.f("fk_member_nodes_member_id_members"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_member_nodes")),
    )
    op.create_index(
        op.f("ix_member_nodes_member_id"), "member_nodes", ["member_id"], unique=False
    )
    op.create_index(
        op.f("ix_member_nodes_public_key"), "member_nodes", ["public_key"], unique=False
    )
    # Composite index for member+key lookups; intentionally non-unique.
    op.create_index(
        "ix_member_nodes_member_public_key",
        "member_nodes",
        ["member_id", "public_key"],
        unique=False,
    )

View File

@@ -0,0 +1,37 @@
"""add lat lon columns to nodes
Revision ID: 4e2e787a1660
Revises: aa1162502616
Create Date: 2026-01-09 20:04:04.273741+00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "4e2e787a1660"
down_revision: Union[str, None] = "aa1162502616"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add nullable ``lat``/``lon`` float columns to the nodes table."""
    with op.batch_alter_table("nodes", schema=None) as nodes_batch:
        for column_name in ("lat", "lon"):
            nodes_batch.add_column(
                sa.Column(column_name, sa.Float(), nullable=True)
            )
def downgrade() -> None:
    """Remove the ``lat``/``lon`` columns from the nodes table."""
    # Columns are dropped in reverse order of their addition.
    with op.batch_alter_table("nodes", schema=None) as nodes_batch:
        for column_name in ("lon", "lat"):
            nodes_batch.drop_column(column_name)

View File

@@ -14,7 +14,7 @@ services:
- "${MQTT_EXTERNAL_PORT:-1883}:1883"
- "${MQTT_WS_PORT:-9001}:9001"
volumes:
- ./etc/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro
# - ./etc/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro
- mosquitto_data:/mosquitto/data
- mosquitto_log:/mosquitto/log
healthcheck:
@@ -48,6 +48,8 @@ services:
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
- MQTT_TLS=${MQTT_TLS:-false}
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
- SERIAL_PORT=${SERIAL_PORT:-/dev/ttyUSB0}
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
- NODE_ADDRESS=${NODE_ADDRESS:-}
@@ -83,6 +85,8 @@ services:
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
- MQTT_TLS=${MQTT_TLS:-false}
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
- SERIAL_PORT=${SERIAL_PORT_SENDER:-/dev/ttyUSB1}
- SERIAL_BAUD=${SERIAL_BAUD:-115200}
- NODE_ADDRESS=${NODE_ADDRESS_SENDER:-}
@@ -115,6 +119,8 @@ services:
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
- MQTT_TLS=${MQTT_TLS:-false}
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
- MOCK_DEVICE=true
- NODE_ADDRESS=${NODE_ADDRESS:-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef}
command: ["interface", "receiver", "--mock"]
@@ -138,8 +144,11 @@ services:
- all
- core
restart: unless-stopped
depends_on:
db-migrate:
condition: service_completed_successfully
volumes:
- ${DATA_HOME:-./data}:/data
- hub_data:/data
- ${SEED_HOME:-./seed}:/seed
environment:
- LOG_LEVEL=${LOG_LEVEL:-INFO}
@@ -149,10 +158,15 @@ services:
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
- MQTT_TLS=${MQTT_TLS:-false}
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
- COLLECTOR_INGEST_MODE=${COLLECTOR_INGEST_MODE:-native}
- COLLECTOR_LETSMESH_DECODER_ENABLED=${COLLECTOR_LETSMESH_DECODER_ENABLED:-true}
- COLLECTOR_LETSMESH_DECODER_COMMAND=${COLLECTOR_LETSMESH_DECODER_COMMAND:-meshcore-decoder}
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
- COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS=${COLLECTOR_LETSMESH_DECODER_TIMEOUT_SECONDS:-2.0}
- DATA_HOME=/data
- SEED_HOME=/seed
# Explicitly unset to use DATA_HOME-based default path
- DATABASE_URL=
# Webhook configuration
- WEBHOOK_ADVERTISEMENT_URL=${WEBHOOK_ADVERTISEMENT_URL:-}
- WEBHOOK_ADVERTISEMENT_SECRET=${WEBHOOK_ADVERTISEMENT_SECRET:-}
@@ -193,13 +207,14 @@ services:
- core
restart: unless-stopped
depends_on:
db-migrate:
condition: service_completed_successfully
collector:
condition: service_started
ports:
- "${API_PORT:-8000}:8000"
volumes:
# Mount data directory (uses collector/meshcore.db)
- ${DATA_HOME:-./data}:/data
- hub_data:/data
environment:
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- MQTT_HOST=${MQTT_HOST:-mqtt}
@@ -208,13 +223,15 @@ services:
- MQTT_PASSWORD=${MQTT_PASSWORD:-}
- MQTT_PREFIX=${MQTT_PREFIX:-meshcore}
- MQTT_TLS=${MQTT_TLS:-false}
- MQTT_TRANSPORT=${MQTT_TRANSPORT:-tcp}
- MQTT_WS_PATH=${MQTT_WS_PATH:-/mqtt}
- DATA_HOME=/data
# Explicitly unset to use DATA_HOME-based default path
- DATABASE_URL=
- API_HOST=0.0.0.0
- API_PORT=8000
- API_READ_KEY=${API_READ_KEY:-}
- API_ADMIN_KEY=${API_ADMIN_KEY:-}
- METRICS_ENABLED=${METRICS_ENABLED:-true}
- METRICS_CACHE_TTL=${METRICS_CACHE_TTL:-60}
command: ["api"]
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
@@ -241,12 +258,20 @@ services:
condition: service_healthy
ports:
- "${WEB_PORT:-8080}:8080"
volumes:
- ${CONTENT_HOME:-./content}:/content:ro
environment:
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- API_BASE_URL=http://api:8000
- API_KEY=${API_READ_KEY:-}
# Use ADMIN key to allow write operations from admin interface
# Falls back to READ key if ADMIN key is not set
- API_KEY=${API_ADMIN_KEY:-${API_READ_KEY:-}}
- WEB_HOST=0.0.0.0
- WEB_PORT=8080
- WEB_THEME=${WEB_THEME:-dark}
- WEB_LOCALE=${WEB_LOCALE:-en}
- WEB_DATETIME_LOCALE=${WEB_DATETIME_LOCALE:-en-US}
- WEB_ADMIN_ENABLED=${WEB_ADMIN_ENABLED:-false}
- NETWORK_NAME=${NETWORK_NAME:-MeshCore Network}
- NETWORK_CITY=${NETWORK_CITY:-}
- NETWORK_COUNTRY=${NETWORK_COUNTRY:-}
@@ -254,7 +279,19 @@ services:
- NETWORK_CONTACT_EMAIL=${NETWORK_CONTACT_EMAIL:-}
- NETWORK_CONTACT_DISCORD=${NETWORK_CONTACT_DISCORD:-}
- NETWORK_CONTACT_GITHUB=${NETWORK_CONTACT_GITHUB:-}
- NETWORK_CONTACT_YOUTUBE=${NETWORK_CONTACT_YOUTUBE:-}
- NETWORK_WELCOME_TEXT=${NETWORK_WELCOME_TEXT:-}
- CONTENT_HOME=/content
- TZ=${TZ:-UTC}
- COLLECTOR_LETSMESH_DECODER_KEYS=${COLLECTOR_LETSMESH_DECODER_KEYS:-}
# Feature flags (set to false to disable specific pages)
- FEATURE_DASHBOARD=${FEATURE_DASHBOARD:-true}
- FEATURE_NODES=${FEATURE_NODES:-true}
- FEATURE_ADVERTISEMENTS=${FEATURE_ADVERTISEMENTS:-true}
- FEATURE_MESSAGES=${FEATURE_MESSAGES:-true}
- FEATURE_MAP=${FEATURE_MAP:-true}
- FEATURE_MEMBERS=${FEATURE_MEMBERS:-true}
- FEATURE_PAGES=${FEATURE_PAGES:-true}
command: ["web"]
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8080/health')"]
@@ -274,18 +311,20 @@ services:
container_name: meshcore-db-migrate
profiles:
- all
- core
- migrate
restart: "no"
volumes:
# Mount data directory (uses collector/meshcore.db)
- ${DATA_HOME:-./data}:/data
- hub_data:/data
environment:
- DATA_HOME=/data
# Explicitly unset to use DATA_HOME-based default path
- DATABASE_URL=
command: ["db", "upgrade"]
# ==========================================================================
# Seed Data - Import node_tags.json and members.json from SEED_HOME
# Seed Data - Import node_tags.yaml and members.yaml from SEED_HOME
# NOTE: This is NOT run automatically. Use --profile seed to run explicitly.
# Since tags are now managed via the admin UI, automatic seeding would
# overwrite user changes.
# ==========================================================================
seed:
image: ghcr.io/ipnet-mesh/meshcore-hub:${IMAGE_VERSION:-latest}
@@ -294,27 +333,71 @@ services:
dockerfile: Dockerfile
container_name: meshcore-seed
profiles:
- all
- seed
restart: "no"
volumes:
# Mount data directory for database (read-write)
- ${DATA_HOME:-./data}:/data
# Mount seed directory for seed files (read-only)
- hub_data:/data
- ${SEED_HOME:-./seed}:/seed:ro
environment:
- DATA_HOME=/data
- SEED_HOME=/seed
- LOG_LEVEL=${LOG_LEVEL:-INFO}
# Explicitly unset to use DATA_HOME-based default path
- DATABASE_URL=
# Imports both node_tags.json and members.json if they exist
# Imports both node_tags.yaml and members.yaml if they exist
command: ["collector", "seed"]
# ==========================================================================
# Prometheus - Metrics collection and monitoring (optional, use --profile metrics)
# ==========================================================================
prometheus:
image: prom/prometheus:latest
container_name: meshcore-prometheus
profiles:
- all
- metrics
restart: unless-stopped
depends_on:
api:
condition: service_healthy
ports:
- "${PROMETHEUS_PORT:-9090}:9090"
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.retention.time=30d'
volumes:
- ./etc/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
- ./etc/prometheus/alerts.yml:/etc/prometheus/alerts.yml:ro
- prometheus_data:/prometheus
# ==========================================================================
# Alertmanager - Alert routing and notifications (optional, use --profile metrics)
# ==========================================================================
alertmanager:
image: prom/alertmanager:latest
container_name: meshcore-alertmanager
profiles:
- all
- metrics
restart: unless-stopped
ports:
- "${ALERTMANAGER_PORT:-9093}:9093"
volumes:
- ./etc/alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro
- alertmanager_data:/alertmanager
command:
- '--config.file=/etc/alertmanager/alertmanager.yml'
- '--storage.path=/alertmanager'
# ==========================================================================
# Volumes
# ==========================================================================
volumes:
hub_data:
name: meshcore_hub_data
mosquitto_data:
name: meshcore_mosquitto_data
mosquitto_log:
name: meshcore_mosquitto_log
prometheus_data:
name: meshcore_prometheus_data
alertmanager_data:
name: meshcore_alertmanager_data

Binary file not shown.

Before

Width:  |  Height:  |  Size: 133 KiB

After

Width:  |  Height:  |  Size: 238 KiB

View File

@@ -0,0 +1,35 @@
# Alertmanager configuration for MeshCore Hub
#
# Default configuration routes all alerts to a "blackhole" receiver
# (logs only, no external notifications).
#
# To receive notifications, configure a receiver below.
# See: https://prometheus.io/docs/alerting/latest/configuration/
#
# Examples:
#
# Email:
# receivers:
# - name: 'email'
# email_configs:
# - to: 'admin@example.com'
# from: 'alertmanager@example.com'
# smarthost: 'smtp.example.com:587'
# auth_username: 'alertmanager@example.com'
# auth_password: 'password'
#
# Webhook (e.g. Slack incoming webhook, ntfy, Gotify):
# receivers:
# - name: 'webhook'
# webhook_configs:
# - url: 'https://example.com/webhook'
route:
receiver: 'default'
group_by: ['alertname']
group_wait: 30s
group_interval: 5m
repeat_interval: 4h
receivers:
- name: 'default'

16
etc/prometheus/alerts.yml Normal file
View File

@@ -0,0 +1,16 @@
# Prometheus alert rules for MeshCore Hub
#
# These rules are evaluated by Prometheus and fired alerts are sent
# to Alertmanager for routing and notification.
groups:
- name: meshcore
rules:
- alert: NodeNotSeen
expr: time() - meshcore_node_last_seen_timestamp_seconds{role="infra"} > 48 * 3600
for: 5m
labels:
severity: warning
annotations:
summary: "Node {{ $labels.node_name }} ({{ $labels.role }}) not seen for 48+ hours"
description: "Node {{ $labels.public_key }} ({{ $labels.adv_type }}, role={{ $labels.role }}) last seen {{ $value | humanizeDuration }} ago."

View File

@@ -0,0 +1,29 @@
# Prometheus scrape configuration for MeshCore Hub
#
# This file is used when running Prometheus via Docker Compose:
# docker compose --profile core --profile metrics up -d
#
# The scrape interval matches the default metrics cache TTL (60s)
# to avoid unnecessary database queries.
global:
scrape_interval: 60s
evaluation_interval: 60s
alerting:
alertmanagers:
- static_configs:
- targets: ['alertmanager:9093']
rule_files:
- 'alerts.yml'
scrape_configs:
- job_name: 'meshcore-hub'
metrics_path: '/metrics'
# Uncomment basic_auth if API_READ_KEY is configured
# basic_auth:
# username: 'metrics'
# password: '<API_READ_KEY>'
static_configs:
- targets: ['api:8000']

View File

@@ -0,0 +1,61 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
viewBox="0 0 115 100"
width="115"
height="100"
version="1.1"
id="svg4"
sodipodi:docname="logo-dark.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs4" />
<sodipodi:namedview
id="namedview4"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1" />
<!-- I letter - muted -->
<rect
x="0"
y="0"
width="25"
height="100"
rx="2"
fill="#ffffff"
opacity="0.5"
id="rect1" />
<!-- P vertical stem -->
<rect
x="35"
y="0"
width="25"
height="100"
rx="2"
fill="#ffffff"
id="rect2" />
<!-- WiFi arcs: center at mid-stem (90, 60), sweeping from right up to top -->
<g
fill="none"
stroke="#ffffff"
stroke-width="10"
stroke-linecap="round"
id="g4"
transform="translate(-30,-10)">
<path
d="M 110,65 A 20,20 0 0 0 90,45"
id="path2" />
<path
d="M 125,65 A 35,35 0 0 0 90,30"
id="path3" />
<path
d="M 140,65 A 50,50 0 0 0 90,15"
id="path4" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@@ -0,0 +1,87 @@
---
title: Join
slug: join
menu_order: 10
---
# Getting Started with MeshCore
MeshCore is an open-source off-grid LoRa mesh networking platform. This guide will help you get connected to the network.
For detailed documentation, see the [MeshCore FAQ](https://github.com/meshcore-dev/MeshCore/blob/main/docs/faq.md).
## Node Types
MeshCore devices operate in different modes:
| Mode | Description |
|------|-------------|
| **Companion** | Connects to your phone via Bluetooth. Use this for messaging and interacting with the network. |
| **Repeater** | Standalone node that extends network coverage. Place these in elevated locations for best results. |
| **Room Server** | Hosts chat rooms that persist messages for offline users. |
Most users start with a **Companion** node paired to their phone.
## Frequency Regulations
MeshCore uses LoRa radio, which operates on unlicensed ISM bands. You **must** use the correct frequency for your region:
| Region | Frequency | Notes |
|--------|-----------|-------|
| Europe (EU) | 868 MHz | EU868 band |
| United Kingdom | 868 MHz | Same as EU |
| North America | 915 MHz | US915 band |
| Australia | 915 MHz | AU915 band |
Using the wrong frequency is illegal and may cause interference. Check your local regulations.
## Compatible Hardware
MeshCore runs on inexpensive low-power LoRa devices. Popular options include:
### Recommended Devices
| Device | Manufacturer | Features |
|--------|--------------|----------|
| [Heltec V3](https://heltec.org/project/wifi-lora-32-v3/) | Heltec | Budget-friendly, OLED display |
| [T114](https://heltec.org/project/mesh-node-t114/) | Heltec | Compact, GPS, colour display |
| [T1000-E](https://www.seeedstudio.com/SenseCAP-Card-Tracker-T1000-E-for-Meshtastic-p-5913.html) | Seeed Studio | Credit-card sized, GPS, weatherproof |
| [T-Deck Plus](https://www.lilygo.cc/products/t-deck-plus) | LilyGO | Built-in keyboard, touchscreen, GPS |
Ensure you purchase the correct frequency variant (868MHz for EU/UK, 915MHz for US/AU).
### Where to Buy
- **Heltec**: [Official Store](https://heltec.org/) or AliExpress
- **LilyGO**: [Official Store](https://lilygo.cc/) or AliExpress
- **Seeed Studio**: [Official Store](https://www.seeedstudio.com/)
- **Amazon**: Search for device name + "LoRa 868" (or 915 for US)
## Mobile Apps
Connect to your Companion node using the official MeshCore apps:
| Platform | App | Link |
|----------|-----|------|
| Android | MeshCore | [Google Play](https://play.google.com/store/apps/details?id=com.liamcottle.meshcore.android) |
| iOS | MeshCore | [App Store](https://apps.apple.com/us/app/meshcore/id6742354151) |
The app connects via Bluetooth to your Companion node, allowing you to send messages, view the network, and configure your device.
## Flashing Firmware
1. Use the [MeshCore Web Flasher](https://flasher.meshcore.co.uk/) for easy browser-based flashing
2. Select your device type and region (frequency)
3. Connect via USB and flash
## Next Steps
Once your device is flashed and paired:
1. Open the MeshCore app on your phone
2. Enable Bluetooth and pair with your device
3. Set your node name in the app settings
4. Configure your radio settings/profile for your region
5. You should start seeing other nodes on the network
Welcome to the mesh!

View File

@@ -1,16 +1,14 @@
# Example members seed file
# Each member can have multiple nodes with different roles (chat, repeater, etc.)
# Note: Nodes are associated with members via a 'member_id' tag on the node.
# Use node_tags.yaml to set member_id tags on nodes.
members:
- name: Example Member
- member_id: example_member
name: Example Member
callsign: N0CALL
role: Network Operator
description: Example member entry with multiple nodes
nodes:
- public_key: 0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
node_role: chat
- public_key: fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210
node_role: repeater
- name: Simple Member
description: Example network operator member
- member_id: simple_member
name: Simple Member
callsign: N0CALL2
role: Observer
description: Member without any nodes
description: Example observer member

View File

@@ -7,12 +7,12 @@
# elevation: 150 # number
# is_online: true # boolean
#
# - Explicit type (for special types like coordinate):
# location:
# value: "37.7749,-122.4194"
# type: coordinate
# - Explicit type (when you need to force a specific type):
# altitude:
# value: "150"
# type: number
#
# Supported types: string, number, boolean, coordinate
# Supported types: string, number, boolean
0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef:
friendly_name: Gateway Node

View File

@@ -0,0 +1,58 @@
diff --git a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
index d33ffd6..8d040d0 100644
--- a/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
+++ b/node_modules/@michaelhart/meshcore-decoder/dist/crypto/ed25519-verifier.js
@@ -36,7 +36,27 @@ var __importStar = (this && this.__importStar) || (function () {
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Ed25519SignatureVerifier = void 0;
-const ed25519 = __importStar(require("@noble/ed25519"));
+let _ed25519 = null;
+async function getEd25519() {
+ if (_ed25519) {
+ return _ed25519;
+ }
+ const mod = await import("@noble/ed25519");
+ _ed25519 = mod.default ? mod.default : mod;
+ try {
+ _ed25519.etc.sha512Async = sha512Hash;
+ }
+ catch (error) {
+ console.debug("Could not set async SHA-512:", error);
+ }
+ try {
+ _ed25519.etc.sha512Sync = sha512HashSync;
+ }
+ catch (error) {
+ console.debug("Could not set up synchronous SHA-512:", error);
+ }
+ return _ed25519;
+}
const hex_1 = require("../utils/hex");
const orlp_ed25519_wasm_1 = require("./orlp-ed25519-wasm");
// Cross-platform SHA-512 implementation
@@ -90,16 +110,6 @@ function sha512HashSync(data) {
throw new Error('No SHA-512 implementation available for synchronous operation');
}
}
-// Set up SHA-512 for @noble/ed25519
-ed25519.etc.sha512Async = sha512Hash;
-// Always set up sync version - @noble/ed25519 requires it
-// It will throw in browser environments, which @noble/ed25519 can handle
-try {
- ed25519.etc.sha512Sync = sha512HashSync;
-}
-catch (error) {
- console.debug('Could not set up synchronous SHA-512:', error);
-}
class Ed25519SignatureVerifier {
/**
* Verify an Ed25519 signature for MeshCore advertisement packets
@@ -116,6 +126,7 @@ class Ed25519SignatureVerifier {
// Construct the signed message according to MeshCore format
const message = this.constructAdvertSignedMessage(publicKeyHex, timestamp, appData);
// Verify the signature using noble-ed25519
+ const ed25519 = await getEd25519();
return await ed25519.verify(signature, message, publicKey);
}
catch (error) {

View File

@@ -8,7 +8,7 @@ version = "0.0.0"
description = "Python monorepo for managing and orchestrating MeshCore mesh networks"
readme = "README.md"
license = {text = "GPL-3.0-or-later"}
requires-python = ">=3.11"
requires-python = ">=3.13"
authors = [
{name = "MeshCore Hub Contributors"}
]
@@ -18,8 +18,7 @@ classifiers = [
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Topic :: Communications",
"Topic :: System :: Networking",
]
@@ -38,8 +37,11 @@ dependencies = [
"python-multipart>=0.0.6",
"httpx>=0.25.0",
"aiosqlite>=0.19.0",
"meshcore>=2.2.0",
"meshcore>=2.3.0",
"pyyaml>=6.0.0",
"python-frontmatter>=1.0.0",
"markdown>=3.5.0",
"prometheus-client>=0.20.0",
]
[project.optional-dependencies]
@@ -51,6 +53,7 @@ dev = [
"flake8>=6.1.0",
"mypy>=1.5.0",
"pre-commit>=3.4.0",
"beautifulsoup4>=4.12.0",
"types-paho-mqtt>=1.6.0",
"types-PyYAML>=6.0.0",
]
@@ -78,7 +81,7 @@ meshcore_hub = ["py.typed"]
[tool.black]
line-length = 88
target-version = ["py311"]
target-version = ["py312"]
include = '\.pyi?$'
extend-exclude = '''
/(
@@ -97,7 +100,7 @@ extend-exclude = '''
'''
[tool.mypy]
python_version = "3.11"
python_version = "3.13"
warn_return_any = true
warn_unused_ignores = true
disallow_untyped_defs = true
@@ -112,6 +115,9 @@ module = [
"uvicorn.*",
"alembic.*",
"meshcore.*",
"frontmatter.*",
"markdown.*",
"prometheus_client.*",
]
ignore_missing_imports = true

6
renovate.json Normal file
View File

@@ -0,0 +1,6 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
]
}

View File

@@ -51,9 +51,15 @@ def create_app(
admin_key: str | None = None,
mqtt_host: str = "localhost",
mqtt_port: int = 1883,
mqtt_username: str | None = None,
mqtt_password: str | None = None,
mqtt_prefix: str = "meshcore",
mqtt_tls: bool = False,
mqtt_transport: str = "tcp",
mqtt_ws_path: str = "/mqtt",
cors_origins: list[str] | None = None,
metrics_enabled: bool = True,
metrics_cache_ttl: int = 60,
) -> FastAPI:
"""Create and configure the FastAPI application.
@@ -63,9 +69,15 @@ def create_app(
admin_key: Admin API key
mqtt_host: MQTT broker host
mqtt_port: MQTT broker port
mqtt_username: MQTT username
mqtt_password: MQTT password
mqtt_prefix: MQTT topic prefix
mqtt_tls: Enable TLS/SSL for MQTT connection
mqtt_transport: MQTT transport protocol (tcp or websockets)
mqtt_ws_path: WebSocket path (used when transport=websockets)
cors_origins: Allowed CORS origins
metrics_enabled: Enable Prometheus metrics endpoint at /metrics
metrics_cache_ttl: Seconds to cache metrics output
Returns:
Configured FastAPI application
@@ -86,8 +98,13 @@ def create_app(
app.state.admin_key = admin_key
app.state.mqtt_host = mqtt_host
app.state.mqtt_port = mqtt_port
app.state.mqtt_username = mqtt_username
app.state.mqtt_password = mqtt_password
app.state.mqtt_prefix = mqtt_prefix
app.state.mqtt_tls = mqtt_tls
app.state.mqtt_transport = mqtt_transport
app.state.mqtt_ws_path = mqtt_ws_path
app.state.metrics_cache_ttl = metrics_cache_ttl
# Configure CORS
if cors_origins is None:
@@ -106,6 +123,12 @@ def create_app(
app.include_router(api_router, prefix="/api/v1")
# Include Prometheus metrics endpoint
if metrics_enabled:
from meshcore_hub.api.metrics import router as metrics_router
app.include_router(metrics_router)
# Health check endpoints
@app.get("/health", tags=["Health"])
async def health() -> dict:

View File

@@ -1,5 +1,6 @@
"""Authentication middleware for the API."""
import hmac
import logging
from typing import Annotated
@@ -79,7 +80,9 @@ async def require_read(
)
# Check if token matches any key
if token == read_key or token == admin_key:
if (read_key and hmac.compare_digest(token, read_key)) or (
admin_key and hmac.compare_digest(token, admin_key)
):
return token
raise HTTPException(
@@ -124,7 +127,7 @@ async def require_admin(
)
# Check if token matches admin key
if token == admin_key:
if hmac.compare_digest(token, admin_key):
return token
raise HTTPException(

View File

@@ -60,11 +60,25 @@ import click
envvar="MQTT_PORT",
help="MQTT broker port",
)
@click.option(
"--mqtt-username",
type=str,
default=None,
envvar="MQTT_USERNAME",
help="MQTT username",
)
@click.option(
"--mqtt-password",
type=str,
default=None,
envvar="MQTT_PASSWORD",
help="MQTT password",
)
@click.option(
"--mqtt-prefix",
type=str,
default="meshcore",
envvar="MQTT_TOPIC_PREFIX",
envvar=["MQTT_PREFIX", "MQTT_TOPIC_PREFIX"],
help="MQTT topic prefix",
)
@click.option(
@@ -74,6 +88,20 @@ import click
envvar="MQTT_TLS",
help="Enable TLS/SSL for MQTT connection",
)
@click.option(
"--mqtt-transport",
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
default="tcp",
envvar="MQTT_TRANSPORT",
help="MQTT transport protocol",
)
@click.option(
"--mqtt-ws-path",
type=str,
default="/mqtt",
envvar="MQTT_WS_PATH",
help="MQTT WebSocket path (used when transport=websockets)",
)
@click.option(
"--cors-origins",
type=str,
@@ -81,6 +109,19 @@ import click
envvar="CORS_ORIGINS",
help="Comma-separated list of allowed CORS origins",
)
@click.option(
"--metrics-enabled/--no-metrics",
default=True,
envvar="METRICS_ENABLED",
help="Enable Prometheus metrics endpoint at /metrics",
)
@click.option(
"--metrics-cache-ttl",
type=int,
default=60,
envvar="METRICS_CACHE_TTL",
help="Seconds to cache metrics output (reduces database load)",
)
@click.option(
"--reload",
is_flag=True,
@@ -98,9 +139,15 @@ def api(
admin_key: str | None,
mqtt_host: str,
mqtt_port: int,
mqtt_username: str | None,
mqtt_password: str | None,
mqtt_prefix: str,
mqtt_tls: bool,
mqtt_transport: str,
mqtt_ws_path: str,
cors_origins: str | None,
metrics_enabled: bool,
metrics_cache_ttl: int,
reload: bool,
) -> None:
"""Run the REST API server.
@@ -146,9 +193,12 @@ def api(
click.echo(f"Data home: {effective_data_home}")
click.echo(f"Database: {effective_db_url}")
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {mqtt_prefix})")
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
click.echo(f"Read key configured: {read_key is not None}")
click.echo(f"Admin key configured: {admin_key is not None}")
click.echo(f"CORS origins: {cors_origins or 'none'}")
click.echo(f"Metrics enabled: {metrics_enabled}")
click.echo(f"Metrics cache TTL: {metrics_cache_ttl}s")
click.echo(f"Reload mode: {reload}")
click.echo("=" * 50)
@@ -178,9 +228,15 @@ def api(
admin_key=admin_key,
mqtt_host=mqtt_host,
mqtt_port=mqtt_port,
mqtt_username=mqtt_username,
mqtt_password=mqtt_password,
mqtt_prefix=mqtt_prefix,
mqtt_tls=mqtt_tls,
mqtt_transport=mqtt_transport,
mqtt_ws_path=mqtt_ws_path,
cors_origins=origins_list,
metrics_enabled=metrics_enabled,
metrics_cache_ttl=metrics_cache_ttl,
)
click.echo("\nStarting API server...")

View File

@@ -56,17 +56,25 @@ def get_mqtt_client(request: Request) -> MQTTClient:
"""
mqtt_host = getattr(request.app.state, "mqtt_host", "localhost")
mqtt_port = getattr(request.app.state, "mqtt_port", 1883)
mqtt_username = getattr(request.app.state, "mqtt_username", None)
mqtt_password = getattr(request.app.state, "mqtt_password", None)
mqtt_prefix = getattr(request.app.state, "mqtt_prefix", "meshcore")
mqtt_tls = getattr(request.app.state, "mqtt_tls", False)
mqtt_transport = getattr(request.app.state, "mqtt_transport", "tcp")
mqtt_ws_path = getattr(request.app.state, "mqtt_ws_path", "/mqtt")
# Use unique client ID to allow multiple API instances
unique_id = uuid.uuid4().hex[:8]
config = MQTTConfig(
host=mqtt_host,
port=mqtt_port,
username=mqtt_username,
password=mqtt_password,
prefix=mqtt_prefix,
client_id=f"meshcore-api-{unique_id}",
tls=mqtt_tls,
transport=mqtt_transport,
ws_path=mqtt_ws_path,
)
client = MQTTClient(config)

View File

@@ -0,0 +1,334 @@
"""Prometheus metrics endpoint for MeshCore Hub API."""
import base64
import hmac
import logging
import time
from typing import Any
from fastapi import APIRouter, Request, Response
from fastapi.responses import PlainTextResponse
from prometheus_client import CollectorRegistry, Gauge, generate_latest
from sqlalchemy import func, select
from meshcore_hub.common.models import (
Advertisement,
EventLog,
Member,
Message,
Node,
NodeTag,
Telemetry,
TracePath,
)
logger = logging.getLogger(__name__)
router = APIRouter()
# Module-level cache
_cache: dict[str, Any] = {"output": b"", "expires_at": 0.0}
def verify_basic_auth(request: Request) -> bool:
    """Verify HTTP Basic Auth credentials for the metrics endpoint.

    The expected credentials are the fixed username ``metrics`` with the
    configured API read key as the password. When no read key is
    configured, the endpoint is public and every request is accepted.

    Args:
        request: FastAPI request

    Returns:
        True if authentication passes
    """
    expected_key = getattr(request.app.state, "read_key", None)
    # No read key configured = public access
    if not expected_key:
        return True

    header = request.headers.get("Authorization", "")
    if not header.startswith("Basic "):
        return False

    try:
        raw = base64.b64decode(header[6:]).decode("utf-8")
        username, password = raw.split(":", 1)
    except Exception:
        # Malformed base64, bad UTF-8, or a missing colon all mean "denied".
        return False
    # Constant-time comparisons to avoid leaking credential prefixes.
    username_ok = hmac.compare_digest(username, "metrics")
    password_ok = hmac.compare_digest(password, expected_key)
    return username_ok and password_ok
def collect_metrics(session: Any) -> bytes:
    """Collect all metrics from the database and generate Prometheus output.

    Creates a fresh CollectorRegistry per call to avoid global state issues.

    Args:
        session: SQLAlchemy database session

    Returns:
        Prometheus text exposition format as bytes
    """
    # Imported lazily to avoid a circular import at module load time.
    from meshcore_hub import __version__
    from datetime import datetime, timezone

    registry = CollectorRegistry()

    # Time windows shared by all "activity in window" metrics below.
    windows = [("1h", 1), ("24h", 24), ("7d", 168), ("30d", 720)]

    def _cutoff_dt(hours: int) -> datetime:
        """Return the timezone-aware UTC datetime `hours` hours ago."""
        return datetime.fromtimestamp(time.time() - (hours * 3600), tz=timezone.utc)

    # -- Info gauge --
    info_gauge = Gauge(
        "meshcore_info",
        "MeshCore Hub application info",
        ["version"],
        registry=registry,
    )
    info_gauge.labels(version=__version__).set(1)

    # -- Nodes total --
    nodes_total = Gauge(
        "meshcore_nodes_total",
        "Total number of nodes",
        registry=registry,
    )
    count = session.execute(select(func.count(Node.id))).scalar() or 0
    nodes_total.set(count)

    # -- Nodes active by time window --
    nodes_active = Gauge(
        "meshcore_nodes_active",
        "Number of active nodes in time window",
        ["window"],
        registry=registry,
    )
    for window, hours in windows:
        cutoff_dt = _cutoff_dt(hours)
        count = (
            session.execute(
                select(func.count(Node.id)).where(Node.last_seen >= cutoff_dt)
            ).scalar()
            or 0
        )
        nodes_active.labels(window=window).set(count)

    # -- Nodes by type --
    nodes_by_type = Gauge(
        "meshcore_nodes_by_type",
        "Number of nodes by advertisement type",
        ["adv_type"],
        registry=registry,
    )
    type_counts = session.execute(
        select(Node.adv_type, func.count(Node.id)).group_by(Node.adv_type)
    ).all()
    for adv_type, count in type_counts:
        nodes_by_type.labels(adv_type=adv_type or "unknown").set(count)

    # -- Nodes with location --
    nodes_with_location = Gauge(
        "meshcore_nodes_with_location",
        "Number of nodes with GPS coordinates",
        registry=registry,
    )
    count = (
        session.execute(
            select(func.count(Node.id)).where(
                Node.lat.isnot(None), Node.lon.isnot(None)
            )
        ).scalar()
        or 0
    )
    nodes_with_location.set(count)

    # -- Node last seen timestamp --
    node_last_seen = Gauge(
        "meshcore_node_last_seen_timestamp_seconds",
        "Unix timestamp of when the node was last seen",
        ["public_key", "node_name", "adv_type", "role"],
        registry=registry,
    )
    # Role comes from the node's "role" tag (if any), joined via a subquery.
    role_subq = (
        select(NodeTag.node_id, NodeTag.value.label("role"))
        .where(NodeTag.key == "role")
        .subquery()
    )
    nodes_with_last_seen = session.execute(
        select(
            Node.public_key,
            Node.name,
            Node.adv_type,
            Node.last_seen,
            role_subq.c.role,
        )
        .outerjoin(role_subq, Node.id == role_subq.c.node_id)
        .where(Node.last_seen.isnot(None))
    ).all()
    for public_key, name, adv_type, last_seen, role in nodes_with_last_seen:
        node_last_seen.labels(
            public_key=public_key,
            node_name=name or "",
            adv_type=adv_type or "unknown",
            role=role or "",
        ).set(last_seen.timestamp())

    # -- Messages total by type --
    messages_total = Gauge(
        "meshcore_messages_total",
        "Total number of messages by type",
        ["type"],
        registry=registry,
    )
    msg_type_counts = session.execute(
        select(Message.message_type, func.count(Message.id)).group_by(
            Message.message_type
        )
    ).all()
    for msg_type, count in msg_type_counts:
        messages_total.labels(type=msg_type).set(count)

    # -- Messages received by type and window --
    messages_received = Gauge(
        "meshcore_messages_received",
        "Messages received in time window by type",
        ["type", "window"],
        registry=registry,
    )
    for window, hours in windows:
        cutoff_dt = _cutoff_dt(hours)
        window_counts = session.execute(
            select(Message.message_type, func.count(Message.id))
            .where(Message.received_at >= cutoff_dt)
            .group_by(Message.message_type)
        ).all()
        for msg_type, count in window_counts:
            messages_received.labels(type=msg_type, window=window).set(count)

    # -- Advertisements total --
    advertisements_total = Gauge(
        "meshcore_advertisements_total",
        "Total number of advertisements",
        registry=registry,
    )
    count = session.execute(select(func.count(Advertisement.id))).scalar() or 0
    advertisements_total.set(count)

    # -- Advertisements received by window --
    advertisements_received = Gauge(
        "meshcore_advertisements_received",
        "Advertisements received in time window",
        ["window"],
        registry=registry,
    )
    for window, hours in windows:
        cutoff_dt = _cutoff_dt(hours)
        count = (
            session.execute(
                select(func.count(Advertisement.id)).where(
                    Advertisement.received_at >= cutoff_dt
                )
            ).scalar()
            or 0
        )
        advertisements_received.labels(window=window).set(count)

    # -- Telemetry total --
    telemetry_total = Gauge(
        "meshcore_telemetry_total",
        "Total number of telemetry records",
        registry=registry,
    )
    count = session.execute(select(func.count(Telemetry.id))).scalar() or 0
    telemetry_total.set(count)

    # -- Trace paths total --
    trace_paths_total = Gauge(
        "meshcore_trace_paths_total",
        "Total number of trace path records",
        registry=registry,
    )
    count = session.execute(select(func.count(TracePath.id))).scalar() or 0
    trace_paths_total.set(count)

    # -- Events by type --
    events_total = Gauge(
        "meshcore_events_total",
        "Total events by type from event log",
        ["event_type"],
        registry=registry,
    )
    event_counts = session.execute(
        select(EventLog.event_type, func.count(EventLog.id)).group_by(
            EventLog.event_type
        )
    ).all()
    for event_type, count in event_counts:
        events_total.labels(event_type=event_type).set(count)

    # -- Members total --
    members_total = Gauge(
        "meshcore_members_total",
        "Total number of network members",
        registry=registry,
    )
    count = session.execute(select(func.count(Member.id))).scalar() or 0
    members_total.set(count)

    output: bytes = generate_latest(registry)
    return output
@router.get("/metrics")
async def metrics(request: Request) -> Response:
    """Prometheus metrics endpoint.

    Returns metrics in Prometheus text exposition format.
    Supports HTTP Basic Auth with username 'metrics' and API read key as password.
    Results are cached with a configurable TTL to reduce database load.
    """
    # Check authentication
    if not verify_basic_auth(request):
        return PlainTextResponse(
            "Unauthorized",
            status_code=401,
            headers={"WWW-Authenticate": 'Basic realm="metrics"'},
        )
    # Check cache: serve the cached exposition output while it is fresh.
    # The cache is module-level, so it is shared across requests within
    # this process.
    cache_ttl = getattr(request.app.state, "metrics_cache_ttl", 60)
    now = time.time()
    if _cache["output"] and now < _cache["expires_at"]:
        return Response(
            content=_cache["output"],
            media_type="text/plain; version=0.0.4; charset=utf-8",
        )
    # Collect fresh metrics
    try:
        # Imported here to avoid a circular import at module load time.
        from meshcore_hub.api.app import get_db_manager

        db_manager = get_db_manager()
        with db_manager.session_scope() as session:
            output = collect_metrics(session)
        # Update cache
        _cache["output"] = output
        _cache["expires_at"] = now + cache_ttl
        return Response(
            content=output,
            media_type="text/plain; version=0.0.4; charset=utf-8",
        )
    except Exception as e:
        # Surface the failure to the scraper as a 500 with a comment line,
        # which Prometheus records as a failed scrape.
        logger.exception("Failed to collect metrics: %s", e)
        return PlainTextResponse(
            f"# Error collecting metrics: {e}\n",
            status_code=500,
            media_type="text/plain; version=0.0.4; charset=utf-8",
        )

View File

@@ -4,7 +4,7 @@ from datetime import datetime
from typing import Optional
from fastapi import APIRouter, HTTPException, Query
from sqlalchemy import func, select
from sqlalchemy import func, or_, select
from sqlalchemy.orm import aliased, selectinload
from meshcore_hub.api.auth import RequireRead
@@ -19,12 +19,22 @@ from meshcore_hub.common.schemas.messages import (
router = APIRouter()
def _get_friendly_name(node: Optional[Node]) -> Optional[str]:
"""Extract friendly_name tag from a node's tags."""
def _get_tag_name(node: Optional[Node]) -> Optional[str]:
"""Extract name tag from a node's tags."""
if not node or not node.tags:
return None
for tag in node.tags:
if tag.key == "friendly_name":
if tag.key == "name":
return tag.value
return None
def _get_tag_description(node: Optional[Node]) -> Optional[str]:
"""Extract description tag from a node's tags."""
if not node or not node.tags:
return None
for tag in node.tags:
if tag.key == "description":
return tag.value
return None
@@ -57,15 +67,15 @@ def _fetch_receivers_for_events(
receivers_by_hash: dict[str, list[ReceiverInfo]] = {}
node_ids = [r.node_id for r in results]
friendly_names: dict[str, str] = {}
tag_names: dict[str, str] = {}
if node_ids:
fn_query = (
tag_query = (
select(NodeTag.node_id, NodeTag.value)
.where(NodeTag.node_id.in_(node_ids))
.where(NodeTag.key == "friendly_name")
.where(NodeTag.key == "name")
)
for node_id, value in session.execute(fn_query).all():
friendly_names[node_id] = value
for node_id, value in session.execute(tag_query).all():
tag_names[node_id] = value
for row in results:
if row.event_hash not in receivers_by_hash:
@@ -76,7 +86,7 @@ def _fetch_receivers_for_events(
node_id=row.node_id,
public_key=row.public_key,
name=row.name,
friendly_name=friendly_names.get(row.node_id),
tag_name=tag_names.get(row.node_id),
snr=row.snr,
received_at=row.received_at,
)
@@ -89,10 +99,16 @@ def _fetch_receivers_for_events(
async def list_advertisements(
_: RequireRead,
session: DbSession,
search: Optional[str] = Query(
None, description="Search in name tag, node name, or public key"
),
public_key: Optional[str] = Query(None, description="Filter by public key"),
received_by: Optional[str] = Query(
None, description="Filter by receiver node public key"
),
member_id: Optional[str] = Query(
None, description="Filter by member_id tag value of source node"
),
since: Optional[datetime] = Query(None, description="Start timestamp"),
until: Optional[datetime] = Query(None, description="End timestamp"),
limit: int = Query(50, ge=1, le=100, description="Page size"),
@@ -118,12 +134,38 @@ async def list_advertisements(
.outerjoin(SourceNode, Advertisement.node_id == SourceNode.id)
)
if search:
# Search in public key, advertisement name, node name, or name tag
search_pattern = f"%{search}%"
query = query.where(
or_(
Advertisement.public_key.ilike(search_pattern),
Advertisement.name.ilike(search_pattern),
SourceNode.name.ilike(search_pattern),
SourceNode.id.in_(
select(NodeTag.node_id).where(
NodeTag.key == "name", NodeTag.value.ilike(search_pattern)
)
),
)
)
if public_key:
query = query.where(Advertisement.public_key == public_key)
if received_by:
query = query.where(ReceiverNode.public_key == received_by)
if member_id:
# Filter advertisements from nodes that have a member_id tag with the specified value
query = query.where(
SourceNode.id.in_(
select(NodeTag.node_id).where(
NodeTag.key == "member_id", NodeTag.value == member_id
)
)
)
if since:
query = query.where(Advertisement.received_at >= since)
@@ -173,11 +215,12 @@ async def list_advertisements(
data = {
"received_by": row.receiver_pk,
"receiver_name": row.receiver_name,
"receiver_friendly_name": _get_friendly_name(receiver_node),
"receiver_tag_name": _get_tag_name(receiver_node),
"public_key": adv.public_key,
"name": adv.name,
"node_name": row.source_name,
"node_friendly_name": _get_friendly_name(source_node),
"node_tag_name": _get_tag_name(source_node),
"node_tag_description": _get_tag_description(source_node),
"adv_type": adv.adv_type or row.source_adv_type,
"flags": adv.flags,
"received_at": adv.received_at,
@@ -255,11 +298,12 @@ async def get_advertisement(
data = {
"received_by": result.receiver_pk,
"receiver_name": result.receiver_name,
"receiver_friendly_name": _get_friendly_name(receiver_node),
"receiver_tag_name": _get_tag_name(receiver_node),
"public_key": adv.public_key,
"name": adv.name,
"node_name": result.source_name,
"node_friendly_name": _get_friendly_name(source_node),
"node_tag_name": _get_tag_name(source_node),
"node_tag_description": _get_tag_description(source_node),
"adv_type": adv.adv_type or result.source_adv_type,
"flags": adv.flags,
"received_at": adv.received_at,

View File

@@ -2,8 +2,7 @@
from datetime import datetime, timedelta, timezone
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
from fastapi import APIRouter
from sqlalchemy import func, select
from meshcore_hub.api.auth import RequireRead
@@ -31,6 +30,7 @@ async def get_stats(
now = datetime.now(timezone.utc)
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
yesterday = now - timedelta(days=1)
seven_days_ago = now - timedelta(days=7)
# Total nodes
total_nodes = session.execute(select(func.count()).select_from(Node)).scalar() or 0
@@ -73,6 +73,26 @@ async def get_stats(
or 0
)
# Advertisements in last 7 days
advertisements_7d = (
session.execute(
select(func.count())
.select_from(Advertisement)
.where(Advertisement.received_at >= seven_days_ago)
).scalar()
or 0
)
# Messages in last 7 days
messages_7d = (
session.execute(
select(func.count())
.select_from(Message)
.where(Message.received_at >= seven_days_ago)
).scalar()
or 0
)
# Recent advertisements (last 10)
recent_ads = (
session.execute(
@@ -82,11 +102,11 @@ async def get_stats(
.all()
)
# Get node names, adv_types, and friendly_name tags for the advertised nodes
# Get node names, adv_types, and name tags for the advertised nodes
ad_public_keys = [ad.public_key for ad in recent_ads]
node_names: dict[str, str] = {}
node_adv_types: dict[str, str] = {}
friendly_names: dict[str, str] = {}
tag_names: dict[str, str] = {}
if ad_public_keys:
# Get node names and adv_types from Node table
node_query = select(Node.public_key, Node.name, Node.adv_type).where(
@@ -98,21 +118,21 @@ async def get_stats(
if adv_type:
node_adv_types[public_key] = adv_type
# Get friendly_name tags
friendly_name_query = (
# Get name tags
tag_name_query = (
select(Node.public_key, NodeTag.value)
.join(NodeTag, Node.id == NodeTag.node_id)
.where(Node.public_key.in_(ad_public_keys))
.where(NodeTag.key == "friendly_name")
.where(NodeTag.key == "name")
)
for public_key, value in session.execute(friendly_name_query).all():
friendly_names[public_key] = value
for public_key, value in session.execute(tag_name_query).all():
tag_names[public_key] = value
recent_advertisements = [
RecentAdvertisement(
public_key=ad.public_key,
name=ad.name or node_names.get(ad.public_key),
friendly_name=friendly_names.get(ad.public_key),
tag_name=tag_names.get(ad.public_key),
adv_type=ad.adv_type or node_adv_types.get(ad.public_key),
received_at=ad.received_at,
)
@@ -146,7 +166,7 @@ async def get_stats(
# Look up sender names for these messages
msg_prefixes = [m.pubkey_prefix for m in channel_msgs if m.pubkey_prefix]
msg_sender_names: dict[str, str] = {}
msg_friendly_names: dict[str, str] = {}
msg_tag_names: dict[str, str] = {}
if msg_prefixes:
for prefix in set(msg_prefixes):
sender_node_query = select(Node.public_key, Node.name).where(
@@ -156,14 +176,14 @@ async def get_stats(
if name:
msg_sender_names[public_key[:12]] = name
sender_friendly_query = (
sender_tag_query = (
select(Node.public_key, NodeTag.value)
.join(NodeTag, Node.id == NodeTag.node_id)
.where(Node.public_key.startswith(prefix))
.where(NodeTag.key == "friendly_name")
.where(NodeTag.key == "name")
)
for public_key, value in session.execute(sender_friendly_query).all():
msg_friendly_names[public_key[:12]] = value
for public_key, value in session.execute(sender_tag_query).all():
msg_tag_names[public_key[:12]] = value
channel_messages[int(channel_idx)] = [
ChannelMessage(
@@ -171,8 +191,8 @@ async def get_stats(
sender_name=(
msg_sender_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
),
sender_friendly_name=(
msg_friendly_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
sender_tag_name=(
msg_tag_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
),
pubkey_prefix=m.pubkey_prefix,
received_at=m.received_at,
@@ -185,8 +205,10 @@ async def get_stats(
active_nodes=active_nodes,
total_messages=total_messages,
messages_today=messages_today,
messages_7d=messages_7d,
total_advertisements=total_advertisements,
advertisements_24h=advertisements_24h,
advertisements_7d=advertisements_7d,
recent_advertisements=recent_advertisements,
channel_message_counts=channel_message_counts,
channel_messages=channel_messages,
@@ -205,15 +227,15 @@ async def get_activity(
days: Number of days to include (default 30, max 90)
Returns:
Daily advertisement counts for each day in the period
Daily advertisement counts for each day in the period (excluding today)
"""
# Limit to max 90 days
days = min(days, 90)
now = datetime.now(timezone.utc)
start_date = (now - timedelta(days=days - 1)).replace(
hour=0, minute=0, second=0, microsecond=0
)
# End at start of today (exclude today's incomplete data)
end_date = now.replace(hour=0, minute=0, second=0, microsecond=0)
start_date = end_date - timedelta(days=days)
# Query advertisement counts grouped by date
# Use SQLite's date() function for grouping (returns string 'YYYY-MM-DD')
@@ -225,6 +247,7 @@ async def get_activity(
func.count().label("count"),
)
.where(Advertisement.received_at >= start_date)
.where(Advertisement.received_at < end_date)
.group_by(date_expr)
.order_by(date_expr)
)
@@ -257,14 +280,14 @@ async def get_message_activity(
days: Number of days to include (default 30, max 90)
Returns:
Daily message counts for each day in the period
Daily message counts for each day in the period (excluding today)
"""
days = min(days, 90)
now = datetime.now(timezone.utc)
start_date = (now - timedelta(days=days - 1)).replace(
hour=0, minute=0, second=0, microsecond=0
)
# End at start of today (exclude today's incomplete data)
end_date = now.replace(hour=0, minute=0, second=0, microsecond=0)
start_date = end_date - timedelta(days=days)
# Query message counts grouped by date
date_expr = func.date(Message.received_at)
@@ -275,6 +298,7 @@ async def get_message_activity(
func.count().label("count"),
)
.where(Message.received_at >= start_date)
.where(Message.received_at < end_date)
.group_by(date_expr)
.order_by(date_expr)
)
@@ -308,14 +332,14 @@ async def get_node_count_history(
days: Number of days to include (default 30, max 90)
Returns:
Cumulative node count for each day in the period
Cumulative node count for each day in the period (excluding today)
"""
days = min(days, 90)
now = datetime.now(timezone.utc)
start_date = (now - timedelta(days=days - 1)).replace(
hour=0, minute=0, second=0, microsecond=0
)
# End at start of today (exclude today's incomplete data)
end_date = now.replace(hour=0, minute=0, second=0, microsecond=0)
start_date = end_date - timedelta(days=days)
# Get all nodes with their creation dates
# Count nodes created on or before each date
@@ -337,175 +361,3 @@ async def get_node_count_history(
data.append(DailyActivityPoint(date=date_str, count=count))
return NodeCountHistory(days=days, data=data)
@router.get("/", response_class=HTMLResponse)
async def dashboard(
    request: Request,
    session: DbSession,
) -> HTMLResponse:
    """Simple HTML dashboard page.

    Renders headline stats plus the ten most recently seen nodes and the ten
    most recent messages, auto-refreshing every 30 seconds.

    All database-derived strings (node names, public keys, message text,
    sender prefixes) are HTML-escaped before interpolation to prevent stored
    XSS via attacker-controlled mesh data.
    """
    import html as _html  # local import: escaping is only needed by this view

    def esc(value: object) -> str:
        """Escape an untrusted value for safe embedding in HTML."""
        return _html.escape(str(value), quote=True)

    now = datetime.now(timezone.utc)
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    yesterday = now - timedelta(days=1)

    # Headline stats
    total_nodes = session.execute(select(func.count()).select_from(Node)).scalar() or 0
    active_nodes = (
        session.execute(
            select(func.count()).select_from(Node).where(Node.last_seen >= yesterday)
        ).scalar()
        or 0
    )
    total_messages = (
        session.execute(select(func.count()).select_from(Message)).scalar() or 0
    )
    messages_today = (
        session.execute(
            select(func.count())
            .select_from(Message)
            .where(Message.received_at >= today_start)
        ).scalar()
        or 0
    )

    # Ten most recently seen nodes
    recent_nodes = (
        session.execute(select(Node).order_by(Node.last_seen.desc()).limit(10))
        .scalars()
        .all()
    )

    # Ten most recent messages
    recent_messages = (
        session.execute(select(Message).order_by(Message.received_at.desc()).limit(10))
        .scalars()
        .all()
    )

    # Build table rows separately so every dynamic value goes through esc().
    node_rows = "".join(
        f'''
                    <tr>
                        <td>{esc(n.name) if n.name else '-'}</td>
                        <td class="truncate">{esc(n.public_key[:16])}...</td>
                        <td>{esc(n.adv_type) if n.adv_type else '-'}</td>
                        <td>{n.last_seen.strftime('%Y-%m-%d %H:%M') if n.last_seen else '-'}</td>
                    </tr>
        '''
        for n in recent_nodes
    )

    def _origin(m: Message) -> str:
        """Format the From/Channel cell for a message row."""
        # Fix: the original `pubkey_prefix or f'Ch {idx}' or '-'` could never
        # reach '-' (the f-string is always truthy) and rendered "Ch None".
        if m.pubkey_prefix:
            return esc(m.pubkey_prefix)
        if m.channel_idx is not None:
            return f"Ch {esc(m.channel_idx)}"
        return "-"

    message_rows = "".join(
        f'''
                    <tr>
                        <td>{esc(m.message_type)}</td>
                        <td>{_origin(m)}</td>
                        <td class="truncate">{esc(m.text[:50])}{'...' if len(m.text) > 50 else ''}</td>
                        <td>{m.received_at.strftime('%Y-%m-%d %H:%M') if m.received_at else '-'}</td>
                    </tr>
        '''
        for m in recent_messages
    )

    html = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <title>MeshCore Hub Dashboard</title>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <meta http-equiv="refresh" content="30">
        <style>
            body {{
                font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
                margin: 0;
                padding: 20px;
                background: #f5f5f5;
                color: #333;
            }}
            h1 {{ color: #2c3e50; }}
            .container {{ max-width: 1200px; margin: 0 auto; }}
            .stats {{
                display: grid;
                grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
                gap: 20px;
                margin-bottom: 30px;
            }}
            .stat-card {{
                background: white;
                padding: 20px;
                border-radius: 8px;
                box-shadow: 0 2px 4px rgba(0,0,0,0.1);
            }}
            .stat-card h3 {{ margin: 0 0 10px 0; color: #666; font-size: 14px; }}
            .stat-card .value {{ font-size: 32px; font-weight: bold; color: #2c3e50; }}
            .section {{
                background: white;
                padding: 20px;
                border-radius: 8px;
                box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                margin-bottom: 20px;
            }}
            table {{ width: 100%; border-collapse: collapse; }}
            th, td {{ padding: 10px; text-align: left; border-bottom: 1px solid #eee; }}
            th {{ background: #f8f9fa; font-weight: 600; }}
            .text-muted {{ color: #666; }}
            .truncate {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}
        </style>
    </head>
    <body>
        <div class="container">
            <h1>MeshCore Hub Dashboard</h1>
            <p class="text-muted">Last updated: {now.strftime('%Y-%m-%d %H:%M:%S UTC')}</p>
            <div class="stats">
                <div class="stat-card">
                    <h3>Total Nodes</h3>
                    <div class="value">{total_nodes}</div>
                </div>
                <div class="stat-card">
                    <h3>Active Nodes (24h)</h3>
                    <div class="value">{active_nodes}</div>
                </div>
                <div class="stat-card">
                    <h3>Total Messages</h3>
                    <div class="value">{total_messages}</div>
                </div>
                <div class="stat-card">
                    <h3>Messages Today</h3>
                    <div class="value">{messages_today}</div>
                </div>
            </div>
            <div class="section">
                <h2>Recent Nodes</h2>
                <table>
                    <thead>
                        <tr>
                            <th>Name</th>
                            <th>Public Key</th>
                            <th>Type</th>
                            <th>Last Seen</th>
                        </tr>
                    </thead>
                    <tbody>
                    {node_rows}
                    </tbody>
                </table>
            </div>
            <div class="section">
                <h2>Recent Messages</h2>
                <table>
                    <thead>
                        <tr>
                            <th>Type</th>
                            <th>From/Channel</th>
                            <th>Text</th>
                            <th>Received</th>
                        </tr>
                    </thead>
                    <tbody>
                    {message_rows}
                    </tbody>
                </table>
            </div>
        </div>
    </body>
    </html>
    """
    return HTMLResponse(content=html)

View File

@@ -2,15 +2,13 @@
from fastapi import APIRouter, HTTPException, Query
from sqlalchemy import func, select
from sqlalchemy.orm import selectinload
from meshcore_hub.api.auth import RequireAdmin, RequireRead
from meshcore_hub.api.dependencies import DbSession
from meshcore_hub.common.models import Member, MemberNode, Node
from meshcore_hub.common.models import Member
from meshcore_hub.common.schemas.members import (
MemberCreate,
MemberList,
MemberNodeRead,
MemberRead,
MemberUpdate,
)
@@ -18,50 +16,6 @@ from meshcore_hub.common.schemas.members import (
router = APIRouter()
def _enrich_member_nodes(
    member: Member, node_info: dict[str, dict]
) -> list[MemberNodeRead]:
    """Attach node details from the database to each of a member's nodes.

    Args:
        member: The member whose node associations should be enriched.
        node_info: Mapping of public_key to a dict of node details.

    Returns:
        A list of MemberNodeRead objects with node details filled in.
    """
    result: list[MemberNodeRead] = []
    for member_node in member.nodes:
        # Unknown public keys fall back to an empty detail dict, so the
        # enrichment fields simply come out as None.
        details = node_info.get(member_node.public_key, {})
        result.append(
            MemberNodeRead(
                public_key=member_node.public_key,
                node_role=member_node.node_role,
                created_at=member_node.created_at,
                updated_at=member_node.updated_at,
                node_name=details.get("name"),
                node_adv_type=details.get("adv_type"),
                friendly_name=details.get("friendly_name"),
            )
        )
    return result
def _member_to_read(member: Member, node_info: dict[str, dict]) -> MemberRead:
    """Build a MemberRead for *member*, with its nodes enriched via *node_info*."""
    enriched_nodes = _enrich_member_nodes(member, node_info)
    return MemberRead(
        id=member.id,
        name=member.name,
        callsign=member.callsign,
        role=member.role,
        description=member.description,
        contact=member.contact,
        nodes=enriched_nodes,
        created_at=member.created_at,
        updated_at=member.updated_at,
    )
@router.get("", response_model=MemberList)
async def list_members(
_: RequireRead,
@@ -74,45 +28,12 @@ async def list_members(
count_query = select(func.count()).select_from(Member)
total = session.execute(count_query).scalar() or 0
# Get members with nodes eagerly loaded
query = (
select(Member)
.options(selectinload(Member.nodes))
.order_by(Member.name)
.limit(limit)
.offset(offset)
)
# Get members ordered by name
query = select(Member).order_by(Member.name).limit(limit).offset(offset)
members = list(session.execute(query).scalars().all())
# Collect all public keys from member nodes
all_public_keys = set()
for m in members:
for mn in m.nodes:
all_public_keys.add(mn.public_key)
# Fetch node info for all public keys in one query
node_info: dict[str, dict] = {}
if all_public_keys:
node_query = (
select(Node)
.options(selectinload(Node.tags))
.where(Node.public_key.in_(all_public_keys))
)
nodes = session.execute(node_query).scalars().all()
for node in nodes:
friendly_name = None
for tag in node.tags:
if tag.key == "friendly_name":
friendly_name = tag.value
break
node_info[node.public_key] = {
"name": node.name,
"adv_type": node.adv_type,
"friendly_name": friendly_name,
}
return MemberList(
items=[_member_to_read(m, node_info) for m in members],
items=[MemberRead.model_validate(m) for m in members],
total=total,
limit=limit,
offset=offset,
@@ -126,37 +47,13 @@ async def get_member(
member_id: str,
) -> MemberRead:
"""Get a specific member by ID."""
query = (
select(Member).options(selectinload(Member.nodes)).where(Member.id == member_id)
)
query = select(Member).where(Member.id == member_id)
member = session.execute(query).scalar_one_or_none()
if not member:
raise HTTPException(status_code=404, detail="Member not found")
# Fetch node info for member's nodes
node_info: dict[str, dict] = {}
public_keys = [mn.public_key for mn in member.nodes]
if public_keys:
node_query = (
select(Node)
.options(selectinload(Node.tags))
.where(Node.public_key.in_(public_keys))
)
nodes = session.execute(node_query).scalars().all()
for node in nodes:
friendly_name = None
for tag in node.tags:
if tag.key == "friendly_name":
friendly_name = tag.value
break
node_info[node.public_key] = {
"name": node.name,
"adv_type": node.adv_type,
"friendly_name": friendly_name,
}
return _member_to_read(member, node_info)
return MemberRead.model_validate(member)
@router.post("", response_model=MemberRead, status_code=201)
@@ -166,8 +63,18 @@ async def create_member(
member: MemberCreate,
) -> MemberRead:
"""Create a new member."""
# Check if member_id already exists
query = select(Member).where(Member.member_id == member.member_id)
existing = session.execute(query).scalar_one_or_none()
if existing:
raise HTTPException(
status_code=400,
detail=f"Member with member_id '{member.member_id}' already exists",
)
# Create member
new_member = Member(
member_id=member.member_id,
name=member.name,
callsign=member.callsign,
role=member.role,
@@ -175,18 +82,6 @@ async def create_member(
contact=member.contact,
)
session.add(new_member)
session.flush() # Get the ID for the member
# Add nodes if provided
if member.nodes:
for node_data in member.nodes:
node = MemberNode(
member_id=new_member.id,
public_key=node_data.public_key.lower(),
node_role=node_data.node_role,
)
session.add(node)
session.commit()
session.refresh(new_member)
@@ -201,15 +96,25 @@ async def update_member(
member: MemberUpdate,
) -> MemberRead:
"""Update a member."""
query = (
select(Member).options(selectinload(Member.nodes)).where(Member.id == member_id)
)
query = select(Member).where(Member.id == member_id)
existing = session.execute(query).scalar_one_or_none()
if not existing:
raise HTTPException(status_code=404, detail="Member not found")
# Update fields
if member.member_id is not None:
# Check if new member_id is already taken by another member
check_query = select(Member).where(
Member.member_id == member.member_id, Member.id != member_id
)
collision = session.execute(check_query).scalar_one_or_none()
if collision:
raise HTTPException(
status_code=400,
detail=f"Member with member_id '{member.member_id}' already exists",
)
existing.member_id = member.member_id
if member.name is not None:
existing.name = member.name
if member.callsign is not None:
@@ -221,20 +126,6 @@ async def update_member(
if member.contact is not None:
existing.contact = member.contact
# Update nodes if provided (replaces existing nodes)
if member.nodes is not None:
# Clear existing nodes
existing.nodes.clear()
# Add new nodes
for node_data in member.nodes:
node = MemberNode(
member_id=existing.id,
public_key=node_data.public_key.lower(),
node_role=node_data.node_role,
)
existing.nodes.append(node)
session.commit()
session.refresh(existing)

View File

@@ -15,12 +15,12 @@ from meshcore_hub.common.schemas.messages import MessageList, MessageRead, Recei
router = APIRouter()
def _get_friendly_name(node: Optional[Node]) -> Optional[str]:
"""Extract friendly_name tag from a node's tags."""
def _get_tag_name(node: Optional[Node]) -> Optional[str]:
"""Extract name tag from a node's tags."""
if not node or not node.tags:
return None
for tag in node.tags:
if tag.key == "friendly_name":
if tag.key == "name":
return tag.value
return None
@@ -64,17 +64,17 @@ def _fetch_receivers_for_events(
# Group by event_hash
receivers_by_hash: dict[str, list[ReceiverInfo]] = {}
# Get friendly names for receiver nodes
# Get tag names for receiver nodes
node_ids = [r.node_id for r in results]
friendly_names: dict[str, str] = {}
tag_names: dict[str, str] = {}
if node_ids:
fn_query = (
tag_query = (
select(NodeTag.node_id, NodeTag.value)
.where(NodeTag.node_id.in_(node_ids))
.where(NodeTag.key == "friendly_name")
.where(NodeTag.key == "name")
)
for node_id, value in session.execute(fn_query).all():
friendly_names[node_id] = value
for node_id, value in session.execute(tag_query).all():
tag_names[node_id] = value
for row in results:
if row.event_hash not in receivers_by_hash:
@@ -85,7 +85,7 @@ def _fetch_receivers_for_events(
node_id=row.node_id,
public_key=row.public_key,
name=row.name,
friendly_name=friendly_names.get(row.node_id),
tag_name=tag_names.get(row.node_id),
snr=row.snr,
received_at=row.received_at,
)
@@ -153,10 +153,10 @@ async def list_messages(
# Execute
results = session.execute(query).all()
# Look up sender names and friendly_names for senders with pubkey_prefix
# Look up sender names and tag names for senders with pubkey_prefix
pubkey_prefixes = [r[0].pubkey_prefix for r in results if r[0].pubkey_prefix]
sender_names: dict[str, str] = {}
friendly_names: dict[str, str] = {}
sender_tag_names: dict[str, str] = {}
if pubkey_prefixes:
# Find nodes whose public_key starts with any of these prefixes
for prefix in set(pubkey_prefixes):
@@ -168,15 +168,15 @@ async def list_messages(
if name:
sender_names[public_key[:12]] = name
# Get friendly_name tag
friendly_name_query = (
# Get name tag
tag_name_query = (
select(Node.public_key, NodeTag.value)
.join(NodeTag, Node.id == NodeTag.node_id)
.where(Node.public_key.startswith(prefix))
.where(NodeTag.key == "friendly_name")
.where(NodeTag.key == "name")
)
for public_key, value in session.execute(friendly_name_query).all():
friendly_names[public_key[:12]] = value
for public_key, value in session.execute(tag_name_query).all():
sender_tag_names[public_key[:12]] = value
# Collect receiver node IDs to fetch tags
receiver_ids = set()
@@ -214,14 +214,14 @@ async def list_messages(
"receiver_node_id": m.receiver_node_id,
"received_by": receiver_pk,
"receiver_name": receiver_name,
"receiver_friendly_name": _get_friendly_name(receiver_node),
"receiver_tag_name": _get_tag_name(receiver_node),
"message_type": m.message_type,
"pubkey_prefix": m.pubkey_prefix,
"sender_name": (
sender_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
),
"sender_friendly_name": (
friendly_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
"sender_tag_name": (
sender_tag_names.get(m.pubkey_prefix) if m.pubkey_prefix else None
),
"channel_idx": m.channel_idx,
"text": m.text,

View File

@@ -6,7 +6,13 @@ from sqlalchemy import select
from meshcore_hub.api.auth import RequireAdmin, RequireRead
from meshcore_hub.api.dependencies import DbSession
from meshcore_hub.common.models import Node, NodeTag
from meshcore_hub.common.schemas.nodes import NodeTagCreate, NodeTagRead, NodeTagUpdate
from meshcore_hub.common.schemas.nodes import (
NodeTagCreate,
NodeTagMove,
NodeTagRead,
NodeTagsCopyResult,
NodeTagUpdate,
)
router = APIRouter()
@@ -130,6 +136,131 @@ async def update_node_tag(
return NodeTagRead.model_validate(node_tag)
@router.put("/nodes/{public_key}/tags/{key}/move", response_model=NodeTagRead)
async def move_node_tag(
    _: RequireAdmin,
    session: DbSession,
    public_key: str,
    key: str,
    data: NodeTagMove,
) -> NodeTagRead:
    """Move a node tag to a different node."""
    # Moving a tag onto its own node is meaningless; reject up front.
    if public_key == data.new_public_key:
        raise HTTPException(
            status_code=400,
            detail="Source and destination nodes are the same",
        )

    # Resolve the source node.
    src = session.execute(
        select(Node).where(Node.public_key == public_key)
    ).scalar_one_or_none()
    if src is None:
        raise HTTPException(status_code=404, detail="Source node not found")

    # Resolve the tag on the source node.
    tag = session.execute(
        select(NodeTag).where((NodeTag.node_id == src.id) & (NodeTag.key == key))
    ).scalar_one_or_none()
    if tag is None:
        raise HTTPException(status_code=404, detail="Tag not found")

    # Resolve the destination node.
    dest = session.execute(
        select(Node).where(Node.public_key == data.new_public_key)
    ).scalar_one_or_none()
    if dest is None:
        raise HTTPException(status_code=404, detail="Destination node not found")

    # Refuse to clobber an existing tag with the same key on the destination.
    duplicate = session.execute(
        select(NodeTag).where((NodeTag.node_id == dest.id) & (NodeTag.key == key))
    ).scalar_one_or_none()
    if duplicate is not None:
        raise HTTPException(
            status_code=409,
            detail=f"Tag '{key}' already exists on destination node",
        )

    # Re-parent the tag and persist.
    tag.node_id = dest.id
    session.commit()
    session.refresh(tag)
    return NodeTagRead.model_validate(tag)
@router.post(
    "/nodes/{public_key}/tags/copy-to/{dest_public_key}",
    response_model=NodeTagsCopyResult,
)
async def copy_all_tags(
    _: RequireAdmin,
    session: DbSession,
    public_key: str,
    dest_public_key: str,
) -> NodeTagsCopyResult:
    """Copy all tags from one node to another.

    Tags that already exist on the destination node are skipped.
    """
    # Copying a node's tags onto itself is a no-op at best; reject it.
    if public_key == dest_public_key:
        raise HTTPException(
            status_code=400,
            detail="Source and destination nodes are the same",
        )

    # Resolve the source node (source errors take precedence over destination).
    src = session.execute(
        select(Node).where(Node.public_key == public_key)
    ).scalar_one_or_none()
    if src is None:
        raise HTTPException(status_code=404, detail="Source node not found")

    # Resolve the destination node.
    dest = session.execute(
        select(Node).where(Node.public_key == dest_public_key)
    ).scalar_one_or_none()
    if dest is None:
        raise HTTPException(status_code=404, detail="Destination node not found")

    # Tag keys already present on the destination must not be overwritten.
    taken = set(
        session.execute(
            select(NodeTag.key).where(NodeTag.node_id == dest.id)
        )
        .scalars()
        .all()
    )

    copied_count = 0
    skipped: list[str] = []
    for src_tag in src.tags:
        if src_tag.key in taken:
            skipped.append(src_tag.key)
            continue
        session.add(
            NodeTag(
                node_id=dest.id,
                key=src_tag.key,
                value=src_tag.value,
                value_type=src_tag.value_type,
            )
        )
        copied_count += 1

    session.commit()
    return NodeTagsCopyResult(
        copied=copied_count,
        skipped=len(skipped),
        skipped_keys=skipped,
    )
@router.delete("/nodes/{public_key}/tags/{key}", status_code=204)
async def delete_node_tag(
_: RequireAdmin,
@@ -156,3 +287,27 @@ async def delete_node_tag(
session.delete(node_tag)
session.commit()
@router.delete("/nodes/{public_key}/tags")
async def delete_all_node_tags(
    _: RequireAdmin,
    session: DbSession,
    public_key: str,
) -> dict:
    """Delete all tags for a node."""
    # Resolve the node by its public key.
    node = session.execute(
        select(Node).where(Node.public_key == public_key)
    ).scalar_one_or_none()
    if node is None:
        raise HTTPException(status_code=404, detail="Node not found")

    # Snapshot the tag list so the count is stable while we delete via the
    # ORM (keeps relationship state consistent with the session).
    tags = list(node.tags)
    for tag in tags:
        session.delete(tag)
    session.commit()
    return {"deleted": len(tags)}

View File

@@ -2,12 +2,13 @@
from typing import Optional
from fastapi import APIRouter, HTTPException, Query
from sqlalchemy import func, select
from fastapi import APIRouter, HTTPException, Path, Query
from sqlalchemy import func, or_, select
from sqlalchemy.orm import selectinload
from meshcore_hub.api.auth import RequireRead
from meshcore_hub.api.dependencies import DbSession
from meshcore_hub.common.models import Node
from meshcore_hub.common.models import Node, NodeTag
from meshcore_hub.common.schemas.nodes import NodeList, NodeRead
router = APIRouter()
@@ -17,28 +18,95 @@ router = APIRouter()
async def list_nodes(
_: RequireRead,
session: DbSession,
search: Optional[str] = Query(None, description="Search in name or public key"),
search: Optional[str] = Query(
None, description="Search in name tag, node name, or public key"
),
adv_type: Optional[str] = Query(None, description="Filter by advertisement type"),
member_id: Optional[str] = Query(None, description="Filter by member_id tag value"),
role: Optional[str] = Query(None, description="Filter by role tag value"),
limit: int = Query(50, ge=1, le=500, description="Page size"),
offset: int = Query(0, ge=0, description="Page offset"),
) -> NodeList:
"""List all nodes with pagination and filtering."""
# Build query
query = select(Node)
# Build base query with tags loaded
query = select(Node).options(selectinload(Node.tags))
if search:
# Search in public key, node name, or name tag
# For name tag search, we need to join with NodeTag
search_pattern = f"%{search}%"
query = query.where(
(Node.name.ilike(f"%{search}%")) | (Node.public_key.ilike(f"%{search}%"))
or_(
Node.public_key.ilike(search_pattern),
Node.name.ilike(search_pattern),
Node.id.in_(
select(NodeTag.node_id).where(
NodeTag.key == "name", NodeTag.value.ilike(search_pattern)
)
),
)
)
if adv_type:
query = query.where(Node.adv_type == adv_type)
normalized_adv_type = adv_type.strip().lower()
if normalized_adv_type == "repeater":
query = query.where(
or_(
Node.adv_type == "repeater",
Node.adv_type.ilike("%repeater%"),
Node.adv_type.ilike("%relay%"),
)
)
elif normalized_adv_type == "companion":
query = query.where(
or_(
Node.adv_type == "companion",
Node.adv_type.ilike("%companion%"),
Node.adv_type.ilike("%observer%"),
)
)
elif normalized_adv_type == "room":
query = query.where(
or_(
Node.adv_type == "room",
Node.adv_type.ilike("%room%"),
)
)
elif normalized_adv_type == "chat":
query = query.where(
or_(
Node.adv_type == "chat",
Node.adv_type.ilike("%chat%"),
)
)
else:
query = query.where(Node.adv_type == adv_type)
if member_id:
# Filter nodes that have a member_id tag with the specified value
query = query.where(
Node.id.in_(
select(NodeTag.node_id).where(
NodeTag.key == "member_id", NodeTag.value == member_id
)
)
)
if role:
# Filter nodes that have a role tag with the specified value
query = query.where(
Node.id.in_(
select(NodeTag.node_id).where(
NodeTag.key == "role", NodeTag.value == role
)
)
)
# Get total count
count_query = select(func.count()).select_from(query.subquery())
total = session.execute(count_query).scalar() or 0
# Apply pagination
# Apply pagination and ordering
query = query.order_by(Node.last_seen.desc()).offset(offset).limit(limit)
# Execute
@@ -52,14 +120,43 @@ async def list_nodes(
)
@router.get("/{public_key}", response_model=NodeRead)
async def get_node(
@router.get("/prefix/{prefix}", response_model=NodeRead)
async def get_node_by_prefix(
    _: RequireRead,
    session: DbSession,
    prefix: str = Path(description="Public key prefix to search for"),
) -> NodeRead:
    """Get a single node by public key prefix.

    Returns the first node (alphabetically by public_key) that matches the prefix.
    """
    # Eager-load tags and pick the alphabetically-first match for determinism.
    node = session.execute(
        select(Node)
        .options(selectinload(Node.tags))
        .where(Node.public_key.startswith(prefix))
        .order_by(Node.public_key)
        .limit(1)
    ).scalar_one_or_none()

    if node is None:
        raise HTTPException(status_code=404, detail="Node not found")
    return NodeRead.model_validate(node)
@router.get("/{public_key}", response_model=NodeRead)
async def get_node(
_: RequireRead,
session: DbSession,
public_key: str = Path(description="Full 64-character public key"),
) -> NodeRead:
"""Get a single node by exact public key match."""
query = (
select(Node)
.options(selectinload(Node.tags))
.where(Node.public_key == public_key)
)
node = session.execute(query).scalar_one_or_none()
if not node:

View File

@@ -54,6 +54,31 @@ if TYPE_CHECKING:
envvar="MQTT_TLS",
help="Enable TLS/SSL for MQTT connection",
)
@click.option(
"--mqtt-transport",
type=click.Choice(["tcp", "websockets"], case_sensitive=False),
default="tcp",
envvar="MQTT_TRANSPORT",
help="MQTT transport protocol",
)
@click.option(
"--mqtt-ws-path",
type=str,
default="/mqtt",
envvar="MQTT_WS_PATH",
help="MQTT WebSocket path (used when transport=websockets)",
)
@click.option(
"--ingest-mode",
"collector_ingest_mode",
type=click.Choice(["native", "letsmesh_upload"], case_sensitive=False),
default="native",
envvar="COLLECTOR_INGEST_MODE",
help=(
"Collector ingest mode: native MeshCore events or LetsMesh upload "
"(packets/status/internal)"
),
)
@click.option(
"--data-home",
type=str,
@@ -90,6 +115,9 @@ def collector(
mqtt_password: str | None,
prefix: str,
mqtt_tls: bool,
mqtt_transport: str,
mqtt_ws_path: str,
collector_ingest_mode: str,
data_home: str | None,
seed_home: str | None,
database_url: str | None,
@@ -134,6 +162,9 @@ def collector(
ctx.obj["mqtt_password"] = mqtt_password
ctx.obj["prefix"] = prefix
ctx.obj["mqtt_tls"] = mqtt_tls
ctx.obj["mqtt_transport"] = mqtt_transport
ctx.obj["mqtt_ws_path"] = mqtt_ws_path
ctx.obj["collector_ingest_mode"] = collector_ingest_mode
ctx.obj["data_home"] = data_home or settings.data_home
ctx.obj["seed_home"] = settings.effective_seed_home
ctx.obj["database_url"] = effective_db_url
@@ -149,6 +180,9 @@ def collector(
mqtt_password=mqtt_password,
prefix=prefix,
mqtt_tls=mqtt_tls,
mqtt_transport=mqtt_transport,
mqtt_ws_path=mqtt_ws_path,
ingest_mode=collector_ingest_mode,
database_url=effective_db_url,
log_level=log_level,
data_home=data_home or settings.data_home,
@@ -163,6 +197,9 @@ def _run_collector_service(
mqtt_password: str | None,
prefix: str,
mqtt_tls: bool,
mqtt_transport: str,
mqtt_ws_path: str,
ingest_mode: str,
database_url: str,
log_level: str,
data_home: str,
@@ -170,8 +207,8 @@ def _run_collector_service(
) -> None:
"""Run the collector service.
On startup, automatically seeds the database from YAML files in seed_home
if they exist.
Note: Seed data import should be done using the 'meshcore-hub collector seed'
command or the dedicated seed container before starting the collector service.
Webhooks can be configured via environment variables:
- WEBHOOK_ADVERTISEMENT_URL: Webhook for advertisement events
@@ -191,38 +228,16 @@ def _run_collector_service(
click.echo(f"Data home: {data_home}")
click.echo(f"Seed home: {seed_home}")
click.echo(f"MQTT: {mqtt_host}:{mqtt_port} (prefix: {prefix})")
click.echo(f"MQTT transport: {mqtt_transport} (ws_path: {mqtt_ws_path})")
click.echo(f"Ingest mode: {ingest_mode}")
click.echo(f"Database: {database_url}")
# Initialize database (schema managed by Alembic migrations)
from meshcore_hub.common.database import DatabaseManager
db = DatabaseManager(database_url)
# Auto-seed from seed files on startup
click.echo("")
click.echo("Checking for seed files...")
seed_home_path = Path(seed_home)
node_tags_exists = (seed_home_path / "node_tags.yaml").exists()
members_exists = (seed_home_path / "members.yaml").exists()
if node_tags_exists or members_exists:
click.echo("Running seed import...")
_run_seed_import(
seed_home=seed_home,
db=db,
create_nodes=True,
verbose=True,
)
else:
click.echo(f"No seed files found in {seed_home}")
db.dispose()
# Load webhook configuration from settings
from meshcore_hub.collector.webhook import (
WebhookDispatcher,
create_webhooks_from_settings,
)
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
from meshcore_hub.common.config import get_collector_settings
settings = get_collector_settings()
@@ -259,6 +274,24 @@ def _run_collector_service(
if settings.data_retention_enabled or settings.node_cleanup_enabled:
click.echo(f" Interval: {settings.data_retention_interval_hours} hours")
if ingest_mode.lower() == "letsmesh_upload":
click.echo("")
click.echo("LetsMesh decode configuration:")
if settings.collector_letsmesh_decoder_enabled:
builtin_keys = len(LetsMeshPacketDecoder.BUILTIN_CHANNEL_KEYS)
env_keys = len(settings.collector_letsmesh_decoder_keys_list)
click.echo(
" Decoder: Enabled " f"({settings.collector_letsmesh_decoder_command})"
)
click.echo(f" Built-in keys: {builtin_keys}")
click.echo(" Additional keys from .env: " f"{env_keys} configured")
click.echo(
" Timeout: "
f"{settings.collector_letsmesh_decoder_timeout_seconds:.2f}s"
)
else:
click.echo(" Decoder: Disabled")
click.echo("")
click.echo("Starting MQTT subscriber...")
run_collector(
@@ -268,6 +301,9 @@ def _run_collector_service(
mqtt_password=mqtt_password,
mqtt_prefix=prefix,
mqtt_tls=mqtt_tls,
mqtt_transport=mqtt_transport,
mqtt_ws_path=mqtt_ws_path,
ingest_mode=ingest_mode,
database_url=database_url,
webhook_dispatcher=webhook_dispatcher,
cleanup_enabled=settings.data_retention_enabled,
@@ -275,6 +311,12 @@ def _run_collector_service(
cleanup_interval_hours=settings.data_retention_interval_hours,
node_cleanup_enabled=settings.node_cleanup_enabled,
node_cleanup_days=settings.node_cleanup_days,
letsmesh_decoder_enabled=settings.collector_letsmesh_decoder_enabled,
letsmesh_decoder_command=settings.collector_letsmesh_decoder_command,
letsmesh_decoder_channel_keys=settings.collector_letsmesh_decoder_keys_list,
letsmesh_decoder_timeout_seconds=(
settings.collector_letsmesh_decoder_timeout_seconds
),
)
@@ -292,6 +334,9 @@ def run_cmd(ctx: click.Context) -> None:
mqtt_password=ctx.obj["mqtt_password"],
prefix=ctx.obj["prefix"],
mqtt_tls=ctx.obj["mqtt_tls"],
mqtt_transport=ctx.obj["mqtt_transport"],
mqtt_ws_path=ctx.obj["mqtt_ws_path"],
ingest_mode=ctx.obj["collector_ingest_mode"],
database_url=ctx.obj["database_url"],
log_level=ctx.obj["log_level"],
data_home=ctx.obj["data_home"],
@@ -383,8 +428,11 @@ def _run_seed_import(
file_path=str(node_tags_file),
db=db,
create_nodes=create_nodes,
clear_existing=True,
)
if verbose:
if stats["deleted"]:
click.echo(f" Deleted {stats['deleted']} existing tags")
click.echo(
f" Tags: {stats['created']} created, {stats['updated']} updated"
)
@@ -428,16 +476,24 @@ def _run_seed_import(
default=False,
help="Skip tags for nodes that don't exist (default: create nodes)",
)
@click.option(
"--clear-existing",
is_flag=True,
default=False,
help="Delete all existing tags before importing",
)
@click.pass_context
def import_tags_cmd(
ctx: click.Context,
file: str | None,
no_create_nodes: bool,
clear_existing: bool,
) -> None:
"""Import node tags from a YAML file.
Reads a YAML file containing tag definitions and upserts them
into the database. Existing tags are updated, new tags are created.
into the database. By default, existing tags are updated and new tags are created.
Use --clear-existing to delete all tags before importing.
FILE is the path to the YAML file containing tags.
If not provided, defaults to {SEED_HOME}/node_tags.yaml.
@@ -447,12 +503,12 @@ def import_tags_cmd(
\b
0123456789abcdef...:
friendly_name: My Node
location:
value: "52.0,1.0"
type: coordinate
altitude:
value: "150"
type: number
active:
value: "true"
type: boolean
Shorthand is also supported (string values with default type):
@@ -461,7 +517,7 @@ def import_tags_cmd(
friendly_name: My Node
role: gateway
Supported types: string, number, boolean, coordinate
Supported types: string, number, boolean
"""
from pathlib import Path
@@ -492,11 +548,14 @@ def import_tags_cmd(
file_path=tags_file,
db=db,
create_nodes=not no_create_nodes,
clear_existing=clear_existing,
)
# Report results
click.echo("")
click.echo("Import complete:")
if stats["deleted"]:
click.echo(f" Tags deleted: {stats['deleted']}")
click.echo(f" Total tags in file: {stats['total']}")
click.echo(f" Tags created: {stats['created']}")
click.echo(f" Tags updated: {stats['updated']}")
@@ -674,3 +733,212 @@ def cleanup_cmd(
db.dispose()
click.echo("")
click.echo("Cleanup complete." if not dry_run else "Dry run complete.")
@collector.command("truncate")
@click.option(
    "--members",
    is_flag=True,
    default=False,
    help="Truncate members table",
)
@click.option(
    "--nodes",
    is_flag=True,
    default=False,
    help="Truncate nodes table (also clears tags, advertisements, messages, telemetry, trace paths)",
)
@click.option(
    "--messages",
    is_flag=True,
    default=False,
    help="Truncate messages table",
)
@click.option(
    "--advertisements",
    is_flag=True,
    default=False,
    help="Truncate advertisements table",
)
@click.option(
    "--telemetry",
    is_flag=True,
    default=False,
    help="Truncate telemetry table",
)
@click.option(
    "--trace-paths",
    is_flag=True,
    default=False,
    help="Truncate trace_paths table",
)
@click.option(
    "--event-logs",
    is_flag=True,
    default=False,
    help="Truncate event_logs table",
)
@click.option(
    "--all",
    "truncate_all",
    is_flag=True,
    default=False,
    help="Truncate ALL tables (use with caution!)",
)
@click.option(
    "--yes",
    is_flag=True,
    default=False,
    help="Skip confirmation prompt",
)
@click.pass_context
def truncate_cmd(
    ctx: click.Context,
    members: bool,
    nodes: bool,
    messages: bool,
    advertisements: bool,
    telemetry: bool,
    trace_paths: bool,
    event_logs: bool,
    truncate_all: bool,
    yes: bool,
) -> None:
    """Truncate (clear) data tables.

    WARNING: This permanently deletes data! Use with caution.

    Examples:

        # Clear members table
        meshcore-hub collector truncate --members

        # Clear messages and advertisements
        meshcore-hub collector truncate --messages --advertisements

        # Clear everything (requires confirmation)
        meshcore-hub collector truncate --all

    Note: Clearing nodes also clears all related data (tags, advertisements,
    messages, telemetry, trace paths) due to foreign key constraints.
    """
    configure_logging(level=ctx.obj["log_level"])
    # Determine what to truncate: --all overrides the individual flags.
    if truncate_all:
        tables_to_clear = {
            "members": True,
            "nodes": True,
            "messages": True,
            "advertisements": True,
            "telemetry": True,
            "trace_paths": True,
            "event_logs": True,
        }
    else:
        tables_to_clear = {
            "members": members,
            "nodes": nodes,
            "messages": messages,
            "advertisements": advertisements,
            "telemetry": telemetry,
            "trace_paths": trace_paths,
            "event_logs": event_logs,
        }
    # Check if any tables selected; bail out early rather than prompting.
    if not any(tables_to_clear.values()):
        click.echo("No tables specified. Use --help to see available options.")
        return
    # Show what will be cleared so the user confirms against the right target DB.
    click.echo("Database: " + ctx.obj["database_url"])
    click.echo("")
    click.echo("The following tables will be PERMANENTLY CLEARED:")
    for table, should_clear in tables_to_clear.items():
        if should_clear:
            click.echo(f" - {table}")
    if tables_to_clear.get("nodes"):
        click.echo("")
        click.echo(
            "WARNING: Clearing nodes will also clear all related data due to foreign keys:"
        )
        click.echo(" - node_tags")
        click.echo(" - advertisements")
        click.echo(" - messages")
        click.echo(" - telemetry")
        click.echo(" - trace_paths")
        click.echo("")
    # Confirm (unless --yes). Default answer is No, so a bare Enter aborts.
    if not yes:
        if not click.confirm(
            "Are you sure you want to permanently delete this data?", default=False
        ):
            click.echo("Aborted.")
            return
    # Imported here rather than at module top — presumably to keep CLI startup
    # light when this command isn't used; confirm against file conventions.
    from meshcore_hub.common.database import DatabaseManager
    from meshcore_hub.common.models import (
        Advertisement,
        EventLog,
        Member,
        Message,
        Node,
        NodeTag,
        Telemetry,
        TracePath,
    )
    from sqlalchemy import delete
    from sqlalchemy.engine import CursorResult

    db = DatabaseManager(ctx.obj["database_url"])
    with db.session_scope() as session:
        # Truncate in correct order to respect foreign keys:
        # child/event tables first, nodes (the parent table) last.
        cleared: list[str] = []
        # Clear members (no dependencies)
        if tables_to_clear.get("members"):
            # type: ignore silences the ORM's loose execute() return typing;
            # rowcount reports how many rows the DELETE removed.
            result: CursorResult = session.execute(delete(Member))  # type: ignore
            cleared.append(f"members: {result.rowcount} rows")
        # Clear event-specific tables first (they depend on nodes)
        if tables_to_clear.get("messages"):
            result = session.execute(delete(Message))  # type: ignore
            cleared.append(f"messages: {result.rowcount} rows")
        if tables_to_clear.get("advertisements"):
            result = session.execute(delete(Advertisement))  # type: ignore
            cleared.append(f"advertisements: {result.rowcount} rows")
        if tables_to_clear.get("telemetry"):
            result = session.execute(delete(Telemetry))  # type: ignore
            cleared.append(f"telemetry: {result.rowcount} rows")
        if tables_to_clear.get("trace_paths"):
            result = session.execute(delete(TracePath))  # type: ignore
            cleared.append(f"trace_paths: {result.rowcount} rows")
        if tables_to_clear.get("event_logs"):
            result = session.execute(delete(EventLog))  # type: ignore
            cleared.append(f"event_logs: {result.rowcount} rows")
        # Clear nodes last (this will cascade delete tags and any remaining events)
        # NOTE(review): assumes the Node relationships define DB/ORM cascade
        # deletes for the remaining event tables — confirm in common.models.
        if tables_to_clear.get("nodes"):
            # Delete tags first (they depend on nodes)
            tag_result: CursorResult = session.execute(delete(NodeTag))  # type: ignore
            cleared.append(f"node_tags: {tag_result.rowcount} rows (cascade)")
            # Delete nodes (will cascade to remaining related tables)
            node_result: CursorResult = session.execute(delete(Node))  # type: ignore
            cleared.append(f"nodes: {node_result.rowcount} rows")
    db.dispose()
    click.echo("")
    click.echo("Truncate complete. Cleared:")
    for item in cleared:
        click.echo(f" - {item}")
    click.echo("")

View File

@@ -14,6 +14,20 @@ from meshcore_hub.common.models import Advertisement, Node, add_event_receiver
logger = logging.getLogger(__name__)
def _coerce_float(value: Any) -> float | None:
"""Convert int/float/string values to float when possible."""
if value is None:
return None
if isinstance(value, (int, float)):
return float(value)
if isinstance(value, str):
try:
return float(value.strip())
except ValueError:
return None
return None
def handle_advertisement(
public_key: str,
event_type: str,
@@ -40,6 +54,22 @@ def handle_advertisement(
name = payload.get("name")
adv_type = payload.get("adv_type")
flags = payload.get("flags")
lat = payload.get("lat")
lon = payload.get("lon")
if lat is None:
lat = payload.get("adv_lat")
if lon is None:
lon = payload.get("adv_lon")
location = payload.get("location")
if isinstance(location, dict):
if lat is None:
lat = location.get("latitude")
if lon is None:
lon = location.get("longitude")
lat = _coerce_float(lat)
lon = _coerce_float(lon)
now = datetime.now(timezone.utc)
# Compute event hash for deduplication (30-second time bucket)
@@ -79,6 +109,10 @@ def handle_advertisement(
node_query = select(Node).where(Node.public_key == adv_public_key)
node = session.execute(node_query).scalar_one_or_none()
if node:
if lat is not None:
node.lat = lat
if lon is not None:
node.lon = lon
node.last_seen = now
# Add this receiver to the junction table
@@ -110,6 +144,10 @@ def handle_advertisement(
node.adv_type = adv_type
if flags is not None:
node.flags = flags
if lat is not None:
node.lat = lat
if lon is not None:
node.lon = lon
node.last_seen = now
else:
# Create new node
@@ -120,6 +158,8 @@ def handle_advertisement(
flags=flags,
first_seen=now,
last_seen=now,
lat=lat,
lon=lon,
)
session.add(node)
session.flush()

View File

@@ -47,6 +47,10 @@ def handle_contact(
# Device uses 'adv_name' for the advertised name
name = payload.get("adv_name") or payload.get("name")
# GPS coordinates (optional)
lat = payload.get("adv_lat")
lon = payload.get("adv_lon")
logger.info(f"Processing contact: {contact_key[:12]}... adv_name={name}")
# Device uses numeric 'type' field, convert to string
@@ -73,15 +77,24 @@ def handle_contact(
node.name = name
if node_type and not node.adv_type:
node.adv_type = node_type
node.last_seen = now
# Update GPS coordinates if provided
if lat is not None:
node.lat = lat
if lon is not None:
node.lon = lon
# Do NOT update last_seen for contact sync - only advertisement events
# should update last_seen since that's when the node was actually seen
else:
# Create new node
# Create new node from contact database
# Set last_seen=None since we haven't actually seen this node advertise yet
node = Node(
public_key=contact_key,
name=name,
adv_type=node_type,
first_seen=now,
last_seen=now,
last_seen=None, # Will be set when we receive an advertisement
lat=lat,
lon=lon,
)
session.add(node)
logger.info(f"Created node from contact: {contact_key[:12]}... ({name})")

View File

@@ -70,7 +70,7 @@ def _handle_message(
now = datetime.now(timezone.utc)
# Extract fields based on message type
pubkey_prefix = payload.get("pubkey_prefix") if message_type == "contact" else None
pubkey_prefix = payload.get("pubkey_prefix")
channel_idx = payload.get("channel_idx") if message_type == "channel" else None
path_len = payload.get("path_len")
txt_type = payload.get("txt_type")

View File

@@ -0,0 +1,275 @@
"""LetsMesh packet decoder integration.
Provides an optional bridge to the external `meshcore-decoder` CLI so the
collector can turn LetsMesh upload `raw` packet hex into decoded message data.
"""
from __future__ import annotations
import hashlib
import json
import logging
import shlex
import shutil
import string
import subprocess
from typing import Any, NamedTuple
logger = logging.getLogger(__name__)
class LetsMeshPacketDecoder:
    """Decode LetsMesh packet payloads with the `meshcore-decoder` CLI.

    Shells out to an external decoder command, passing the packet's raw hex
    and any configured channel keys, and parses the JSON it emits. Results
    (including failures, stored as None) are cached per raw-hex payload.
    """

    class ChannelKey(NamedTuple):
        """Channel key metadata for decryption and channel labeling."""

        # Optional human-readable channel label (e.g. "Public").
        label: str | None
        # Uppercase hex-encoded channel key.
        key_hex: str
        # Uppercase hex of the first SHA-256 byte of the key (channel hash).
        channel_hash: str

    # Built-in channel keys always registered before any configured keys:
    # - Public channel
    # - #test channel
    BUILTIN_CHANNEL_KEYS: tuple[tuple[str, str], ...] = (
        ("Public", "8B3387E9C5CDEA6AC9E5EDBAA115CD72"),
        ("test", "9CD8FCF22A47333B591D96A2B848B73F"),
    )

    # Sentinel distinguishing a cached None (decode failed) from "not cached".
    _CACHE_MISS: Any = object()

    def __init__(
        self,
        enabled: bool = True,
        command: str = "meshcore-decoder",
        channel_keys: list[str] | None = None,
        timeout_seconds: float = 2.0,
    ) -> None:
        """Initialize decoder.

        Args:
            enabled: Whether decoding is enabled at all.
            command: Decoder CLI command (may include arguments).
            channel_keys: Extra channel keys (`label=hex`, `label:hex`, `hex`).
            timeout_seconds: Per-invocation CLI timeout.
        """
        self._enabled = enabled
        command_text = command.strip()
        self._command_tokens = shlex.split(command_text) if command_text else []
        self._channel_key_infos = self._normalize_channel_keys(channel_keys or [])
        self._channel_keys = [info.key_hex for info in self._channel_key_infos]
        # Map channel hash -> label for labeled keys only.
        self._channel_names_by_hash = {
            info.channel_hash: info.label
            for info in self._channel_key_infos
            if info.label
        }
        # FIFO cache of decode results keyed by cleaned raw hex.
        self._decode_cache: dict[str, dict[str, Any] | None] = {}
        self._decode_cache_maxsize = 2048
        self._timeout_seconds = timeout_seconds
        self._checked_command = False
        self._command_available = False
        self._warned_unavailable = False

    @classmethod
    def _normalize_channel_keys(cls, values: list[str]) -> list[ChannelKey]:
        """Normalize key list (labels + key + channel hash, deduplicated).

        Built-in keys are registered first so a configured duplicate of a
        built-in key is ignored (dedup is by normalized key hex).
        """
        candidates = [f"{label}={key}" for label, key in cls.BUILTIN_CHANNEL_KEYS]
        candidates.extend(values)
        normalized: list[LetsMeshPacketDecoder.ChannelKey] = []
        seen_keys: set[str] = set()
        for value in candidates:
            entry = cls._normalize_channel_entry(value)
            if entry is None or entry.key_hex in seen_keys:
                continue
            normalized.append(entry)
            seen_keys.add(entry.key_hex)
        return normalized

    @classmethod
    def _normalize_channel_entry(cls, value: str | None) -> ChannelKey | None:
        """Normalize one key entry (`label=hex`, `label:hex`, or `hex`).

        Returns None for empty or non-hex entries. A leading `0x`/`0X` on the
        key part is stripped; labels lose a leading `#`.
        """
        if value is None:
            return None
        candidate = value.strip()
        if not candidate:
            return None
        label: str | None = None
        key_candidate = candidate
        # Only treat `=`/`:` as a label separator when the right side is hex;
        # otherwise the whole entry is taken as a bare key candidate.
        for separator in ("=", ":"):
            if separator not in candidate:
                continue
            left, right = candidate.split(separator, 1)
            right = right.strip()
            right = right.removeprefix("0x").removeprefix("0X").strip()
            if right and cls._is_hex(right):
                label = left.strip().lstrip("#")
                key_candidate = right
                break
        key_candidate = key_candidate.strip()
        key_candidate = key_candidate.removeprefix("0x").removeprefix("0X").strip()
        if not key_candidate or not cls._is_hex(key_candidate):
            return None
        key_hex = key_candidate.upper()
        channel_hash = cls._compute_channel_hash(key_hex)
        normalized_label = label.strip() if label and label.strip() else None
        return cls.ChannelKey(
            label=normalized_label,
            key_hex=key_hex,
            channel_hash=channel_hash,
        )

    @staticmethod
    def _is_hex(value: str) -> bool:
        """Return True if the non-empty string contains only hex digits."""
        return bool(value) and all(char in string.hexdigits for char in value)

    @staticmethod
    def _compute_channel_hash(key_hex: str) -> str:
        """Compute channel hash (first byte of SHA-256 of the channel key)."""
        return hashlib.sha256(bytes.fromhex(key_hex)).digest()[:1].hex().upper()

    def channel_name_from_decoded(
        self,
        decoded_packet: dict[str, Any] | None,
    ) -> str | None:
        """Resolve channel label from a decoded payload's channel hash.

        Walks decoded_packet["payload"]["decoded"]["channelHash"]; returns
        None if any level is missing or not the expected type.
        """
        if not isinstance(decoded_packet, dict):
            return None
        payload = decoded_packet.get("payload")
        if not isinstance(payload, dict):
            return None
        decoded = payload.get("decoded")
        if not isinstance(decoded, dict):
            return None
        channel_hash = decoded.get("channelHash")
        if not isinstance(channel_hash, str):
            return None
        return self._channel_names_by_hash.get(channel_hash.upper())

    def channel_labels_by_index(self) -> dict[int, str]:
        """Return channel labels keyed by numeric channel index (0-255).

        The index is the single-byte channel hash as an int. "public" is
        normalized to "Public"; every other label gets a "#" prefix.
        """
        labels: dict[int, str] = {}
        for info in self._channel_key_infos:
            if not info.label:
                continue
            label = info.label.strip()
            if not label:
                continue
            if label.lower() == "public":
                normalized_label = "Public"
            else:
                normalized_label = label if label.startswith("#") else f"#{label}"
            channel_idx = int(info.channel_hash, 16)
            # setdefault: first key wins if two keys share a channel hash.
            labels.setdefault(channel_idx, normalized_label)
        return labels

    def decode_payload(self, payload: dict[str, Any]) -> dict[str, Any] | None:
        """Decode packet payload `raw` hex and return decoded JSON if available."""
        raw_hex = payload.get("raw")
        if not isinstance(raw_hex, str):
            return None
        clean_hex = raw_hex.strip()
        if not clean_hex:
            return None
        if not self._is_hex(clean_hex):
            logger.debug("LetsMesh decoder skipped non-hex raw payload")
            return None
        # Single lookup with a sentinel: None is a valid cached value
        # (a remembered decode failure), so `get(..., None)` is ambiguous.
        cached = self._decode_cache.get(clean_hex, self._CACHE_MISS)
        if cached is not self._CACHE_MISS:
            return cached
        decoded = self._decode_raw(clean_hex)
        self._decode_cache[clean_hex] = decoded
        if len(self._decode_cache) > self._decode_cache_maxsize:
            # Drop oldest cached payload (insertion-order dict).
            self._decode_cache.pop(next(iter(self._decode_cache)))
        return decoded

    def _decode_raw(self, raw_hex: str) -> dict[str, Any] | None:
        """Decode raw packet hex with the decoder CLI.

        Returns the parsed JSON dict, or None on any failure (disabled,
        command missing, timeout, non-zero exit, empty or non-JSON output).
        """
        if not self._enabled:
            return None
        if not self._is_command_available():
            return None
        # One `--key` flag followed by all key values — presumably the CLI
        # declares the option with multiple values (nargs-style); confirm
        # against meshcore-decoder's CLI help.
        command = [*self._command_tokens, "decode", raw_hex, "--json"]
        if self._channel_keys:
            command.append("--key")
            command.extend(self._channel_keys)
        try:
            # List argv, shell=False (default): raw_hex/keys are never
            # interpreted by a shell.
            result = subprocess.run(
                command,
                check=False,
                capture_output=True,
                text=True,
                timeout=self._timeout_seconds,
            )
        except subprocess.TimeoutExpired:
            logger.debug(
                "LetsMesh decoder timed out after %.2fs",
                self._timeout_seconds,
            )
            return None
        except OSError as exc:
            logger.debug("LetsMesh decoder failed to execute: %s", exc)
            return None
        if result.returncode != 0:
            stderr = result.stderr.strip() if result.stderr else ""
            logger.debug(
                "LetsMesh decoder exited with code %s%s",
                result.returncode,
                f": {stderr}" if stderr else "",
            )
            return None
        output = result.stdout.strip()
        if not output:
            return None
        try:
            decoded = json.loads(output)
        except json.JSONDecodeError:
            logger.debug("LetsMesh decoder returned non-JSON output")
            return None
        return decoded if isinstance(decoded, dict) else None

    def _is_command_available(self) -> bool:
        """Check decoder command availability once (result is memoized)."""
        if self._checked_command:
            return self._command_available
        self._checked_command = True
        if not self._command_tokens:
            self._command_available = False
        else:
            # shutil.which handles both bare names (PATH lookup) and explicit
            # paths containing a separator, so a single call covers both
            # cases (the original had two identical branches here).
            self._command_available = (
                shutil.which(self._command_tokens[0]) is not None
            )
        if not self._command_available and not self._warned_unavailable:
            self._warned_unavailable = True
            command_text = " ".join(self._command_tokens) or "<empty>"
            logger.warning(
                "LetsMesh decoder command not found (%s). "
                "Messages will remain encrypted placeholders until decoder is installed.",
                command_text,
            )
        return self._command_available

File diff suppressed because it is too large Load Diff

View File

@@ -5,41 +5,28 @@ from pathlib import Path
from typing import Any, Optional
import yaml
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field
from sqlalchemy import select
from meshcore_hub.common.database import DatabaseManager
from meshcore_hub.common.models import Member, MemberNode
from meshcore_hub.common.models import Member
logger = logging.getLogger(__name__)
class NodeData(BaseModel):
"""Schema for a node entry in the member import file."""
public_key: str = Field(..., min_length=64, max_length=64)
node_role: Optional[str] = Field(default=None, max_length=50)
@field_validator("public_key")
@classmethod
def validate_public_key(cls, v: str) -> str:
"""Validate and normalize public key."""
if len(v) != 64:
raise ValueError(f"public_key must be 64 characters, got {len(v)}")
if not all(c in "0123456789abcdefABCDEF" for c in v):
raise ValueError("public_key must be a valid hex string")
return v.lower()
class MemberData(BaseModel):
"""Schema for a member entry in the import file."""
"""Schema for a member entry in the import file.
Note: Nodes are associated with members via a 'member_id' tag on the node,
not through this schema.
"""
member_id: str = Field(..., min_length=1, max_length=100)
name: str = Field(..., min_length=1, max_length=255)
callsign: Optional[str] = Field(default=None, max_length=20)
role: Optional[str] = Field(default=None, max_length=100)
description: Optional[str] = Field(default=None)
contact: Optional[str] = Field(default=None, max_length=255)
nodes: Optional[list[NodeData]] = Field(default=None)
def load_members_file(file_path: str | Path) -> list[dict[str, Any]]:
@@ -48,20 +35,16 @@ def load_members_file(file_path: str | Path) -> list[dict[str, Any]]:
Supports two formats:
1. List of member objects:
- name: Member 1
- member_id: member1
name: Member 1
callsign: M1
nodes:
- public_key: abc123...
node_role: chat
2. Object with "members" key:
members:
- name: Member 1
- member_id: member1
name: Member 1
callsign: M1
nodes:
- public_key: abc123...
node_role: chat
Args:
file_path: Path to the members YAML file
@@ -96,6 +79,8 @@ def load_members_file(file_path: str | Path) -> list[dict[str, Any]]:
for i, member in enumerate(members_list):
if not isinstance(member, dict):
raise ValueError(f"Member at index {i} must be an object")
if "member_id" not in member:
raise ValueError(f"Member at index {i} must have a 'member_id' field")
if "name" not in member:
raise ValueError(f"Member at index {i} must have a 'name' field")
@@ -115,9 +100,11 @@ def import_members(
) -> dict[str, Any]:
"""Import members from a YAML file into the database.
Performs upsert operations based on name - existing members are updated,
new members are created. Nodes are synced (existing nodes removed and
replaced with new ones from the file).
Performs upsert operations based on member_id - existing members are updated,
new members are created.
Note: Nodes are associated with members via a 'member_id' tag on the node.
This import does not manage node associations.
Args:
file_path: Path to the members YAML file
@@ -149,14 +136,17 @@ def import_members(
with db.session_scope() as session:
for member_data in members_data:
try:
member_id = member_data["member_id"]
name = member_data["name"]
# Find existing member by name
query = select(Member).where(Member.name == name)
# Find existing member by member_id
query = select(Member).where(Member.member_id == member_id)
existing = session.execute(query).scalar_one_or_none()
if existing:
# Update existing member
if member_data.get("name") is not None:
existing.name = member_data["name"]
if member_data.get("callsign") is not None:
existing.callsign = member_data["callsign"]
if member_data.get("role") is not None:
@@ -166,25 +156,12 @@ def import_members(
if member_data.get("contact") is not None:
existing.contact = member_data["contact"]
# Sync nodes if provided
if member_data.get("nodes") is not None:
# Remove existing nodes
existing.nodes.clear()
# Add new nodes
for node_data in member_data["nodes"]:
node = MemberNode(
member_id=existing.id,
public_key=node_data["public_key"],
node_role=node_data.get("node_role"),
)
existing.nodes.append(node)
stats["updated"] += 1
logger.debug(f"Updated member: {name}")
logger.debug(f"Updated member: {member_id} ({name})")
else:
# Create new member
new_member = Member(
member_id=member_id,
name=name,
callsign=member_data.get("callsign"),
role=member_data.get("role"),
@@ -192,23 +169,12 @@ def import_members(
contact=member_data.get("contact"),
)
session.add(new_member)
session.flush() # Get the ID for the member
# Add nodes if provided
if member_data.get("nodes"):
for node_data in member_data["nodes"]:
node = MemberNode(
member_id=new_member.id,
public_key=node_data["public_key"],
node_role=node_data.get("node_role"),
)
session.add(node)
stats["created"] += 1
logger.debug(f"Created member: {name}")
logger.debug(f"Created member: {member_id} ({name})")
except Exception as e:
error_msg = f"Error processing member '{member_data.get('name', 'unknown')}': {e}"
error_msg = f"Error processing member '{member_data.get('member_id', 'unknown')}' ({member_data.get('name', 'unknown')}): {e}"
stats["errors"].append(error_msg)
logger.error(error_msg)

View File

@@ -21,6 +21,8 @@ from typing import Any, Callable, Optional, TYPE_CHECKING
from meshcore_hub.common.database import DatabaseManager
from meshcore_hub.common.health import HealthReporter
from meshcore_hub.common.mqtt import MQTTClient, MQTTConfig
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
from meshcore_hub.collector.letsmesh_normalizer import LetsMeshNormalizer
if TYPE_CHECKING:
from meshcore_hub.collector.webhook import WebhookDispatcher
@@ -32,9 +34,12 @@ logger = logging.getLogger(__name__)
EventHandler = Callable[[str, str, dict[str, Any], DatabaseManager], None]
class Subscriber:
class Subscriber(LetsMeshNormalizer):
"""MQTT Subscriber for collecting and storing MeshCore events."""
INGEST_MODE_NATIVE = "native"
INGEST_MODE_LETSMESH_UPLOAD = "letsmesh_upload"
def __init__(
self,
mqtt_client: MQTTClient,
@@ -45,6 +50,11 @@ class Subscriber:
cleanup_interval_hours: int = 24,
node_cleanup_enabled: bool = False,
node_cleanup_days: int = 90,
ingest_mode: str = INGEST_MODE_NATIVE,
letsmesh_decoder_enabled: bool = True,
letsmesh_decoder_command: str = "meshcore-decoder",
letsmesh_decoder_channel_keys: list[str] | None = None,
letsmesh_decoder_timeout_seconds: float = 2.0,
):
"""Initialize subscriber.
@@ -57,6 +67,11 @@ class Subscriber:
cleanup_interval_hours: Hours between cleanup runs
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
node_cleanup_days: Remove nodes not seen for this many days
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
letsmesh_decoder_command: Decoder CLI command
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
"""
self.mqtt = mqtt_client
self.db = db_manager
@@ -79,6 +94,18 @@ class Subscriber:
self._node_cleanup_days = node_cleanup_days
self._cleanup_thread: Optional[threading.Thread] = None
self._last_cleanup: Optional[datetime] = None
self._ingest_mode = ingest_mode.lower()
if self._ingest_mode not in {
self.INGEST_MODE_NATIVE,
self.INGEST_MODE_LETSMESH_UPLOAD,
}:
raise ValueError(f"Unsupported collector ingest mode: {ingest_mode}")
self._letsmesh_decoder = LetsMeshPacketDecoder(
enabled=letsmesh_decoder_enabled,
command=letsmesh_decoder_command,
channel_keys=letsmesh_decoder_channel_keys,
timeout_seconds=letsmesh_decoder_timeout_seconds,
)
@property
def is_healthy(self) -> bool:
@@ -125,14 +152,34 @@ class Subscriber:
pattern: Subscription pattern
payload: Message payload
"""
# Parse event from topic
parsed = self.mqtt.topic_builder.parse_event_topic(topic)
parsed: tuple[str, str, dict[str, Any]] | None
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
parsed = self._normalize_letsmesh_event(topic, payload)
else:
parsed_event = self.mqtt.topic_builder.parse_event_topic(topic)
parsed = (
(parsed_event[0], parsed_event[1], payload) if parsed_event else None
)
if not parsed:
logger.warning(f"Could not parse event topic: {topic}")
logger.warning(
"Could not parse topic for ingest mode %s: %s",
self._ingest_mode,
topic,
)
return
public_key, event_type = parsed
logger.debug(f"Received event: {event_type} from {public_key[:12]}...")
public_key, event_type, normalized_payload = parsed
logger.debug("Received event: %s from %s...", event_type, public_key[:12])
self._dispatch_event(public_key, event_type, normalized_payload)
def _dispatch_event(
self,
public_key: str,
event_type: str,
payload: dict[str, Any],
) -> None:
"""Route a normalized event to the appropriate handler."""
# Find and call handler
handler = self._handlers.get(event_type)
@@ -358,10 +405,20 @@ class Subscriber:
logger.error(f"Failed to connect to MQTT broker: {e}")
raise
# Subscribe to all event topics
event_topic = self.mqtt.topic_builder.all_events_topic()
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
logger.info(f"Subscribed to event topic: {event_topic}")
# Subscribe to topics based on ingest mode
if self._ingest_mode == self.INGEST_MODE_LETSMESH_UPLOAD:
letsmesh_topics = [
f"{self.mqtt.topic_builder.prefix}/+/packets",
f"{self.mqtt.topic_builder.prefix}/+/status",
f"{self.mqtt.topic_builder.prefix}/+/internal",
]
for letsmesh_topic in letsmesh_topics:
self.mqtt.subscribe(letsmesh_topic, self._handle_mqtt_message)
logger.info(f"Subscribed to LetsMesh upload topic: {letsmesh_topic}")
else:
event_topic = self.mqtt.topic_builder.all_events_topic()
self.mqtt.subscribe(event_topic, self._handle_mqtt_message)
logger.info(f"Subscribed to event topic: {event_topic}")
self._running = True
@@ -429,6 +486,9 @@ def create_subscriber(
mqtt_password: Optional[str] = None,
mqtt_prefix: str = "meshcore",
mqtt_tls: bool = False,
mqtt_transport: str = "tcp",
mqtt_ws_path: str = "/mqtt",
ingest_mode: str = "native",
database_url: str = "sqlite:///./meshcore.db",
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
cleanup_enabled: bool = False,
@@ -436,6 +496,10 @@ def create_subscriber(
cleanup_interval_hours: int = 24,
node_cleanup_enabled: bool = False,
node_cleanup_days: int = 90,
letsmesh_decoder_enabled: bool = True,
letsmesh_decoder_command: str = "meshcore-decoder",
letsmesh_decoder_channel_keys: list[str] | None = None,
letsmesh_decoder_timeout_seconds: float = 2.0,
) -> Subscriber:
"""Create a configured subscriber instance.
@@ -446,6 +510,9 @@ def create_subscriber(
mqtt_password: MQTT password
mqtt_prefix: MQTT topic prefix
mqtt_tls: Enable TLS/SSL for MQTT connection
mqtt_transport: MQTT transport protocol (tcp or websockets)
mqtt_ws_path: WebSocket path (used when transport=websockets)
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
database_url: Database connection URL
webhook_dispatcher: Optional webhook dispatcher for event forwarding
cleanup_enabled: Enable automatic event data cleanup
@@ -453,6 +520,10 @@ def create_subscriber(
cleanup_interval_hours: Hours between cleanup runs
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
node_cleanup_days: Remove nodes not seen for this many days
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
letsmesh_decoder_command: Decoder CLI command
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
Returns:
Configured Subscriber instance
@@ -467,6 +538,8 @@ def create_subscriber(
prefix=mqtt_prefix,
client_id=f"meshcore-collector-{unique_id}",
tls=mqtt_tls,
transport=mqtt_transport,
ws_path=mqtt_ws_path,
)
mqtt_client = MQTTClient(mqtt_config)
@@ -483,6 +556,11 @@ def create_subscriber(
cleanup_interval_hours=cleanup_interval_hours,
node_cleanup_enabled=node_cleanup_enabled,
node_cleanup_days=node_cleanup_days,
ingest_mode=ingest_mode,
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
letsmesh_decoder_command=letsmesh_decoder_command,
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
)
# Register handlers
@@ -500,6 +578,9 @@ def run_collector(
mqtt_password: Optional[str] = None,
mqtt_prefix: str = "meshcore",
mqtt_tls: bool = False,
mqtt_transport: str = "tcp",
mqtt_ws_path: str = "/mqtt",
ingest_mode: str = "native",
database_url: str = "sqlite:///./meshcore.db",
webhook_dispatcher: Optional["WebhookDispatcher"] = None,
cleanup_enabled: bool = False,
@@ -507,6 +588,10 @@ def run_collector(
cleanup_interval_hours: int = 24,
node_cleanup_enabled: bool = False,
node_cleanup_days: int = 90,
letsmesh_decoder_enabled: bool = True,
letsmesh_decoder_command: str = "meshcore-decoder",
letsmesh_decoder_channel_keys: list[str] | None = None,
letsmesh_decoder_timeout_seconds: float = 2.0,
) -> None:
"""Run the collector (blocking).
@@ -517,6 +602,9 @@ def run_collector(
mqtt_password: MQTT password
mqtt_prefix: MQTT topic prefix
mqtt_tls: Enable TLS/SSL for MQTT connection
mqtt_transport: MQTT transport protocol (tcp or websockets)
mqtt_ws_path: WebSocket path (used when transport=websockets)
ingest_mode: Ingest mode ('native' or 'letsmesh_upload')
database_url: Database connection URL
webhook_dispatcher: Optional webhook dispatcher for event forwarding
cleanup_enabled: Enable automatic event data cleanup
@@ -524,6 +612,10 @@ def run_collector(
cleanup_interval_hours: Hours between cleanup runs
node_cleanup_enabled: Enable automatic cleanup of inactive nodes
node_cleanup_days: Remove nodes not seen for this many days
letsmesh_decoder_enabled: Enable external LetsMesh packet decoder
letsmesh_decoder_command: Decoder CLI command
letsmesh_decoder_channel_keys: Optional channel keys for decrypting group text
letsmesh_decoder_timeout_seconds: Decoder CLI timeout
"""
subscriber = create_subscriber(
mqtt_host=mqtt_host,
@@ -532,6 +624,9 @@ def run_collector(
mqtt_password=mqtt_password,
mqtt_prefix=mqtt_prefix,
mqtt_tls=mqtt_tls,
mqtt_transport=mqtt_transport,
mqtt_ws_path=mqtt_ws_path,
ingest_mode=ingest_mode,
database_url=database_url,
webhook_dispatcher=webhook_dispatcher,
cleanup_enabled=cleanup_enabled,
@@ -539,6 +634,10 @@ def run_collector(
cleanup_interval_hours=cleanup_interval_hours,
node_cleanup_enabled=node_cleanup_enabled,
node_cleanup_days=node_cleanup_days,
letsmesh_decoder_enabled=letsmesh_decoder_enabled,
letsmesh_decoder_command=letsmesh_decoder_command,
letsmesh_decoder_channel_keys=letsmesh_decoder_channel_keys,
letsmesh_decoder_timeout_seconds=letsmesh_decoder_timeout_seconds,
)
# Set up signal handlers

View File

@@ -7,7 +7,7 @@ from typing import Any
import yaml
from pydantic import BaseModel, Field, model_validator
from sqlalchemy import select
from sqlalchemy import delete, func, select
from meshcore_hub.common.database import DatabaseManager
from meshcore_hub.common.models import Node, NodeTag
@@ -19,7 +19,7 @@ class TagValue(BaseModel):
"""Schema for a tag value with type."""
value: str | None = None
type: str = Field(default="string", pattern=r"^(string|number|boolean|coordinate)$")
type: str = Field(default="string", pattern=r"^(string|number|boolean)$")
class NodeTags(BaseModel):
@@ -151,16 +151,19 @@ def import_tags(
file_path: str | Path,
db: DatabaseManager,
create_nodes: bool = True,
clear_existing: bool = False,
) -> dict[str, Any]:
"""Import tags from a YAML file into the database.
Performs upsert operations - existing tags are updated, new tags are created.
Optionally clears all existing tags before import.
Args:
file_path: Path to the tags YAML file
db: Database manager instance
create_nodes: If True, create nodes that don't exist. If False, skip tags
for non-existent nodes.
clear_existing: If True, delete all existing tags before importing.
Returns:
Dictionary with import statistics:
@@ -169,6 +172,7 @@ def import_tags(
- updated: Number of existing tags updated
- skipped: Number of tags skipped (node not found and create_nodes=False)
- nodes_created: Number of new nodes created
- deleted: Number of existing tags deleted (if clear_existing=True)
- errors: List of error messages
"""
stats: dict[str, Any] = {
@@ -177,6 +181,7 @@ def import_tags(
"updated": 0,
"skipped": 0,
"nodes_created": 0,
"deleted": 0,
"errors": [],
}
@@ -194,6 +199,15 @@ def import_tags(
now = datetime.now(timezone.utc)
with db.session_scope() as session:
# Clear all existing tags if requested
if clear_existing:
delete_count = (
session.execute(select(func.count()).select_from(NodeTag)).scalar() or 0
)
session.execute(delete(NodeTag))
stats["deleted"] = delete_count
logger.info(f"Deleted {delete_count} existing tags")
# Cache nodes by public_key to reduce queries
node_cache: dict[str, Node] = {}
@@ -232,24 +246,8 @@ def import_tags(
tag_value = tag_data.get("value")
tag_type = tag_data.get("type", "string")
# Find or create tag
tag_query = select(NodeTag).where(
NodeTag.node_id == node.id,
NodeTag.key == tag_key,
)
existing_tag = session.execute(tag_query).scalar_one_or_none()
if existing_tag:
# Update existing tag
existing_tag.value = tag_value
existing_tag.value_type = tag_type
stats["updated"] += 1
logger.debug(
f"Updated tag {tag_key}={tag_value} "
f"for {public_key[:12]}..."
)
else:
# Create new tag
if clear_existing:
# When clearing, always create new tags
new_tag = NodeTag(
node_id=node.id,
key=tag_key,
@@ -262,6 +260,39 @@ def import_tags(
f"Created tag {tag_key}={tag_value} "
f"for {public_key[:12]}..."
)
else:
# Find or create tag
tag_query = select(NodeTag).where(
NodeTag.node_id == node.id,
NodeTag.key == tag_key,
)
existing_tag = session.execute(
tag_query
).scalar_one_or_none()
if existing_tag:
# Update existing tag
existing_tag.value = tag_value
existing_tag.value_type = tag_type
stats["updated"] += 1
logger.debug(
f"Updated tag {tag_key}={tag_value} "
f"for {public_key[:12]}..."
)
else:
# Create new tag
new_tag = NodeTag(
node_id=node.id,
key=tag_key,
value=tag_value,
value_type=tag_type,
)
session.add(new_tag)
stats["created"] += 1
logger.debug(
f"Created tag {tag_key}={tag_value} "
f"for {public_key[:12]}..."
)
except Exception as e:
error_msg = f"Error processing tag {tag_key} for {public_key[:12]}...: {e}"

View File

@@ -1,6 +1,7 @@
"""Pydantic Settings for MeshCore Hub configuration."""
from enum import Enum
import re
from typing import Optional
from pydantic import Field, field_validator
@@ -24,6 +25,20 @@ class InterfaceMode(str, Enum):
SENDER = "SENDER"
class MQTTTransport(str, Enum):
"""MQTT transport type."""
TCP = "tcp"
WEBSOCKETS = "websockets"
class CollectorIngestMode(str, Enum):
"""Collector MQTT ingest mode."""
NATIVE = "native"
LETSMESH_UPLOAD = "letsmesh_upload"
class CommonSettings(BaseSettings):
"""Common settings shared by all components."""
@@ -55,6 +70,14 @@ class CommonSettings(BaseSettings):
mqtt_tls: bool = Field(
default=False, description="Enable TLS/SSL for MQTT connection"
)
mqtt_transport: MQTTTransport = Field(
default=MQTTTransport.TCP,
description="MQTT transport protocol (tcp or websockets)",
)
mqtt_ws_path: str = Field(
default="/mqtt",
description="WebSocket path for MQTT transport (used when MQTT_TRANSPORT=websockets)",
)
class InterfaceSettings(CommonSettings):
@@ -78,6 +101,17 @@ class InterfaceSettings(CommonSettings):
default=None, description="Device/node name (optional)"
)
# Contact cleanup settings
contact_cleanup_enabled: bool = Field(
default=True,
description="Enable automatic removal of stale contacts from companion node",
)
contact_cleanup_days: int = Field(
default=7,
description="Remove contacts not advertised for this many days",
ge=1,
)
class CollectorSettings(CommonSettings):
"""Settings for the Collector component."""
@@ -151,6 +185,42 @@ class CollectorSettings(CommonSettings):
description="Remove nodes not seen for this many days (last_seen)",
ge=1,
)
collector_ingest_mode: CollectorIngestMode = Field(
default=CollectorIngestMode.NATIVE,
description=(
"Collector MQTT ingest mode. "
"'native' expects <prefix>/<pubkey>/event/<event_name>. "
"'letsmesh_upload' expects LetsMesh observer uploads on "
"<prefix>/<pubkey>/(packets|status|internal)."
),
)
collector_letsmesh_decoder_enabled: bool = Field(
default=True,
description=(
"Enable external LetsMesh packet decoding via meshcore-decoder. "
"Only applies when COLLECTOR_INGEST_MODE=letsmesh_upload."
),
)
collector_letsmesh_decoder_command: str = Field(
default="meshcore-decoder",
description=(
"Command used to run LetsMesh packet decoder CLI "
"(for example: meshcore-decoder, /usr/local/bin/meshcore-decoder, "
"or 'npx meshcore-decoder')."
),
)
collector_letsmesh_decoder_keys: Optional[str] = Field(
default=None,
description=(
"Optional channel secret keys for LetsMesh message decryption. "
"Provide as comma/space separated hex values."
),
)
collector_letsmesh_decoder_timeout_seconds: float = Field(
default=2.0,
description="Timeout in seconds for each decoder invocation.",
ge=0.1,
)
@property
def collector_data_dir(self) -> str:
@@ -190,6 +260,17 @@ class CollectorSettings(CommonSettings):
return str(Path(self.effective_seed_home) / "members.yaml")
@property
def collector_letsmesh_decoder_keys_list(self) -> list[str]:
    """Return the configured LetsMesh decoder keys as a normalized list.

    The raw setting is a single string of comma- and/or whitespace-
    separated hex values; blank entries are dropped. An unset/empty
    setting yields an empty list.
    """
    raw = self.collector_letsmesh_decoder_keys
    if not raw:
        return []
    keys: list[str] = []
    for token in re.split(r"[,\s]+", raw):
        token = token.strip()
        if token:
            keys.append(token)
    return keys
@field_validator("database_url")
@classmethod
def validate_database_url(cls, v: Optional[str]) -> Optional[str]:
@@ -242,6 +323,47 @@ class WebSettings(CommonSettings):
web_host: str = Field(default="0.0.0.0", description="Web server host")
web_port: int = Field(default=8080, description="Web server port")
# Timezone for date/time display (uses standard TZ environment variable)
tz: str = Field(default="UTC", description="Timezone for displaying dates/times")
# Theme (dark or light, default dark)
web_theme: str = Field(
default="dark",
description="Default theme for the web dashboard (dark or light)",
)
# Locale / language (default: English)
web_locale: str = Field(
default="en",
description="Locale/language for the web dashboard (e.g. 'en')",
)
web_datetime_locale: str = Field(
default="en-US",
description=(
"Locale used for date/time formatting in the web dashboard "
"(e.g. 'en-US', 'en-GB')."
),
)
# Auto-refresh interval for list pages
web_auto_refresh_seconds: int = Field(
default=30,
description="Auto-refresh interval in seconds for list pages (0 to disable)",
ge=0,
)
# Trusted proxy hosts for X-Forwarded-For header processing
web_trusted_proxy_hosts: str = Field(
default="*",
description="Comma-separated list of trusted proxy hosts or '*' for all",
)
# Admin interface (disabled by default for security)
web_admin_enabled: bool = Field(
default=False,
description="Enable admin interface at /a/ (requires OAuth2Proxy in front)",
)
# API connection
api_base_url: str = Field(
default="http://localhost:8000",
@@ -274,10 +396,80 @@ class WebSettings(CommonSettings):
network_contact_github: Optional[str] = Field(
default=None, description="GitHub repository URL"
)
network_contact_youtube: Optional[str] = Field(
default=None, description="YouTube channel URL"
)
network_welcome_text: Optional[str] = Field(
default=None, description="Welcome text for homepage"
)
# Feature flags (control which pages are visible in the web dashboard)
feature_dashboard: bool = Field(
default=True, description="Enable the /dashboard page"
)
feature_nodes: bool = Field(default=True, description="Enable the /nodes pages")
feature_advertisements: bool = Field(
default=True, description="Enable the /advertisements page"
)
feature_messages: bool = Field(
default=True, description="Enable the /messages page"
)
feature_map: bool = Field(
default=True, description="Enable the /map page and /map/data endpoint"
)
feature_members: bool = Field(default=True, description="Enable the /members page")
feature_pages: bool = Field(
default=True, description="Enable custom markdown pages"
)
# Content directory (contains pages/ and media/ subdirectories)
content_home: Optional[str] = Field(
default=None,
description="Directory containing custom content (pages/, media/) (default: ./content)",
)
@property
def features(self) -> dict[str, bool]:
    """Expose the web feature flags as a single dictionary.

    Dependent flags are resolved automatically:
    - "dashboard" is only enabled when at least one of nodes,
      advertisements or messages is enabled (otherwise it is empty).
    - "map" is only enabled when nodes are enabled (the map plots nodes).
    """
    dashboard_has_content = any(
        (self.feature_nodes, self.feature_advertisements, self.feature_messages)
    )
    flags = {
        "dashboard": self.feature_dashboard and dashboard_has_content,
        "nodes": self.feature_nodes,
        "advertisements": self.feature_advertisements,
        "messages": self.feature_messages,
        "map": self.feature_map and self.feature_nodes,
        "members": self.feature_members,
        "pages": self.feature_pages,
    }
    return flags
@property
def effective_content_home(self) -> str:
    """Return the content directory, defaulting to ``./content`` when unset."""
    from pathlib import Path

    configured = self.content_home
    if not configured:
        configured = "./content"
    return str(Path(configured))
@property
def effective_pages_home(self) -> str:
    """Return the custom markdown pages directory (``<content_home>/pages``)."""
    from pathlib import Path

    base = Path(self.effective_content_home)
    return str(base / "pages")
@property
def effective_media_home(self) -> str:
    """Return the media assets directory (``<content_home>/media``)."""
    from pathlib import Path

    base = Path(self.effective_content_home)
    return str(base / "media")
@property
def web_data_dir(self) -> str:
"""Get the web data directory path."""

View File

@@ -98,6 +98,15 @@ class DatabaseManager:
echo: Enable SQL query logging
"""
self.database_url = database_url
# Ensure parent directory exists for SQLite databases
if database_url.startswith("sqlite:///"):
from pathlib import Path
# Extract path from sqlite:///path/to/db.sqlite
db_path = Path(database_url.replace("sqlite:///", ""))
db_path.parent.mkdir(parents=True, exist_ok=True)
self.engine = create_database_engine(database_url, echo=echo)
self.session_factory = create_session_factory(self.engine)

View File

@@ -49,7 +49,7 @@ def compute_advertisement_hash(
adv_type: Optional[str] = None,
flags: Optional[int] = None,
received_at: Optional[datetime] = None,
bucket_seconds: int = 30,
bucket_seconds: int = 120,
) -> str:
"""Compute a deterministic hash for an advertisement.
@@ -104,7 +104,7 @@ def compute_telemetry_hash(
node_public_key: str,
parsed_data: Optional[dict] = None,
received_at: Optional[datetime] = None,
bucket_seconds: int = 30,
bucket_seconds: int = 120,
) -> str:
"""Compute a deterministic hash for a telemetry record.

View File

@@ -0,0 +1,81 @@
"""Lightweight i18n support for MeshCore Hub.
Loads JSON translation files and provides a ``t()`` lookup function
that is shared between the Python (Jinja2) and JavaScript (SPA) sides.
The same ``en.json`` file is served as a static asset for the client and
read from disk for server-side template rendering.
"""
import json
import logging
from pathlib import Path
from typing import Any
logger = logging.getLogger(__name__)
_translations: dict[str, Any] = {}
_locale: str = "en"
# Directory where locale JSON files live (web/static/locales/)
LOCALES_DIR = Path(__file__).parent.parent / "web" / "static" / "locales"
def load_locale(locale: str = "en", locales_dir: Path | None = None) -> None:
    """Load a locale's translation file into memory.

    Falls back to ``en.json`` when the requested locale file does not
    exist. On fallback the module-level locale is recorded as ``"en"``
    so that ``get_locale()`` reports the translations actually loaded
    rather than the ones that were requested.

    Args:
        locale: Language code (e.g. ``"en"``).
        locales_dir: Override directory containing ``<locale>.json`` files.
    """
    global _translations, _locale
    directory = locales_dir or LOCALES_DIR
    effective_locale = locale
    path = directory / f"{locale}.json"
    if not path.exists():
        logger.warning("Locale file not found: %s falling back to 'en'", path)
        # Track the fallback so _locale matches the file actually loaded.
        effective_locale = "en"
        path = directory / "en.json"
    if path.exists():
        _translations = json.loads(path.read_text(encoding="utf-8"))
        _locale = effective_locale
        logger.info("Loaded locale '%s' from %s", effective_locale, path)
    else:
        # Leave any previously loaded translations untouched; t() falls
        # back to returning raw keys if nothing was ever loaded.
        logger.error("No locale files found in %s", directory)
def _resolve(key: str) -> Any:
    """Look up a dot-separated key in the nested translation mapping.

    Returns ``None`` as soon as a path segment cannot be followed
    (i.e. the current value is not a dict).
    """
    node: Any = _translations
    for segment in key.split("."):
        if not isinstance(node, dict):
            return None
        node = node.get(segment)
    return node
def t(key: str, **kwargs: Any) -> str:
    """Translate *key*, interpolating ``{{var}}`` placeholders.

    Args:
        key: Dot-separated translation key (e.g. ``"nav.home"``).
        **kwargs: Values substituted into ``{{name}}`` placeholders.

    Returns:
        The translated string, or *key* itself when no string
        translation exists for it.
    """
    translated = _resolve(key)
    if not isinstance(translated, str):
        # Missing or non-leaf key: surface the key so the UI still renders.
        return key
    for name, value in kwargs.items():
        placeholder = "{{" + name + "}}"
        translated = translated.replace(placeholder, str(value))
    return translated
def get_locale() -> str:
    """Return the locale code recorded by the most recent load_locale() call."""
    return _locale

View File

@@ -9,7 +9,6 @@ from meshcore_hub.common.models.trace_path import TracePath
from meshcore_hub.common.models.telemetry import Telemetry
from meshcore_hub.common.models.event_log import EventLog
from meshcore_hub.common.models.member import Member
from meshcore_hub.common.models.member_node import MemberNode
from meshcore_hub.common.models.event_receiver import EventReceiver, add_event_receiver
__all__ = [
@@ -23,7 +22,6 @@ __all__ = [
"Telemetry",
"EventLog",
"Member",
"MemberNode",
"EventReceiver",
"add_event_receiver",
]

View File

@@ -1,36 +1,39 @@
"""Member model for network member information."""
from typing import TYPE_CHECKING, Optional
from typing import Optional
from sqlalchemy import String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.orm import Mapped, mapped_column
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin
if TYPE_CHECKING:
from meshcore_hub.common.models.member_node import MemberNode
class Member(Base, UUIDMixin, TimestampMixin):
"""Member model for network member information.
Stores information about network members/operators.
Members can have multiple associated nodes (chat, repeater, etc.).
Nodes are associated with members via a 'member_id' tag on the node.
Attributes:
id: UUID primary key
member_id: Unique member identifier (e.g., 'walshie86')
name: Member's display name
callsign: Amateur radio callsign (optional)
role: Member's role in the network (optional)
description: Additional description (optional)
contact: Contact information (optional)
nodes: List of associated MemberNode records
created_at: Record creation timestamp
updated_at: Record update timestamp
"""
__tablename__ = "members"
member_id: Mapped[str] = mapped_column(
String(100),
nullable=False,
unique=True,
index=True,
)
name: Mapped[str] = mapped_column(
String(255),
nullable=False,
@@ -52,11 +55,5 @@ class Member(Base, UUIDMixin, TimestampMixin):
nullable=True,
)
# Relationship to member nodes
nodes: Mapped[list["MemberNode"]] = relationship(
back_populates="member",
cascade="all, delete-orphan",
)
def __repr__(self) -> str:
return f"<Member(id={self.id}, name={self.name}, callsign={self.callsign})>"
return f"<Member(id={self.id}, member_id={self.member_id}, name={self.name}, callsign={self.callsign})>"

View File

@@ -1,56 +0,0 @@
"""MemberNode model for associating nodes with members."""
from typing import TYPE_CHECKING, Optional
from sqlalchemy import ForeignKey, String, Index
from sqlalchemy.orm import Mapped, mapped_column, relationship
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin
if TYPE_CHECKING:
from meshcore_hub.common.models.member import Member
class MemberNode(Base, UUIDMixin, TimestampMixin):
    """Association model linking members to their nodes.

    A member can have multiple nodes (e.g., chat node, repeater).
    Each node is identified by its public_key and has a role.

    Attributes:
        id: UUID primary key
        member_id: Foreign key to the member
        public_key: Node's public key (64-char hex)
        node_role: Role of the node (e.g., 'chat', 'repeater')
        created_at: Record creation timestamp
        updated_at: Record update timestamp
    """

    __tablename__ = "member_nodes"

    # FK to members.id (UUID stored as a 36-char string); rows are removed
    # automatically by the database when the owning member is deleted.
    member_id: Mapped[str] = mapped_column(
        String(36),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    # Node's public key (64-char hex identifier), indexed for lookups.
    public_key: Mapped[str] = mapped_column(
        String(64),
        nullable=False,
        index=True,
    )

    # Optional role label for the node (e.g. 'chat', 'repeater').
    node_role: Mapped[Optional[str]] = mapped_column(
        String(50),
        nullable=True,
    )

    # Relationship back to member
    member: Mapped["Member"] = relationship(back_populates="nodes")

    # Composite index for efficient lookups
    __table_args__ = (
        Index("ix_member_nodes_member_public_key", "member_id", "public_key"),
    )

    def __repr__(self) -> str:
        return f"<MemberNode(member_id={self.member_id}, public_key={self.public_key[:8]}..., role={self.node_role})>"

View File

@@ -3,7 +3,7 @@
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from sqlalchemy import DateTime, Index, Integer, String
from sqlalchemy import DateTime, Float, Index, Integer, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
from meshcore_hub.common.models.base import Base, TimestampMixin, UUIDMixin, utc_now
@@ -23,6 +23,8 @@ class Node(Base, UUIDMixin, TimestampMixin):
flags: Capability/status flags bitmask
first_seen: Timestamp of first advertisement
last_seen: Timestamp of most recent activity
lat: GPS latitude coordinate (if available)
lon: GPS longitude coordinate (if available)
created_at: Record creation timestamp
updated_at: Record update timestamp
"""
@@ -52,10 +54,18 @@ class Node(Base, UUIDMixin, TimestampMixin):
default=utc_now,
nullable=False,
)
last_seen: Mapped[datetime] = mapped_column(
last_seen: Mapped[Optional[datetime]] = mapped_column(
DateTime(timezone=True),
default=utc_now,
nullable=False,
default=None,
nullable=True,
)
lat: Mapped[Optional[float]] = mapped_column(
Float,
nullable=True,
)
lon: Mapped[Optional[float]] = mapped_column(
Float,
nullable=True,
)
# Relationships

View File

@@ -21,7 +21,7 @@ class NodeTag(Base, UUIDMixin, TimestampMixin):
node_id: Foreign key to nodes table
key: Tag name/key
value: Tag value (stored as text, can be JSON for typed values)
value_type: Type hint (string, number, boolean, coordinate)
value_type: Type hint (string, number, boolean)
created_at: Record creation timestamp
updated_at: Record update timestamp
"""

View File

@@ -20,7 +20,7 @@ class TracePath(Base, UUIDMixin, TimestampMixin):
path_len: Path length
flags: Trace flags
auth: Authentication data
path_hashes: JSON array of node hash identifiers
path_hashes: JSON array of hex-encoded node hash identifiers (variable length)
snr_values: JSON array of SNR values per hop
hop_count: Total number of hops
received_at: When received by interface

View File

@@ -24,6 +24,8 @@ class MQTTConfig:
keepalive: int = 60
clean_session: bool = True
tls: bool = False
transport: str = "tcp"
ws_path: str = "/mqtt"
class TopicBuilder:
@@ -37,6 +39,10 @@ class TopicBuilder:
"""
self.prefix = prefix
def _prefix_parts(self) -> list[str]:
"""Split configured prefix into path segments."""
return [part for part in self.prefix.strip("/").split("/") if part]
def event_topic(self, public_key: str, event_name: str) -> str:
"""Build an event topic.
@@ -86,10 +92,16 @@ class TopicBuilder:
Returns:
Tuple of (public_key, event_name) or None if invalid
"""
parts = topic.split("/")
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "event":
public_key = parts[1]
event_name = "/".join(parts[3:])
parts = [part for part in topic.strip("/").split("/") if part]
prefix_parts = self._prefix_parts()
prefix_len = len(prefix_parts)
if (
len(parts) >= prefix_len + 3
and parts[:prefix_len] == prefix_parts
and parts[prefix_len + 1] == "event"
):
public_key = parts[prefix_len]
event_name = "/".join(parts[prefix_len + 2 :])
return (public_key, event_name)
return None
@@ -102,13 +114,39 @@ class TopicBuilder:
Returns:
Tuple of (public_key, command_name) or None if invalid
"""
parts = topic.split("/")
if len(parts) >= 4 and parts[0] == self.prefix and parts[2] == "command":
public_key = parts[1]
command_name = "/".join(parts[3:])
parts = [part for part in topic.strip("/").split("/") if part]
prefix_parts = self._prefix_parts()
prefix_len = len(prefix_parts)
if (
len(parts) >= prefix_len + 3
and parts[:prefix_len] == prefix_parts
and parts[prefix_len + 1] == "command"
):
public_key = parts[prefix_len]
command_name = "/".join(parts[prefix_len + 2 :])
return (public_key, command_name)
return None
def parse_letsmesh_upload_topic(self, topic: str) -> tuple[str, str] | None:
"""Parse a LetsMesh upload topic to extract public key and feed type.
LetsMesh upload topics are expected in this form:
<prefix>/<public_key>/(packets|status|internal)
"""
parts = [part for part in topic.strip("/").split("/") if part]
prefix_parts = self._prefix_parts()
prefix_len = len(prefix_parts)
if len(parts) != prefix_len + 2 or parts[:prefix_len] != prefix_parts:
return None
public_key = parts[prefix_len]
feed_type = parts[prefix_len + 1]
if feed_type not in {"packets", "status", "internal"}:
return None
return (public_key, feed_type)
MessageHandler = Callable[[str, str, dict[str, Any]], None]
@@ -124,14 +162,24 @@ class MQTTClient:
"""
self.config = config
self.topic_builder = TopicBuilder(config.prefix)
transport = config.transport.lower()
if transport not in {"tcp", "websockets"}:
raise ValueError(f"Unsupported MQTT transport: {config.transport}")
self._client = mqtt.Client(
callback_api_version=CallbackAPIVersion.VERSION2, # type: ignore[call-arg]
client_id=config.client_id,
clean_session=config.clean_session,
transport=transport,
)
self._connected = False
self._message_handlers: dict[str, list[MessageHandler]] = {}
# Set WebSocket path when using MQTT over WebSockets.
if transport == "websockets":
self._client.ws_set_options(path=config.ws_path)
logger.debug("MQTT WebSocket transport enabled (path=%s)", config.ws_path)
# Set up TLS if enabled
if config.tls:
self._client.tls_set()

View File

@@ -28,6 +28,14 @@ class AdvertisementEvent(BaseModel):
default=None,
description="Capability/status flags bitmask",
)
lat: Optional[float] = Field(
default=None,
description="Node latitude when location metadata is available",
)
lon: Optional[float] = Field(
default=None,
description="Node longitude when location metadata is available",
)
class ContactMessageEvent(BaseModel):
@@ -125,7 +133,7 @@ class TraceDataEvent(BaseModel):
)
path_hashes: Optional[list[str]] = Field(
default=None,
description="Array of 2-character node hash identifiers",
description="Array of hex-encoded node hash identifiers (variable length, e.g. '4a' for single-byte or 'b3fa' for multibyte)",
)
snr_values: Optional[list[float]] = Field(
default=None,

View File

@@ -6,46 +6,19 @@ from typing import Optional
from pydantic import BaseModel, Field
class MemberNodeCreate(BaseModel):
    """Schema for creating a member node association."""

    # Must be exactly 64 hexadecimal characters (a node public key).
    public_key: str = Field(
        ...,
        min_length=64,
        max_length=64,
        pattern=r"^[0-9a-fA-F]{64}$",
        description="Node's public key (64-char hex)",
    )
    # Optional free-form role label, capped at 50 characters.
    node_role: Optional[str] = Field(
        default=None,
        max_length=50,
        description="Role of the node (e.g., 'chat', 'repeater')",
    )
class MemberNodeRead(BaseModel):
    """Schema for reading a member node association."""

    public_key: str = Field(..., description="Node's public key")
    node_role: Optional[str] = Field(default=None, description="Role of the node")
    created_at: datetime = Field(..., description="Creation timestamp")
    updated_at: datetime = Field(..., description="Last update timestamp")

    # Node details (populated from nodes table if available)
    node_name: Optional[str] = Field(default=None, description="Node's name from DB")
    node_adv_type: Optional[str] = Field(
        default=None, description="Node's advertisement type"
    )
    friendly_name: Optional[str] = Field(
        default=None, description="Node's friendly name tag"
    )

    class Config:
        # Allow building this schema directly from ORM model attributes.
        from_attributes = True
class MemberCreate(BaseModel):
"""Schema for creating a member."""
"""Schema for creating a member.
Note: Nodes are associated with members via a 'member_id' tag on the node,
not through this schema.
"""
member_id: str = Field(
...,
min_length=1,
max_length=100,
description="Unique member identifier (e.g., 'walshie86')",
)
name: str = Field(
...,
min_length=1,
@@ -71,15 +44,21 @@ class MemberCreate(BaseModel):
max_length=255,
description="Contact information",
)
nodes: Optional[list[MemberNodeCreate]] = Field(
default=None,
description="List of associated nodes",
)
class MemberUpdate(BaseModel):
"""Schema for updating a member."""
"""Schema for updating a member.
Note: Nodes are associated with members via a 'member_id' tag on the node,
not through this schema.
"""
member_id: Optional[str] = Field(
default=None,
min_length=1,
max_length=100,
description="Unique member identifier (e.g., 'walshie86')",
)
name: Optional[str] = Field(
default=None,
min_length=1,
@@ -105,22 +84,22 @@ class MemberUpdate(BaseModel):
max_length=255,
description="Contact information",
)
nodes: Optional[list[MemberNodeCreate]] = Field(
default=None,
description="List of associated nodes (replaces existing nodes)",
)
class MemberRead(BaseModel):
"""Schema for reading a member."""
"""Schema for reading a member.
Note: Nodes are associated with members via a 'member_id' tag on the node.
To find nodes for a member, query nodes with a 'member_id' tag matching this member.
"""
id: str = Field(..., description="Member UUID")
member_id: str = Field(..., description="Unique member identifier")
name: str = Field(..., description="Member's display name")
callsign: Optional[str] = Field(default=None, description="Amateur radio callsign")
role: Optional[str] = Field(default=None, description="Member's role")
description: Optional[str] = Field(default=None, description="Description")
contact: Optional[str] = Field(default=None, description="Contact information")
nodes: list[MemberNodeRead] = Field(default=[], description="Associated nodes")
created_at: datetime = Field(..., description="Creation timestamp")
updated_at: datetime = Field(..., description="Last update timestamp")

View File

@@ -12,9 +12,7 @@ class ReceiverInfo(BaseModel):
node_id: str = Field(..., description="Receiver node UUID")
public_key: str = Field(..., description="Receiver node public key")
name: Optional[str] = Field(default=None, description="Receiver node name")
friendly_name: Optional[str] = Field(
default=None, description="Receiver friendly name from tags"
)
tag_name: Optional[str] = Field(default=None, description="Receiver name from tags")
snr: Optional[float] = Field(
default=None, description="Signal-to-noise ratio at this receiver"
)
@@ -31,8 +29,8 @@ class MessageRead(BaseModel):
default=None, description="Receiving interface node public key"
)
receiver_name: Optional[str] = Field(default=None, description="Receiver node name")
receiver_friendly_name: Optional[str] = Field(
default=None, description="Receiver friendly name from tags"
receiver_tag_name: Optional[str] = Field(
default=None, description="Receiver name from tags"
)
message_type: str = Field(..., description="Message type (contact, channel)")
pubkey_prefix: Optional[str] = Field(
@@ -41,8 +39,8 @@ class MessageRead(BaseModel):
sender_name: Optional[str] = Field(
default=None, description="Sender's advertised node name"
)
sender_friendly_name: Optional[str] = Field(
default=None, description="Sender's friendly name from node tags"
sender_tag_name: Optional[str] = Field(
default=None, description="Sender's name from node tags"
)
channel_idx: Optional[int] = Field(default=None, description="Channel index")
text: str = Field(..., description="Message content")
@@ -110,16 +108,19 @@ class AdvertisementRead(BaseModel):
default=None, description="Receiving interface node public key"
)
receiver_name: Optional[str] = Field(default=None, description="Receiver node name")
receiver_friendly_name: Optional[str] = Field(
default=None, description="Receiver friendly name from tags"
receiver_tag_name: Optional[str] = Field(
default=None, description="Receiver name from tags"
)
public_key: str = Field(..., description="Advertised public key")
name: Optional[str] = Field(default=None, description="Advertised name")
node_name: Optional[str] = Field(
default=None, description="Node name from nodes table"
)
node_friendly_name: Optional[str] = Field(
default=None, description="Node friendly name from tags"
node_tag_name: Optional[str] = Field(
default=None, description="Node name from tags"
)
node_tag_description: Optional[str] = Field(
default=None, description="Node description from tags"
)
adv_type: Optional[str] = Field(default=None, description="Node type")
flags: Optional[int] = Field(default=None, description="Capability flags")
@@ -154,7 +155,8 @@ class TracePathRead(BaseModel):
flags: Optional[int] = Field(default=None, description="Trace flags")
auth: Optional[int] = Field(default=None, description="Auth data")
path_hashes: Optional[list[str]] = Field(
default=None, description="Node hash identifiers"
default=None,
description="Hex-encoded node hash identifiers (variable length, e.g. '4a' for single-byte or 'b3fa' for multibyte)",
)
snr_values: Optional[list[float]] = Field(
default=None, description="SNR values per hop"
@@ -215,7 +217,7 @@ class RecentAdvertisement(BaseModel):
public_key: str = Field(..., description="Node public key")
name: Optional[str] = Field(default=None, description="Node name")
friendly_name: Optional[str] = Field(default=None, description="Friendly name tag")
tag_name: Optional[str] = Field(default=None, description="Name tag")
adv_type: Optional[str] = Field(default=None, description="Node type")
received_at: datetime = Field(..., description="When received")
@@ -225,8 +227,8 @@ class ChannelMessage(BaseModel):
text: str = Field(..., description="Message text")
sender_name: Optional[str] = Field(default=None, description="Sender name")
sender_friendly_name: Optional[str] = Field(
default=None, description="Sender friendly name"
sender_tag_name: Optional[str] = Field(
default=None, description="Sender name from tags"
)
pubkey_prefix: Optional[str] = Field(
default=None, description="Sender public key prefix"
@@ -241,10 +243,14 @@ class DashboardStats(BaseModel):
active_nodes: int = Field(..., description="Nodes active in last 24h")
total_messages: int = Field(..., description="Total number of messages")
messages_today: int = Field(..., description="Messages received today")
messages_7d: int = Field(default=0, description="Messages received in last 7 days")
total_advertisements: int = Field(..., description="Total advertisements")
advertisements_24h: int = Field(
default=0, description="Advertisements received in last 24h"
)
advertisements_7d: int = Field(
default=0, description="Advertisements received in last 7 days"
)
recent_advertisements: list[RecentAdvertisement] = Field(
default_factory=list, description="Last 10 advertisements"
)

View File

@@ -19,7 +19,7 @@ class NodeTagCreate(BaseModel):
default=None,
description="Tag value",
)
value_type: Literal["string", "number", "boolean", "coordinate"] = Field(
value_type: Literal["string", "number", "boolean"] = Field(
default="string",
description="Value type hint",
)
@@ -32,12 +32,33 @@ class NodeTagUpdate(BaseModel):
default=None,
description="Tag value",
)
value_type: Optional[Literal["string", "number", "boolean", "coordinate"]] = Field(
value_type: Optional[Literal["string", "number", "boolean"]] = Field(
default=None,
description="Value type hint",
)
class NodeTagMove(BaseModel):
"""Schema for moving a node tag to a different node."""
new_public_key: str = Field(
...,
min_length=64,
max_length=64,
description="Public key of the destination node",
)
class NodeTagsCopyResult(BaseModel):
"""Schema for bulk copy tags result."""
copied: int = Field(..., description="Number of tags copied")
skipped: int = Field(..., description="Number of tags skipped (already exist)")
skipped_keys: list[str] = Field(
default_factory=list, description="Keys of skipped tags"
)
class NodeTagRead(BaseModel):
"""Schema for reading a node tag."""
@@ -59,7 +80,11 @@ class NodeRead(BaseModel):
adv_type: Optional[str] = Field(default=None, description="Advertisement type")
flags: Optional[int] = Field(default=None, description="Capability flags")
first_seen: datetime = Field(..., description="First advertisement timestamp")
last_seen: datetime = Field(..., description="Last activity timestamp")
last_seen: Optional[datetime] = Field(
default=None, description="Last activity timestamp"
)
lat: Optional[float] = Field(default=None, description="GPS latitude coordinate")
lon: Optional[float] = Field(default=None, description="GPS longitude coordinate")
created_at: datetime = Field(..., description="Record creation timestamp")
updated_at: datetime = Field(..., description="Record update timestamp")
tags: list[NodeTagRead] = Field(default_factory=list, description="Node tags")
@@ -82,7 +107,7 @@ class NodeFilters(BaseModel):
search: Optional[str] = Field(
default=None,
description="Search in name or public key",
description="Search in name tag, node name, or public key",
)
adv_type: Optional[str] = Field(
default=None,

View File

@@ -100,6 +100,19 @@ def interface() -> None:
envvar="MQTT_TLS",
help="Enable TLS/SSL for MQTT connection",
)
@click.option(
"--contact-cleanup/--no-contact-cleanup",
default=True,
envvar="CONTACT_CLEANUP_ENABLED",
help="Enable/disable automatic removal of stale contacts (RECEIVER mode only)",
)
@click.option(
"--contact-cleanup-days",
type=int,
default=7,
envvar="CONTACT_CLEANUP_DAYS",
help="Remove contacts not advertised for this many days (RECEIVER mode only)",
)
@click.option(
"--log-level",
type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
@@ -120,6 +133,8 @@ def run(
mqtt_password: str | None,
prefix: str,
mqtt_tls: bool,
contact_cleanup: bool,
contact_cleanup_days: int,
log_level: str,
) -> None:
"""Run the interface component.
@@ -162,6 +177,8 @@ def run(
mqtt_password=mqtt_password,
mqtt_prefix=prefix,
mqtt_tls=mqtt_tls,
contact_cleanup_enabled=contact_cleanup,
contact_cleanup_days=contact_cleanup_days,
)
elif mode_upper == "SENDER":
from meshcore_hub.interface.sender import run_sender
@@ -262,6 +279,19 @@ def run(
envvar="MQTT_TLS",
help="Enable TLS/SSL for MQTT connection",
)
@click.option(
"--contact-cleanup/--no-contact-cleanup",
default=True,
envvar="CONTACT_CLEANUP_ENABLED",
help="Enable/disable automatic removal of stale contacts",
)
@click.option(
"--contact-cleanup-days",
type=int,
default=7,
envvar="CONTACT_CLEANUP_DAYS",
help="Remove contacts not advertised for this many days",
)
def receiver(
port: str,
baud: int,
@@ -274,6 +304,8 @@ def receiver(
mqtt_password: str | None,
prefix: str,
mqtt_tls: bool,
contact_cleanup: bool,
contact_cleanup_days: int,
) -> None:
"""Run interface in RECEIVER mode.
@@ -293,12 +325,15 @@ def receiver(
baud=baud,
mock=mock,
node_address=node_address,
device_name=device_name,
mqtt_host=mqtt_host,
mqtt_port=mqtt_port,
mqtt_username=mqtt_username,
mqtt_password=mqtt_password,
mqtt_prefix=prefix,
mqtt_tls=mqtt_tls,
contact_cleanup_enabled=contact_cleanup,
contact_cleanup_days=contact_cleanup_days,
)

View File

@@ -193,11 +193,50 @@ class BaseMeshCoreDevice(ABC):
Triggers a CONTACTS event with all stored contacts from the device.
Note: This should only be called before the event loop is running.
Returns:
True if request was sent successfully
"""
pass
@abstractmethod
def schedule_get_contacts(self) -> bool:
"""Schedule a get_contacts request on the event loop.
This is safe to call from event handlers while the event loop is running.
Returns:
True if request was scheduled successfully
"""
pass
@abstractmethod
def remove_contact(self, public_key: str) -> bool:
"""Remove a contact from the device's contact database.
Args:
public_key: The 64-character hex public key of the contact to remove
Returns:
True if contact was removed successfully
"""
pass
@abstractmethod
def schedule_remove_contact(self, public_key: str) -> bool:
"""Schedule a remove_contact request on the event loop.
This is safe to call from event handlers while the event loop is running.
Args:
public_key: The 64-character hex public key of the contact to remove
Returns:
True if request was scheduled successfully
"""
pass
@abstractmethod
def run(self) -> None:
"""Run the device event loop (blocking)."""
@@ -567,7 +606,12 @@ class MeshCoreDevice(BaseMeshCoreDevice):
return False
def get_contacts(self) -> bool:
"""Fetch contacts from device contact database."""
"""Fetch contacts from device contact database.
Note: This method should only be called before the event loop is running
(e.g., during initialization). For calling during event processing,
use schedule_get_contacts() instead.
"""
if not self._connected or not self._mc:
logger.error("Cannot get contacts: not connected")
return False
@@ -584,6 +628,79 @@ class MeshCoreDevice(BaseMeshCoreDevice):
logger.error(f"Failed to get contacts: {e}")
return False
def schedule_get_contacts(self) -> bool:
    """Schedule a get_contacts request on the event loop.

    This is safe to call from event handlers while the event loop is running.
    The request is scheduled as a task on the event loop.

    Returns:
        True if request was scheduled, False if device not connected
    """
    if not self._connected or not self._mc:
        logger.error("Cannot get contacts: not connected")
        return False
    try:

        async def _get_contacts() -> None:
            await self._mc.commands.get_contacts()

        future = asyncio.run_coroutine_threadsafe(_get_contacts(), self._loop)

        # run_coroutine_threadsafe only surfaces exceptions through the
        # returned future; without inspecting it, a failed request would be
        # dropped silently. Log any failure once the coroutine finishes.
        def _log_outcome(fut) -> None:
            try:
                exc = fut.exception()
            except Exception:
                # Cancelled or loop shut down before completion.
                return
            if exc is not None:
                logger.error(f"Contact sync request failed: {exc}")

        future.add_done_callback(_log_outcome)
        logger.info("Scheduled contact sync request")
        return True
    except Exception as e:
        logger.error(f"Failed to schedule get contacts: {e}")
        return False
def remove_contact(self, public_key: str) -> bool:
    """Remove a contact from the device's contact database.

    Note: This method should only be called before the event loop is running
    (e.g., during initialization). For calling during event processing,
    use schedule_remove_contact() instead.
    """
    if not self._connected or not self._mc:
        logger.error("Cannot remove contact: not connected")
        return False

    async def _do_remove() -> None:
        await self._mc.commands.remove_contact(public_key)

    try:
        # Drive the coroutine to completion on the (not yet running) loop.
        self._loop.run_until_complete(_do_remove())
        logger.info(f"Removed contact {public_key[:12]}...")
        return True
    except Exception as e:
        logger.error(f"Failed to remove contact: {e}")
        return False
def schedule_remove_contact(self, public_key: str) -> bool:
    """Schedule a remove_contact request on the event loop.

    This is safe to call from event handlers while the event loop is running.
    The request is scheduled as a task on the event loop.

    Args:
        public_key: The 64-character hex public key of the contact to remove

    Returns:
        True if request was scheduled, False if device not connected
    """
    if not self._connected or not self._mc:
        logger.error("Cannot remove contact: not connected")
        return False
    try:

        async def _remove_contact() -> None:
            await self._mc.commands.remove_contact(public_key)

        future = asyncio.run_coroutine_threadsafe(_remove_contact(), self._loop)

        # run_coroutine_threadsafe only surfaces exceptions through the
        # returned future; log failures so they are not silently dropped.
        def _log_outcome(fut) -> None:
            try:
                exc = fut.exception()
            except Exception:
                # Cancelled or loop shut down before completion.
                return
            if exc is not None:
                logger.error(f"Failed to remove contact: {exc}")

        future.add_done_callback(_log_outcome)
        logger.debug(f"Scheduled removal of contact {public_key[:12]}...")
        return True
    except Exception as e:
        logger.error(f"Failed to schedule remove contact: {e}")
        return False
def run(self) -> None:
"""Run the device event loop."""
self._running = True

View File

@@ -292,7 +292,10 @@ class MockMeshCoreDevice(BaseMeshCoreDevice):
return True
def get_contacts(self) -> bool:
"""Fetch contacts from mock device contact database."""
"""Fetch contacts from mock device contact database.
Note: This should only be called before the event loop is running.
"""
if not self._connected:
logger.error("Cannot get contacts: not connected")
return False
@@ -318,6 +321,38 @@ class MockMeshCoreDevice(BaseMeshCoreDevice):
threading.Thread(target=send_contacts, daemon=True).start()
return True
def schedule_get_contacts(self) -> bool:
    """Schedule a get_contacts request.

    The mock device has no real async event loop, so scheduling simply
    delegates to get_contacts(), which delivers contacts via a thread.
    """
    return self.get_contacts()
def remove_contact(self, public_key: str) -> bool:
    """Remove a contact from the mock device's contact database.

    Removal is idempotent: when the contact is absent a warning is logged
    but the call still reports success.
    """
    if not self._connected:
        logger.error("Cannot remove contact: not connected")
        return False

    nodes = self.mock_config.nodes
    match_index = next(
        (idx for idx, node in enumerate(nodes) if node.public_key == public_key),
        None,
    )
    if match_index is None:
        logger.warning(f"Mock: Contact {public_key[:12]}... not found")
        return True  # Idempotent: success even when the contact is absent.

    del nodes[match_index]
    logger.info(f"Mock: Removed contact {public_key[:12]}...")
    return True
def schedule_remove_contact(self, public_key: str) -> bool:
    """Schedule a remove_contact request.

    The mock device has no real async event loop, so this simply
    delegates to remove_contact().
    """
    return self.remove_contact(public_key)
def run(self) -> None:
"""Run the mock device event loop."""
self._running = True

View File

@@ -20,6 +20,9 @@ from meshcore_hub.interface.device import (
create_device,
)
# Default contact cleanup settings
DEFAULT_CONTACT_CLEANUP_DAYS = 7
logger = logging.getLogger(__name__)
@@ -34,6 +37,8 @@ class Receiver:
device: BaseMeshCoreDevice,
mqtt_client: MQTTClient,
device_name: Optional[str] = None,
contact_cleanup_enabled: bool = True,
contact_cleanup_days: int = DEFAULT_CONTACT_CLEANUP_DAYS,
):
"""Initialize receiver.
@@ -41,10 +46,14 @@ class Receiver:
device: MeshCore device instance
mqtt_client: MQTT client instance
device_name: Optional device/node name to set on startup
contact_cleanup_enabled: Whether to remove stale contacts from device
contact_cleanup_days: Remove contacts not advertised for this many days
"""
self.device = device
self.mqtt = mqtt_client
self.device_name = device_name
self.contact_cleanup_enabled = contact_cleanup_enabled
self.contact_cleanup_days = contact_cleanup_days
self._running = False
self._shutdown_event = threading.Event()
self._device_connected = False
@@ -144,14 +153,31 @@ class Receiver:
logger.debug(f"Published {event_name} event to MQTT")
# Trigger contact sync on advertisements
if event_type == EventType.ADVERTISEMENT:
self._sync_contacts()
except Exception as e:
logger.error(f"Failed to publish event to MQTT: {e}")
def _sync_contacts(self) -> None:
    """Request contact sync from device.

    Called when advertisements are received so the device contact
    database stays current with all nodes on the mesh.
    """
    logger.info("Advertisement received, triggering contact sync")
    if not self.device.schedule_get_contacts():
        logger.warning("Contact sync request failed")
def _publish_contacts(self, payload: dict[str, Any]) -> None:
"""Publish each contact as a separate MQTT message.
The device returns contacts as a dict keyed by public_key.
We split this into individual 'contact' events for cleaner processing.
Stale contacts (not advertised for > contact_cleanup_days) are removed
from the device and not published.
Args:
payload: Dict of contacts keyed by public_key
@@ -173,22 +199,54 @@ class Receiver:
return
device_key = self.device.public_key # Capture for type narrowing
count = 0
current_time = int(time.time())
stale_threshold = current_time - (self.contact_cleanup_days * 24 * 60 * 60)
published_count = 0
removed_count = 0
for contact in contacts:
if not isinstance(contact, dict):
continue
public_key = contact.get("public_key")
if not public_key:
continue
# Check if contact is stale based on last_advert timestamp
# Only check if cleanup is enabled and last_advert exists
if self.contact_cleanup_enabled:
last_advert = contact.get("last_advert")
if last_advert is not None and last_advert > 0:
if last_advert < stale_threshold:
# Contact is stale - remove from device
adv_name = contact.get("adv_name", contact.get("name", ""))
logger.info(
f"Removing stale contact {public_key[:12]}... "
f"({adv_name}) - last advertised "
f"{(current_time - last_advert) // 86400} days ago"
)
self.device.schedule_remove_contact(public_key)
removed_count += 1
continue # Don't publish stale contacts
try:
self.mqtt.publish_event(
device_key,
"contact", # Use singular 'contact' for individual events
contact,
)
count += 1
published_count += 1
except Exception as e:
logger.error(f"Failed to publish contact event: {e}")
logger.info(f"Published {count} contact events to MQTT")
if removed_count > 0:
logger.info(
f"Contact sync: published {published_count}, "
f"removed {removed_count} stale contacts"
)
else:
logger.info(f"Published {published_count} contact events to MQTT")
def start(self) -> None:
"""Start the receiver."""
@@ -291,6 +349,8 @@ def create_receiver(
mqtt_password: Optional[str] = None,
mqtt_prefix: str = "meshcore",
mqtt_tls: bool = False,
contact_cleanup_enabled: bool = True,
contact_cleanup_days: int = DEFAULT_CONTACT_CLEANUP_DAYS,
) -> Receiver:
"""Create a configured receiver instance.
@@ -306,6 +366,8 @@ def create_receiver(
mqtt_password: MQTT password
mqtt_prefix: MQTT topic prefix
mqtt_tls: Enable TLS/SSL for MQTT connection
contact_cleanup_enabled: Whether to remove stale contacts from device
contact_cleanup_days: Remove contacts not advertised for this many days
Returns:
Configured Receiver instance
@@ -330,7 +392,13 @@ def create_receiver(
)
mqtt_client = MQTTClient(mqtt_config)
return Receiver(device, mqtt_client, device_name=device_name)
return Receiver(
device,
mqtt_client,
device_name=device_name,
contact_cleanup_enabled=contact_cleanup_enabled,
contact_cleanup_days=contact_cleanup_days,
)
def run_receiver(
@@ -345,6 +413,8 @@ def run_receiver(
mqtt_password: Optional[str] = None,
mqtt_prefix: str = "meshcore",
mqtt_tls: bool = False,
contact_cleanup_enabled: bool = True,
contact_cleanup_days: int = DEFAULT_CONTACT_CLEANUP_DAYS,
) -> None:
"""Run the receiver (blocking).
@@ -362,6 +432,8 @@ def run_receiver(
mqtt_password: MQTT password
mqtt_prefix: MQTT topic prefix
mqtt_tls: Enable TLS/SSL for MQTT connection
contact_cleanup_enabled: Whether to remove stale contacts from device
contact_cleanup_days: Remove contacts not advertised for this many days
"""
receiver = create_receiver(
port=port,
@@ -375,6 +447,8 @@ def run_receiver(
mqtt_password=mqtt_password,
mqtt_prefix=mqtt_prefix,
mqtt_tls=mqtt_tls,
contact_cleanup_enabled=contact_cleanup_enabled,
contact_cleanup_days=contact_cleanup_days,
)
# Set up signal handlers

View File

@@ -1,17 +1,28 @@
"""FastAPI application for MeshCore Hub Web Dashboard."""
"""FastAPI application for MeshCore Hub Web Dashboard (SPA)."""
import json
import logging
import os
import re
from contextlib import asynccontextmanager
from datetime import datetime
from pathlib import Path
from typing import AsyncGenerator
from typing import Any, AsyncGenerator
from zoneinfo import ZoneInfo
import httpx
from fastapi import FastAPI, Request
from fastapi import FastAPI, Request, Response
from fastapi.responses import HTMLResponse, JSONResponse, PlainTextResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
from meshcore_hub import __version__
from meshcore_hub.collector.letsmesh_decoder import LetsMeshPacketDecoder
from meshcore_hub.common.i18n import load_locale, t
from meshcore_hub.common.schemas import RadioConfig
from meshcore_hub.web.middleware import CacheControlMiddleware
from meshcore_hub.web.pages import PageLoader
logger = logging.getLogger(__name__)
@@ -21,6 +32,60 @@ TEMPLATES_DIR = PACKAGE_DIR / "templates"
STATIC_DIR = PACKAGE_DIR / "static"
def _parse_decoder_key_entries(raw: str | None) -> list[str]:
"""Parse COLLECTOR_LETSMESH_DECODER_KEYS into key entries."""
if not raw:
return []
return [part.strip() for part in re.split(r"[,\s]+", raw) if part.strip()]
def _build_channel_labels() -> dict[str, str]:
    """Build UI channel labels from built-in + configured decoder keys."""
    configured_keys = _parse_decoder_key_entries(
        os.getenv("COLLECTOR_LETSMESH_DECODER_KEYS")
    )
    # The decoder is instantiated only for its label table; decoding is off.
    decoder = LetsMeshPacketDecoder(enabled=False, channel_keys=configured_keys)
    indexed_labels = sorted(decoder.channel_labels_by_index().items())
    return {str(index): label for index, label in indexed_labels}
def _resolve_logo(media_home: Path) -> tuple[str, bool, Path | None]:
"""Resolve logo URL and whether light-mode inversion should be applied.
Returns:
tuple of (logo_url, invert_in_light_mode, resolved_path)
"""
custom_logo_candidates = (
("logo-invert.svg", "/media/images/logo-invert.svg", True),
("logo.svg", "/media/images/logo.svg", False),
)
for filename, url, invert_in_light_mode in custom_logo_candidates:
path = media_home / "images" / filename
if path.exists():
cache_buster = int(path.stat().st_mtime)
return f"{url}?v={cache_buster}", invert_in_light_mode, path
# Default packaged logo is monochrome and needs darkening in light mode.
return "/static/img/logo.svg", True, None
def _is_authenticated_proxy_request(request: Request) -> bool:
    """Check whether request is authenticated by an upstream auth proxy.

    Supported patterns:
    - OAuth2/OIDC proxy headers: X-Forwarded-User, X-Auth-Request-User
    - Forwarded Basic auth header: Authorization: Basic ...
    """
    headers = request.headers
    proxy_user_headers = ("x-forwarded-user", "x-auth-request-user")
    if any(headers.get(name) for name in proxy_user_headers):
        return True
    return headers.get("authorization", "").lower().startswith("basic ")
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
"""Application lifespan handler."""
@@ -47,9 +112,85 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
logger.info("Web dashboard stopped")
def _build_config_json(app: FastAPI, request: Request) -> str:
    """Build the JSON config object to embed in the SPA shell.

    Args:
        app: The FastAPI application instance.
        request: The current HTTP request.

    Returns:
        JSON string with app configuration, safe to inline in a <script> tag.
    """
    state = app.state

    # Radio configuration as a plain dict, or None when unset/unparseable.
    radio_config_dict = None
    parsed_radio = RadioConfig.from_config_string(state.network_radio_config)
    if parsed_radio:
        radio_config_dict = {
            "profile": parsed_radio.profile,
            "frequency": parsed_radio.frequency,
            "bandwidth": parsed_radio.bandwidth,
            "spreading_factor": parsed_radio.spreading_factor,
            "coding_rate": parsed_radio.coding_rate,
            "tx_power": parsed_radio.tx_power,
        }

    features = state.features

    # Custom pages for navigation (empty when the pages feature is disabled).
    custom_pages: list[dict[str, Any]] = []
    if features.get("pages", True):
        for page in state.page_loader.get_menu_pages():
            custom_pages.append(
                {
                    "slug": page.slug,
                    "title": page.title,
                    "url": page.url,
                    "menu_order": page.menu_order,
                }
            )

    config = {
        "network_name": state.network_name,
        "network_city": state.network_city,
        "network_country": state.network_country,
        "network_radio_config": radio_config_dict,
        "network_contact_email": state.network_contact_email,
        "network_contact_discord": state.network_contact_discord,
        "network_contact_github": state.network_contact_github,
        "network_contact_youtube": state.network_contact_youtube,
        "network_welcome_text": state.network_welcome_text,
        "admin_enabled": state.admin_enabled,
        "features": features,
        "custom_pages": custom_pages,
        "logo_url": state.logo_url,
        "version": __version__,
        "timezone": state.timezone_abbr,
        "timezone_iana": state.timezone,
        "is_authenticated": _is_authenticated_proxy_request(request),
        "default_theme": state.web_theme,
        "locale": state.web_locale,
        "datetime_locale": state.web_datetime_locale,
        "auto_refresh_seconds": state.auto_refresh_seconds,
        "channel_labels": state.channel_labels,
        "logo_invert_light": state.logo_invert_light,
    }
    # Escape "</script>" sequences to prevent XSS breakout from the
    # <script> block where this JSON is embedded via |safe in the
    # Jinja2 template. "<\/" is valid JSON per the spec and parsed
    # correctly by JavaScript's JSON.parse().
    return json.dumps(config).replace("</", "<\\/")
def create_app(
api_url: str | None = None,
api_key: str | None = None,
admin_enabled: bool | None = None,
network_name: str | None = None,
network_city: str | None = None,
network_country: str | None = None,
@@ -57,7 +198,9 @@ def create_app(
network_contact_email: str | None = None,
network_contact_discord: str | None = None,
network_contact_github: str | None = None,
network_contact_youtube: str | None = None,
network_welcome_text: str | None = None,
features: dict[str, bool] | None = None,
) -> FastAPI:
"""Create and configure the web dashboard application.
@@ -67,6 +210,7 @@ def create_app(
Args:
api_url: Base URL of the MeshCore Hub API
api_key: API key for authentication
admin_enabled: Enable admin interface at /a/
network_name: Display name for the network
network_city: City where the network is located
network_country: Country where the network is located
@@ -74,7 +218,9 @@ def create_app(
network_contact_email: Contact email address
network_contact_discord: Discord invite/server info
network_contact_github: GitHub repository URL
network_contact_youtube: YouTube channel URL
network_welcome_text: Welcome text for homepage
features: Feature flags dict (default: all enabled from settings)
Returns:
Configured FastAPI application
@@ -93,9 +239,45 @@ def create_app(
redoc_url=None,
)
# Trust proxy headers (X-Forwarded-Proto, X-Forwarded-For) for HTTPS detection
trusted_hosts_raw = settings.web_trusted_proxy_hosts
if trusted_hosts_raw == "*":
trusted_hosts: str | list[str] = "*"
else:
trusted_hosts = [h.strip() for h in trusted_hosts_raw.split(",") if h.strip()]
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts=trusted_hosts)
# Compute effective admin flag (parameter overrides setting)
effective_admin = (
admin_enabled if admin_enabled is not None else settings.web_admin_enabled
)
# Warn when admin is enabled but proxy trust is wide open
if effective_admin and settings.web_trusted_proxy_hosts == "*":
logger.warning(
"WEB_ADMIN_ENABLED is true but WEB_TRUSTED_PROXY_HOSTS is '*' (trust all). "
"Consider restricting to your reverse proxy IP for production deployments."
)
# Add cache control headers based on resource type
app.add_middleware(CacheControlMiddleware)
# Load i18n translations
app.state.web_locale = settings.web_locale or "en"
app.state.web_datetime_locale = settings.web_datetime_locale or "en-US"
load_locale(app.state.web_locale)
# Auto-refresh interval
app.state.auto_refresh_seconds = settings.web_auto_refresh_seconds
app.state.channel_labels = _build_channel_labels()
# Store configuration in app state (use args if provided, else settings)
app.state.web_theme = (
settings.web_theme if settings.web_theme in ("dark", "light") else "dark"
)
app.state.api_url = api_url or settings.api_base_url
app.state.api_key = api_key or settings.api_key
app.state.admin_enabled = effective_admin
app.state.network_name = network_name or settings.network_name
app.state.network_city = network_city or settings.network_city
app.state.network_country = network_country or settings.network_country
@@ -111,24 +293,309 @@ def create_app(
app.state.network_contact_github = (
network_contact_github or settings.network_contact_github
)
app.state.network_contact_youtube = (
network_contact_youtube or settings.network_contact_youtube
)
app.state.network_welcome_text = (
network_welcome_text or settings.network_welcome_text
)
# Set up templates
# Store feature flags with automatic dependencies:
# - Dashboard requires at least one of nodes/advertisements/messages
# - Map requires nodes (map displays node locations)
effective_features = features if features is not None else settings.features
overrides: dict[str, bool] = {}
has_dashboard_content = (
effective_features.get("nodes", True)
or effective_features.get("advertisements", True)
or effective_features.get("messages", True)
)
if not has_dashboard_content:
overrides["dashboard"] = False
if not effective_features.get("nodes", True):
overrides["map"] = False
if overrides:
effective_features = {**effective_features, **overrides}
app.state.features = effective_features
# Set up templates (for SPA shell only)
templates = Jinja2Templates(directory=str(TEMPLATES_DIR))
templates.env.trim_blocks = True
templates.env.lstrip_blocks = True
templates.env.globals["t"] = t
app.state.templates = templates
# Compute timezone
app.state.timezone = settings.tz
try:
tz = ZoneInfo(settings.tz)
app.state.timezone_abbr = datetime.now(tz).strftime("%Z")
except Exception:
app.state.timezone_abbr = "UTC"
# Initialize page loader for custom markdown pages
page_loader = PageLoader(settings.effective_pages_home)
page_loader.load_pages()
app.state.page_loader = page_loader
# Check for custom logo and store media path
media_home = Path(settings.effective_media_home)
logo_url, logo_invert_light, logo_path = _resolve_logo(media_home)
app.state.logo_url = logo_url
app.state.logo_invert_light = logo_invert_light
if logo_path is not None:
logger.info("Using custom logo from %s", logo_path)
# Mount static files
if STATIC_DIR.exists():
app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
# Include routers
from meshcore_hub.web.routes import web_router
# Mount custom media files if directory exists
if media_home.exists() and media_home.is_dir():
app.mount("/media", StaticFiles(directory=str(media_home)), name="media")
app.include_router(web_router)
# --- API Proxy ---
@app.api_route(
    "/api/{path:path}",
    methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
    tags=["API Proxy"],
)
async def api_proxy(request: Request, path: str) -> Response:
    """Proxy API requests to the backend API server.

    Forwards method, query parameters, request body (for write methods),
    the content-type header, and upstream auth-proxy headers. When admin
    is enabled, mutating requests without proxy-auth identification are
    rejected with 401 before being forwarded.
    """
    client: httpx.AsyncClient = request.app.state.http_client
    url = f"/api/{path}"
    # Forward query parameters as-is.
    params = dict(request.query_params)
    # Forward body for write methods only; GET/DELETE carry none.
    body = None
    if request.method in ("POST", "PUT", "PATCH"):
        body = await request.body()
    # Forward only a curated set of request headers to the backend.
    headers: dict[str, str] = {}
    if "content-type" in request.headers:
        headers["content-type"] = request.headers["content-type"]
    # Forward auth proxy headers for admin operations.
    for h in ("x-forwarded-user", "x-forwarded-email", "x-forwarded-groups"):
        if h in request.headers:
            headers[h] = request.headers[h]
    # Block mutating requests from unauthenticated users when admin is
    # enabled. OAuth2Proxy is expected to set X-Forwarded-User for
    # authenticated sessions; without it, write operations must be
    # rejected server-side to prevent auth bypass.
    if (
        request.method in ("POST", "PUT", "DELETE", "PATCH")
        and request.app.state.admin_enabled
        and not _is_authenticated_proxy_request(request)
    ):
        return JSONResponse(
            {"detail": "Authentication required"},
            status_code=401,
        )
    try:
        response = await client.request(
            method=request.method,
            url=url,
            params=params,
            content=body,
            headers=headers,
        )
        # Filter response headers (remove hop-by-hop headers and
        # content-encoding, since httpx already decoded the body).
        resp_headers: dict[str, str] = {}
        for k, v in response.headers.items():
            if k.lower() not in (
                "transfer-encoding",
                "connection",
                "keep-alive",
                "content-encoding",
            ):
                resp_headers[k] = v
        return Response(
            content=response.content,
            status_code=response.status_code,
            headers=resp_headers,
        )
    except httpx.ConnectError:
        # Backend unreachable: surface as a 502 rather than a raw error.
        return JSONResponse(
            {"detail": "API server unavailable"},
            status_code=502,
        )
    except Exception as e:
        logger.error(f"API proxy error: {e}")
        return JSONResponse(
            {"detail": "API proxy error"},
            status_code=502,
        )
# --- Map Data Endpoint (server-side aggregation) ---
@app.get("/map/data", tags=["Map"])
async def map_data(request: Request) -> JSONResponse:
"""Return node location data as JSON for the map."""
if not request.app.state.features.get("map", True):
return JSONResponse({"detail": "Map feature is disabled"}, status_code=404)
nodes_with_location: list[dict[str, Any]] = []
members_list: list[dict[str, Any]] = []
members_by_id: dict[str, dict[str, Any]] = {}
error: str | None = None
total_nodes = 0
nodes_with_coords = 0
try:
# Fetch all members to build lookup by member_id
members_response = await request.app.state.http_client.get(
"/api/v1/members", params={"limit": 500}
)
if members_response.status_code == 200:
members_data = members_response.json()
for member in members_data.get("items", []):
member_info = {
"member_id": member.get("member_id"),
"name": member.get("name"),
"callsign": member.get("callsign"),
}
members_list.append(member_info)
if member.get("member_id"):
members_by_id[member["member_id"]] = member_info
# Fetch all nodes from API
response = await request.app.state.http_client.get(
"/api/v1/nodes", params={"limit": 500}
)
if response.status_code == 200:
data = response.json()
nodes = data.get("items", [])
total_nodes = len(nodes)
for node in nodes:
tags = node.get("tags", [])
tag_lat = None
tag_lon = None
friendly_name = None
role = None
node_member_id = None
for tag in tags:
key = tag.get("key")
if key == "lat":
try:
tag_lat = float(tag.get("value"))
except (ValueError, TypeError):
pass
elif key == "lon":
try:
tag_lon = float(tag.get("value"))
except (ValueError, TypeError):
pass
elif key == "friendly_name":
friendly_name = tag.get("value")
elif key == "role":
role = tag.get("value")
elif key == "member_id":
node_member_id = tag.get("value")
lat = tag_lat if tag_lat is not None else node.get("lat")
lon = tag_lon if tag_lon is not None else node.get("lon")
if lat is None or lon is None:
continue
if lat == 0.0 and lon == 0.0:
continue
nodes_with_coords += 1
display_name = (
friendly_name
or node.get("name")
or node.get("public_key", "")[:12]
)
public_key = node.get("public_key")
owner = (
members_by_id.get(node_member_id) if node_member_id else None
)
nodes_with_location.append(
{
"public_key": public_key,
"name": display_name,
"adv_type": node.get("adv_type"),
"lat": lat,
"lon": lon,
"last_seen": node.get("last_seen"),
"role": role,
"is_infra": role == "infra",
"member_id": node_member_id,
"owner": owner,
}
)
else:
error = f"API returned status {response.status_code}"
except Exception as e:
error = str(e)
logger.warning(f"Failed to fetch nodes for map: {e}")
infra_nodes = [n for n in nodes_with_location if n.get("is_infra")]
infra_count = len(infra_nodes)
center_lat = 0.0
center_lon = 0.0
if nodes_with_location:
center_lat = sum(n["lat"] for n in nodes_with_location) / len(
nodes_with_location
)
center_lon = sum(n["lon"] for n in nodes_with_location) / len(
nodes_with_location
)
infra_center: dict[str, float] | None = None
if infra_nodes:
infra_center = {
"lat": sum(n["lat"] for n in infra_nodes) / len(infra_nodes),
"lon": sum(n["lon"] for n in infra_nodes) / len(infra_nodes),
}
return JSONResponse(
{
"nodes": nodes_with_location,
"members": members_list,
"center": {"lat": center_lat, "lon": center_lon},
"infra_center": infra_center,
"debug": {
"total_nodes": total_nodes,
"nodes_with_coords": nodes_with_coords,
"infra_nodes": infra_count,
"error": error,
},
}
)
# --- Custom Pages API ---
@app.get("/spa/pages/{slug}", tags=["SPA"])
async def get_custom_page(request: Request, slug: str) -> JSONResponse:
    """Return the rendered custom page identified by ``slug`` as JSON.

    Responds 404 both when the pages feature is switched off and when no
    page with the given slug has been loaded.
    """
    state = request.app.state
    if not state.features.get("pages", True):
        return JSONResponse(
            {"detail": "Pages feature is disabled"}, status_code=404
        )
    page = state.page_loader.get_page(slug)
    if page is None:
        return JSONResponse({"detail": "Page not found"}, status_code=404)
    payload = {
        "slug": page.slug,
        "title": page.title,
        "content_html": page.content_html,
    }
    return JSONResponse(payload)
# --- Health Endpoints ---
@app.get("/health", tags=["Health"])
async def health() -> dict:
"""Basic health check."""
@@ -145,30 +612,135 @@ def create_app(
except Exception as e:
return {"status": "not_ready", "api": str(e)}
# --- SEO Endpoints ---
def _get_https_base_url(request: Request) -> str:
    """Build the canonical base URL for public-facing links.

    Strips any trailing slash and upgrades a plain ``http://`` scheme to
    ``https://`` so URLs emitted in robots.txt/sitemap.xml are always
    secure, even when the app itself is served behind a TLS-terminating
    proxy over plain HTTP.
    """
    url = str(request.base_url).rstrip("/")
    prefix = "http://"
    if url.startswith(prefix):
        url = "https://" + url[len(prefix):]
    return url
@app.get("/robots.txt", response_class=PlainTextResponse)
async def robots_txt(request: Request) -> str:
    """Serve a robots.txt tailored to the enabled feature set.

    Message and node-detail pages are always excluded from crawling;
    top-level sections are additionally excluded when their feature
    flag is turned off.
    """
    features = request.app.state.features
    # Privacy-sensitive pages are always off-limits to crawlers.
    disallowed = [
        "Disallow: /messages",
        "Disallow: /nodes/",
    ]
    feature_paths = {
        "dashboard": "/dashboard",
        "nodes": "/nodes",
        "advertisements": "/advertisements",
        "map": "/map",
        "members": "/members",
        "pages": "/pages",
    }
    for feature, path in feature_paths.items():
        if features.get(feature, True):
            continue
        entry = f"Disallow: {path}"
        if entry not in disallowed:
            disallowed.append(entry)
    base_url = _get_https_base_url(request)
    lines = [
        "User-agent: *",
        *disallowed,
        "",
        f"Sitemap: {base_url}/sitemap.xml",
    ]
    return "\n".join(lines) + "\n"
@app.get("/sitemap.xml")
async def sitemap_xml(request: Request) -> Response:
    """Generate a dynamic XML sitemap.

    Static sections are included according to their feature flags; custom
    markdown pages are appended when the pages feature is enabled.

    Fix: values interpolated into the XML (base URL, page URLs derived
    from frontmatter slugs) are now escaped, so a slug containing ``&``,
    ``<`` or ``>`` can no longer produce an invalid sitemap document.
    """
    # Local import to keep the fix self-contained within this handler.
    from xml.sax.saxutils import escape

    base_url = _get_https_base_url(request)
    features = request.app.state.features

    def _url_entry(loc: str, changefreq: str, priority: str) -> str:
        # Escape the location so special characters cannot break the XML.
        return (
            f"  <url>\n"
            f"    <loc>{escape(loc)}</loc>\n"
            f"    <changefreq>{changefreq}</changefreq>\n"
            f"    <priority>{priority}</priority>\n"
            f"  </url>"
        )

    # (path, changefreq, priority, gating feature flag or None = always on)
    all_static_pages = [
        ("", "daily", "1.0", None),
        ("/dashboard", "hourly", "0.9", "dashboard"),
        ("/nodes", "hourly", "0.9", "nodes"),
        ("/advertisements", "hourly", "0.8", "advertisements"),
        ("/map", "daily", "0.7", "map"),
        ("/members", "weekly", "0.6", "members"),
    ]
    urls = [
        _url_entry(f"{base_url}{path}", changefreq, priority)
        for path, changefreq, priority, feature in all_static_pages
        if feature is None or features.get(feature, True)
    ]
    if features.get("pages", True):
        page_loader = request.app.state.page_loader
        urls.extend(
            _url_entry(f"{base_url}{page.url}", "weekly", "0.6")
            for page in page_loader.get_menu_pages()
        )
    xml = (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n'
        + "\n".join(urls)
        + "\n</urlset>"
    )
    return Response(content=xml, media_type="application/xml")
# --- SPA Catch-All (MUST be last) ---
@app.api_route("/{path:path}", methods=["GET"], tags=["SPA"])
async def spa_catchall(request: Request, path: str = "") -> HTMLResponse:
    """Serve the SPA shell for all non-API routes.

    Registered last so every otherwise-unmatched GET falls through to the
    client-side router; the template receives the full network/branding
    context plus the serialized runtime configuration.
    """
    state = request.app.state
    templates_inst: Jinja2Templates = state.templates
    features = state.features
    custom_pages = (
        state.page_loader.get_menu_pages()
        if features.get("pages", True)
        else []
    )
    context = {
        "request": request,
        "features": features,
        "custom_pages": custom_pages,
        "admin_enabled": state.admin_enabled,
        "logo_url": state.logo_url,
        "logo_invert_light": state.logo_invert_light,
        "default_theme": state.web_theme,
        "version": __version__,
        "config_json": _build_config_json(request.app, request),
    }
    # Network identity / branding values are exposed under their state names.
    for attr in (
        "network_name",
        "network_city",
        "network_country",
        "network_contact_email",
        "network_contact_discord",
        "network_contact_github",
        "network_contact_youtube",
        "network_welcome_text",
    ):
        context[attr] = getattr(state, attr)
    return templates_inst.TemplateResponse("spa.html", context)
return app
def get_templates(request: Request) -> Jinja2Templates:
    """Return the app-wide Jinja2Templates instance from application state."""
    return request.app.state.templates
def get_network_context(request: Request) -> dict:
    """Assemble the network-configuration context for templates.

    The radio configuration is stored on the app state as a
    comma-delimited string and parsed into a structured ``RadioConfig``
    here; the remaining values are copied through verbatim.
    """
    state = request.app.state
    context = {
        "network_radio_config": RadioConfig.from_config_string(
            state.network_radio_config
        ),
        "version": __version__,
    }
    for key in (
        "network_name",
        "network_city",
        "network_country",
        "network_contact_email",
        "network_contact_discord",
        "network_contact_github",
        "network_welcome_text",
    ):
        context[key] = getattr(state, key)
    return context

View File

@@ -88,6 +88,13 @@ import click
envvar="NETWORK_CONTACT_GITHUB",
help="GitHub repository URL",
)
@click.option(
"--network-contact-youtube",
type=str,
default=None,
envvar="NETWORK_CONTACT_YOUTUBE",
help="YouTube channel URL",
)
@click.option(
"--network-welcome-text",
type=str,
@@ -116,6 +123,7 @@ def web(
network_contact_email: str | None,
network_contact_discord: str | None,
network_contact_github: str | None,
network_contact_youtube: str | None,
network_welcome_text: str | None,
reload: bool,
) -> None:
@@ -175,6 +183,11 @@ def web(
if effective_city and effective_country:
click.echo(f"Location: {effective_city}, {effective_country}")
click.echo(f"Reload mode: {reload}")
disabled_features = [
name for name, enabled in settings.features.items() if not enabled
]
if disabled_features:
click.echo(f"Disabled features: {', '.join(disabled_features)}")
click.echo("=" * 50)
if reload:
@@ -201,6 +214,7 @@ def web(
network_contact_email=network_contact_email,
network_contact_discord=network_contact_discord,
network_contact_github=network_contact_github,
network_contact_youtube=network_contact_youtube,
network_welcome_text=network_welcome_text,
)

View File

@@ -0,0 +1,85 @@
"""HTTP caching middleware for the web component."""
from collections.abc import Awaitable, Callable
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import Response
from starlette.types import ASGIApp
class CacheControlMiddleware(BaseHTTPMiddleware):
    """Attach Cache-Control headers appropriate to the requested resource.

    Responses that already carry an explicit Cache-Control header, and
    API-proxy responses, are passed through untouched.
    """

    def __init__(self, app: ASGIApp) -> None:
        """Initialize the middleware.

        Args:
            app: The ASGI application to wrap.
        """
        super().__init__(app)

    async def dispatch(
        self,
        request: Request,
        call_next: Callable[[Request], Awaitable[Response]],
    ) -> Response:
        """Run the downstream handler, then apply a caching policy.

        Args:
            request: The incoming HTTP request.
            call_next: The next middleware or route handler.

        Returns:
            The response, with a Cache-Control header when applicable.
        """
        response: Response = await call_next(request)
        # An explicitly set header always wins over our defaults.
        if "cache-control" in response.headers:
            return response
        policy = self._policy_for(
            request.url.path,
            request.url.query,
            response.headers.get("content-type", ""),
        )
        if policy is not None:
            response.headers["cache-control"] = policy
        return response

    @staticmethod
    def _policy_for(path: str, query: str, content_type: str) -> str | None:
        """Pick the Cache-Control value for a path, or None to add nothing."""
        # NOTE(review): "v=" is a substring match and also fires on query
        # params such as "nav=1" — confirm this looseness is acceptable.
        versioned = "v=" in query
        if path.startswith("/health"):
            # Health endpoints must always reflect live state.
            return "no-cache, no-store, must-revalidate"
        if path.startswith(("/static/", "/media/")):
            if versioned:
                # Version-stamped assets never change under the same URL.
                return "public, max-age=31536000, immutable"
            # Unversioned assets get a short fallback cache.
            return "public, max-age=3600"
        if path == "/map/data":
            return "public, max-age=300"  # map data refreshes frequently
        if path.startswith("/spa/pages/"):
            return "public, max-age=3600"
        if path in ("/robots.txt", "/sitemap.xml"):
            return "public, max-age=3600"
        if path.startswith("/api/"):
            return None  # let the proxied backend control caching
        if content_type.startswith("text/html"):
            # SPA shell: force revalidation so deployments show immediately.
            return "no-cache, public"
        return None

View File

@@ -0,0 +1,119 @@
"""Custom markdown pages loader for MeshCore Hub Web Dashboard."""
import logging
from dataclasses import dataclass
from pathlib import Path
from typing import Optional
import frontmatter
import markdown
logger = logging.getLogger(__name__)
@dataclass
class CustomPage:
    """A single custom page rendered from a markdown source file."""

    slug: str  # URL-safe identifier, unique per page
    title: str  # human-readable page title
    menu_order: int  # sort key for navigation menus (lower = earlier)
    content_html: str  # pre-rendered HTML body
    file_path: str  # originating markdown file on disk

    @property
    def url(self) -> str:
        """URL path at which this page is served."""
        return "/pages/" + self.slug
class PageLoader:
    """Discovers and serves custom markdown pages from a directory."""

    def __init__(self, pages_dir: str) -> None:
        """Set up the loader.

        Args:
            pages_dir: Directory that holds the ``*.md`` page sources.
        """
        self.pages_dir = Path(pages_dir)
        self._pages: dict[str, CustomPage] = {}
        # Shared converter; reset() before each use so per-document state
        # (e.g. TOC anchor ids) does not leak between pages.
        self._md = markdown.Markdown(
            extensions=["tables", "fenced_code", "toc"],
            output_format="html",
        )

    def load_pages(self) -> None:
        """Scan the pages directory and rebuild the slug -> page index."""
        self._pages.clear()
        if not self.pages_dir.exists():
            logger.debug(f"Pages directory does not exist: {self.pages_dir}")
            return
        if not self.pages_dir.is_dir():
            logger.warning(f"Pages path is not a directory: {self.pages_dir}")
            return
        for md_file in self.pages_dir.glob("*.md"):
            # A single broken page must not prevent the rest from loading.
            try:
                page = self._load_page(md_file)
                if page:
                    self._pages[page.slug] = page
                    logger.info(
                        f"Loaded custom page: {page.slug} ({md_file.name})"
                    )
            except Exception as e:
                logger.error(f"Failed to load page {md_file}: {e}")
        logger.info(f"Loaded {len(self._pages)} custom page(s)")

    def _load_page(self, file_path: Path) -> Optional[CustomPage]:
        """Parse one markdown file into a CustomPage.

        Args:
            file_path: Markdown file to parse.

        Returns:
            The parsed page, or None if loading failed.
        """
        post = frontmatter.loads(file_path.read_text(encoding="utf-8"))
        # Frontmatter may override slug/title/menu_order; defaults are
        # derived from the filename otherwise.
        slug = post.get("slug", file_path.stem)
        default_title = slug.replace("-", " ").replace("_", " ").title()
        title = post.get("title", default_title)
        menu_order = post.get("menu_order", 100)
        self._md.reset()
        # NOTE(review): rendered HTML is not sanitized; page sources are
        # presumably operator-provided and trusted — confirm.
        html = self._md.convert(post.content)
        return CustomPage(
            slug=slug,
            title=title,
            menu_order=menu_order,
            content_html=html,
            file_path=str(file_path),
        )

    def get_page(self, slug: str) -> Optional[CustomPage]:
        """Look up a loaded page by slug; None when absent."""
        return self._pages.get(slug)

    def get_menu_pages(self) -> list[CustomPage]:
        """All loaded pages ordered by (menu_order, title) for navigation."""
        return sorted(
            self._pages.values(),
            key=lambda page: (page.menu_order, page.title),
        )

    def reload(self) -> None:
        """Re-scan the pages directory, replacing the current index."""
        self.load_pages()

View File

@@ -1,25 +0,0 @@
"""Web routes for MeshCore Hub Dashboard."""
from fastapi import APIRouter
from meshcore_hub.web.routes.home import router as home_router
from meshcore_hub.web.routes.network import router as network_router
from meshcore_hub.web.routes.nodes import router as nodes_router
from meshcore_hub.web.routes.messages import router as messages_router
from meshcore_hub.web.routes.advertisements import router as advertisements_router
from meshcore_hub.web.routes.map import router as map_router
from meshcore_hub.web.routes.members import router as members_router
# Create main web router
# Aggregates every page router into a single router that the web app
# can mount in one call.
web_router = APIRouter()
# Include all sub-routers
# Order determines route-matching precedence for any overlapping paths.
web_router.include_router(home_router)
web_router.include_router(network_router)
web_router.include_router(nodes_router)
web_router.include_router(messages_router)
web_router.include_router(advertisements_router)
web_router.include_router(map_router)
web_router.include_router(members_router)
# Only the aggregate router is part of this package's public API.
__all__ = ["web_router"]

View File

@@ -1,64 +0,0 @@
"""Advertisements page route."""
import logging
from fastapi import APIRouter, Query, Request
from fastapi.responses import HTMLResponse
from meshcore_hub.web.app import get_network_context, get_templates
logger = logging.getLogger(__name__)
router = APIRouter()
@router.get("/advertisements", response_class=HTMLResponse)
async def advertisements_list(
    request: Request,
    public_key: str | None = Query(None, description="Filter by public key"),
    page: int = Query(1, ge=1, description="Page number"),
    limit: int = Query(50, ge=1, le=100, description="Items per page"),
) -> HTMLResponse:
    """Render the advertisements list page.

    Fetches one page of advertisements from the backend API (optionally
    filtered by public key) and renders it with pagination context.  An
    API failure degrades gracefully to an empty list plus an
    ``api_error`` message in the template context.
    """
    templates = get_templates(request)
    context = get_network_context(request)
    context["request"] = request

    # Translate page/limit into the API's offset-based pagination.
    params: dict[str, int | str] = {
        "limit": limit,
        "offset": (page - 1) * limit,
    }
    if public_key:
        params["public_key"] = public_key

    advertisements: list = []
    total = 0
    try:
        response = await request.app.state.http_client.get(
            "/api/v1/advertisements", params=params
        )
        if response.status_code == 200:
            payload = response.json()
            advertisements = payload.get("items", [])
            total = payload.get("total", 0)
    except Exception as e:
        logger.warning(f"Failed to fetch advertisements from API: {e}")
        context["api_error"] = str(e)

    # Ceiling division; an empty result still renders as one page.
    total_pages = (total + limit - 1) // limit if total > 0 else 1
    context["advertisements"] = advertisements
    context["total"] = total
    context["page"] = page
    context["limit"] = limit
    context["total_pages"] = total_pages
    context["public_key"] = public_key or ""
    return templates.TemplateResponse("advertisements.html", context)

Some files were not shown because too many files have changed in this diff. Show More