Compare commits

...

61 Commits

Author SHA1 Message Date
l5y
d09fc842b8 Configure web container for production (#282) 2025-10-11 19:39:22 +02:00
l5y
73bdd809bd Normalize INSTANCE_DOMAIN configuration to require hostnames (#280)
* Ensure INSTANCE_DOMAIN configuration uses hostname

* Define ip_from_domain before use
2025-10-11 19:39:05 +02:00
l5y
f1dba89d4b Run initial federation announcement asynchronously (#281) 2025-10-11 19:38:12 +02:00
l5y
131a63845c Add production build Dockerfile and compose contexts (#279) 2025-10-11 18:23:51 +02:00
l5y
2240be1f2d Improve instance domain detection logic (#278) 2025-10-11 18:22:50 +02:00
l5y
a048a83c6c Implement federation announcements and instances API (#277) 2025-10-11 18:01:08 +02:00
l5y
4ef1e29034 Fix federation signature handling and IP guard (#276)
* Fix federation signature handling and IP guard

* Avoid defaulting isPrivate before signature verification

* Normalize instance domain host handling for restricted IP check

* ignore web app credentials

---------

Co-authored-by: l5yth <d220195275+l5yth@users.noreply.github.com>
2025-10-11 17:39:52 +02:00
l5y
b21df3de5c Add persistent federation metadata endpoint (#274)
* Add federated metadata endpoint

* Fix configure-time database access

* Fix well-known refresh bypassed by static files

* run rufo

---------

Co-authored-by: l5yth <d220195275+l5yth@users.noreply.github.com>
2025-10-11 13:04:33 +00:00
l5y
678af5e55b Add configurable instance domain with reverse DNS fallback (#272)
* Add instance domain resolution with reverse DNS fallback

* run rufo

---------

Co-authored-by: l5yth <d220195275+l5yth@users.noreply.github.com>
2025-10-11 12:21:55 +00:00
l5y
c4fd59626f Add production configuration guidance (#273) 2025-10-11 14:01:53 +02:00
l5y
0a26e4252a Add targeted API endpoints and expose version metadata (#271)
* Add per-node API endpoints and version route

* Adjust version metadata and node lookup route
2025-10-11 12:36:28 +02:00
Nic Jansma
d19e032b40 Prometheus metrics updates on startup and for position/telemetry (#270)
* Prometheus metrics updates on startup and for position/telemetry

* Fix per rufo

* CoPilot feedback

* CR feedback
2025-10-11 09:24:12 +02:00
l5y
ab9ae796f3 Add hourly reconnect handling for inactive mesh interface (#267)
* Add hourly reconnect handling for inactive mesh interface

* Reset inactivity timestamp after reconnect
2025-10-07 16:41:16 +02:00
Nic Jansma
0f2f2f447c Dockerfile fixes (#268) 2025-10-07 16:34:45 +02:00
Nic Jansma
3a031694db Added prometheus /metrics endpoint (#262)
* Added prometheus /metrics endpoint

* Fixes per CoPilot suggestions

* More Copilot fixes

* Rufo formatted
2025-10-07 16:32:45 +02:00
l5y
3cfbffc155 Add fullscreen toggle to map view (#263)
* Add fullscreen mode controls for map

* Improve fullscreen map scaling and control icon

* Improve fullscreen map sizing and icon
2025-10-07 15:53:18 +02:00
l5y
4f5aec45b3 Relocate JavaScript coverage export under web (#266) 2025-10-07 15:49:40 +02:00
Nic Jansma
2acfca20d9 v4.0.0 version string in web UI (#265) 2025-10-07 13:34:23 +00:00
l5y
f2ed5f5c03 Add energy saving mode to ingestor (#256) 2025-10-07 15:28:41 +02:00
l5y
db04b85134 chore: restore apache headers (#260) 2025-10-07 10:28:24 +02:00
l5y
ba66ac5cea docs: add matrix to readme (#259) 2025-10-07 07:29:12 +00:00
l5y
a592b655c4 Force dark theme default based on sanitized cookie (#252)
* Ensure dark theme defaults cleanly on initial load

* Ensure form controls respond to theme
2025-10-07 09:00:55 +02:00
l5y
a5a2ae5edc Document mesh ingestor modules with PDoc-style docstrings (#255) 2025-10-07 08:59:38 +02:00
l5y
363b4c5525 Handle missing node IDs in Meshtastic nodeinfo packets (#251)
* Handle Meshtastic nodeinfo packets without IDs

* Guard BLE reader against missing payloads
2025-10-07 08:56:36 +02:00
l5y
16e1304ded Add comprehensive RDoc comments to Ruby helpers (#254) 2025-10-07 08:53:39 +02:00
l5y
b89347938a docs: expand jsdoc coverage (#253) 2025-10-07 08:53:26 +02:00
l5y
6969ae6c4a Fix mesh ingestor telemetry and neighbor handling (#249)
* Refine mesh ingestor modularization

* Handle script execution in mesh wrapper

* Ensure mesh shim finds package when run as script

* Ensure queue state resets after send errors
2025-10-07 08:40:28 +02:00
l5y
64f8862676 Refactor front-end assets into external modules (#245)
* Refactor front-end assets into external modules

* Restore chat flag inline script

* Declare legend toggle control variable

* Remove dynamic background generation

* Restore background script with theme-based color

* run rufo
2025-10-07 08:33:06 +02:00
l5y
6660986211 Add tests for helper utilities and asset routes (#243)
* test: expand coverage for helpers and assets

* Adjust failing helper and asset specs

* Adjust specs for fallback node lookup and missing logo
2025-10-07 07:07:23 +02:00
l5y
5dfcc1a5fe docs: add ingestor inline docstrings (#244) 2025-10-07 00:06:42 +02:00
l5y
2efd28766b Add comprehensive coverage tests for mesh ingestor (#241) 2025-10-07 00:04:33 +02:00
l5y
c9bba25e5a Add comprehensive inline documentation (#240) 2025-10-07 00:01:31 +02:00
l5y
41976a3b43 Update changelog (#238)
* Update changelog

* Update readme
2025-10-06 08:36:13 +02:00
l5y
5a47a8f8e4 Reformat neighbor overlay details (#237) 2025-10-06 08:08:24 +02:00
l5y
c13f3c913f Add neighbor lines toggle to map legend (#236) 2025-10-06 08:05:44 +02:00
l5y
2e9b54b6cf Hide Air Util Tx column on mobile (#235) 2025-10-06 08:04:07 +02:00
l5y
7e844be627 Add overlay for clickable neighbor links on map (#234)
* Add overlay for clickable neighbor links on map

* Fix neighbor overlays and include SNR details

* Prevent map neighbor overlay clicks from closing immediately
2025-10-06 07:41:11 +02:00
l5y
b37e55c29a Hide humidity and pressure on mobile (#232) 2025-10-06 06:34:48 +02:00
l5y
332ba044f2 Remove last position timestamp from map info overlay (#233) 2025-10-06 06:34:37 +02:00
l5y
09a2d849ec Improve live node positions and expose precision metadata (#231)
* Fetch latest node positions and precision metadata

* Stop showing position source and precision in UI

* Guard node positions against stale merges
2025-10-05 23:08:57 +02:00
l5y
a3fb9b0d5c Show neighbor short names in info overlays (#228)
* Show neighbor short names in info overlays

* Adjust neighbor info placement
2025-10-05 22:04:29 +02:00
l5y
192978acf9 Add telemetry environmental data to node UI (#227) 2025-10-05 21:49:28 +02:00
l5y
581aaea93b Reduce neighbor line opacity (#226) 2025-10-05 21:45:05 +02:00
l5y
299752a4f1 Visualize neighbor connections on map canvas (#224)
* Visualize neighbor connections on map

* Gracefully handle neighbor fetch failures
2025-10-05 21:27:41 +02:00
l5y
142c0aa539 Add clear control to filter input (#225) 2025-10-05 21:26:37 +02:00
l5y
78168ce3db Handle Bluetooth shutdown hangs gracefully (#221)
* Handle Bluetooth shutdown hangs gracefully

* Make interface close guard compatible with patched Event
2025-10-05 21:07:19 +02:00
l5y
332abbc183 Adjust mesh priorities and receive topics (#220) 2025-10-05 20:50:34 +02:00
l5y
c136c5cf26 Add BLE and fallback mesh interface handling (#219)
* Add BLE and fallback mesh interface support

* Handle SIGINT by propagating KeyboardInterrupt

* Guard optional BLE dependency

* run black
2025-10-05 20:48:23 +02:00
l5y
2a65e89eee Add neighbor info ingestion and API endpoints (#218)
* Add neighbor info ingestion and API support

* Fix neighbor spec and add fixture

* run black

* run rufo
2025-10-05 12:35:13 +02:00
l5y
d6f1e7bc80 Add debug logs for unknown node creation and last-heard updates (#214)
* Add debug logging for unknown nodes and last-heard updates

* Fix debug log syntax
2025-10-04 21:25:23 +02:00
l5y
5ac5f3ec3f Update node last seen when events are received (#212)
* Update node last seen timestamps from event receive times

* run rufo

* fix tests
2025-10-04 21:11:16 +02:00
l5y
bb4cbfa62c Improve debug logging for node and telemetry data (#213)
* Improve debug logging for node and telemetry data

* run black
2025-10-04 21:03:03 +02:00
l5y
f0d600e5d7 Improve stored message debug logging (#211) 2025-10-04 20:53:54 +02:00
l5y
e0f0a6390d Stop repeating ingestor node info snapshot and timestamp debug logs (#210)
* Adjust ingestor node snapshot cadence and debug logging

* Ensure node snapshot waits for data

* run black
2025-10-04 20:41:53 +02:00
l5y
d4a27dccf7 Add telemetry API and ingestion support (#205)
* Add telemetry ingestion and API support

* Flatten telemetry storage and API responses

* Fix telemetry insert placeholder count

* Adjust telemetry node updates

* run black

* run rufo
2025-10-04 18:28:18 +02:00
l5y
74c4596dc5 Add private mode to hide chat and message APIs (#204)
* Add private mode to hide chat and message APIs

* run rufo
2025-10-04 09:36:43 +02:00
l5y
1f2328613c Handle offline-ready map fallback (#202) 2025-10-03 11:24:18 +02:00
l5y
eeca67f6ea Add linux/armv7 images and configuration support (#201) 2025-10-03 11:11:14 +02:00
l5y
4ae8a1cfca Update Docker documentation (#200)
* Update Docker documentation

* docs: reference compose file
2025-10-03 11:03:25 +02:00
l5y
ff06129a6f Update node last seen when ingesting encrypted messages (#198)
* Update node last seen for encrypted messages

* run rufo
2025-10-03 10:59:12 +02:00
l5y
6d7aa4dd56 fix api in readme (#197) 2025-10-01 14:16:54 +00:00
47 changed files with 12619 additions and 3142 deletions

View File

@@ -56,6 +56,9 @@ MATRIX_ROOM='#meshtastic-berlin:matrix.org'
# Debug mode (0=off, 1=on)
DEBUG=0
# Docker image architecture (linux-amd64, linux-arm64, linux-armv7)
POTATOMESH_IMAGE_ARCH=linux-amd64
# Docker Compose networking profile
# Leave unset for Linux hosts (default host networking).
# Set to "bridge" on Docker Desktop (macOS/Windows) if host networking

View File

@@ -33,6 +33,7 @@ jobs:
architecture:
- { name: linux-amd64, platform: linux/amd64, label: "Linux x86_64" }
- { name: linux-arm64, platform: linux/arm64, label: "Linux ARM64" }
- { name: linux-armv7, platform: linux/arm/v7, label: "Linux ARMv7" }
steps:
- name: Checkout repository
@@ -161,11 +162,13 @@ jobs:
echo "### 🌐 Web Application" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-amd64:latest\` - Linux x86_64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-arm64:latest\` - Linux ARM64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-armv7:latest\` - Linux ARMv7" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Ingestor images
echo "### 📡 Ingestor Service" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-amd64:latest\` - Linux x86_64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-arm64:latest\` - Linux ARM64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-armv7:latest\` - Linux ARMv7" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY

42
.github/workflows/javascript.yml vendored Normal file
View File

@@ -0,0 +1,42 @@
name: JavaScript
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Node.js 20
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install dependencies
run: npm install
working-directory: web
- name: Run JavaScript tests
run: npm test
working-directory: web
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: web/reports/javascript-coverage.json
flags: javascript
name: javascript
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: web/reports/javascript-junit.xml
flags: javascript

3
.gitignore vendored
View File

@@ -66,3 +66,6 @@ reports/
# AI planning and documentation
ai_docs/
*.log
# Generated credentials for the instance
web/.config

View File

@@ -1,8 +1,82 @@
# CHANGELOG
## v0.5.0
* Add JavaScript configuration tests and coverage workflow
## v0.4.0
* Reformat neighbor overlay layout by @l5yth in <https://github.com/l5yth/potato-mesh/pull/237>
* Add legend toggle for neighbor lines by @l5yth in <https://github.com/l5yth/potato-mesh/pull/236>
* Hide Air Util Tx column on mobile by @l5yth in <https://github.com/l5yth/potato-mesh/pull/235>
* Add overlay for clickable neighbor links on map by @l5yth in <https://github.com/l5yth/potato-mesh/pull/234>
* Hide humidity and pressure columns on mobile by @l5yth in <https://github.com/l5yth/potato-mesh/pull/232>
* Remove last position timestamp from map info overlay by @l5yth in <https://github.com/l5yth/potato-mesh/pull/233>
* Improve live node positions and expose precision metadata by @l5yth in <https://github.com/l5yth/potato-mesh/pull/231>
* Show neighbor short names in info overlays by @l5yth in <https://github.com/l5yth/potato-mesh/pull/228>
* Add telemetry environment metrics to node UI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/227>
* Reduce neighbor line opacity by @l5yth in <https://github.com/l5yth/potato-mesh/pull/226>
* Visualize neighbor connections on map canvas by @l5yth in <https://github.com/l5yth/potato-mesh/pull/224>
* Add clear control to filter input by @l5yth in <https://github.com/l5yth/potato-mesh/pull/225>
* Handle Bluetooth shutdown hangs gracefully by @l5yth in <https://github.com/l5yth/potato-mesh/pull/221>
* Adjust mesh priorities and receive topics by @l5yth in <https://github.com/l5yth/potato-mesh/pull/220>
* Add BLE and fallback mesh interface handling by @l5yth in <https://github.com/l5yth/potato-mesh/pull/219>
* Add neighbor info ingestion and API endpoints by @l5yth in <https://github.com/l5yth/potato-mesh/pull/218>
* Add debug logs for unknown node creation and last-heard updates by @l5yth in <https://github.com/l5yth/potato-mesh/pull/214>
* Update node last seen when events are received by @l5yth in <https://github.com/l5yth/potato-mesh/pull/212>
* Improve debug logging for node and telemetry data by @l5yth in <https://github.com/l5yth/potato-mesh/pull/213>
* Normalize stored message debug output by @l5yth in <https://github.com/l5yth/potato-mesh/pull/211>
* Stop repeating ingestor node info snapshot and timestamp debug logs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/210>
* Add telemetry API and ingestion support by @l5yth in <https://github.com/l5yth/potato-mesh/pull/205>
* Add private mode to hide chat and message APIs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/204>
* Handle offline-ready map fallback by @l5yth in <https://github.com/l5yth/potato-mesh/pull/202>
* Add linux/armv7 container builds and configuration options by @l5yth in <https://github.com/l5yth/potato-mesh/pull/201>
* Update Docker documentation by @l5yth in <https://github.com/l5yth/potato-mesh/pull/200>
* Update node last seen when ingesting encrypted messages by @l5yth in <https://github.com/l5yth/potato-mesh/pull/198>
* Fix api in readme by @l5yth in <https://github.com/l5yth/potato-mesh/pull/197>
## v0.3.0
* Add comprehensive Docker support with multi-architecture builds and automated CI/CD by @trose in <https://github.com/l5yth/potato-mesh/pull/122>
* Add connection recovery for TCP interface by @l5yth in <https://github.com/l5yth/potato-mesh/pull/186>
* Bump version to 0.3 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/191>
* Upgrade styles and fix interface issues by @l5yth in <https://github.com/l5yth/potato-mesh/pull/190>
* Some updates in the front by @dkorotkih2014-hub in <https://github.com/l5yth/potato-mesh/pull/188>
* Update last heard on node entry change by @l5yth in <https://github.com/l5yth/potato-mesh/pull/185>
* Populate chat metadata for unknown nodes by @l5yth in <https://github.com/l5yth/potato-mesh/pull/182>
* Update role color theme to latest palette by @l5yth in <https://github.com/l5yth/potato-mesh/pull/183>
* Add placeholder nodes for unknown senders by @l5yth in <https://github.com/l5yth/potato-mesh/pull/181>
* Update role colors and ordering for firmware 2.7.10 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/180>
* Handle plain IP addresses in mesh TCP detection by @l5yth in <https://github.com/l5yth/potato-mesh/pull/154>
* Handle encrypted messages by @l5yth in <https://github.com/l5yth/potato-mesh/pull/173>
* Add fallback display names for unnamed nodes by @l5yth in <https://github.com/l5yth/potato-mesh/pull/171>
* Ensure routers render above other node types by @l5yth in <https://github.com/l5yth/potato-mesh/pull/169>
* Move lint checks after tests in CI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/168>
* Handle proto values in nodeinfo payloads by @l5yth in <https://github.com/l5yth/potato-mesh/pull/167>
* Remove raw payload storage from database schema by @l5yth in <https://github.com/l5yth/potato-mesh/pull/166>
* Add POSITION_APP ingestion and API support by @l5yth in <https://github.com/l5yth/potato-mesh/pull/160>
* Add support for NODEINFO_APP packets by @l5yth in <https://github.com/l5yth/potato-mesh/pull/159>
* Derive SEO metadata from existing config values by @l5yth in <https://github.com/l5yth/potato-mesh/pull/153>
* Tests: create helper script to dump all mesh data from serial by @l5yth in <https://github.com/l5yth/potato-mesh/pull/152>
* Limit chat log to recent entries by @l5yth in <https://github.com/l5yth/potato-mesh/pull/151>
* Require time library before formatting ISO timestamps by @l5yth in <https://github.com/l5yth/potato-mesh/pull/149>
* Define docker compose network by @l5yth in <https://github.com/l5yth/potato-mesh/pull/148>
* Fix sqlite3 native extension on Alpine by @l5yth in <https://github.com/l5yth/potato-mesh/pull/146>
* Fix web app startup binding by @l5yth in <https://github.com/l5yth/potato-mesh/pull/147>
* Ensure sqlite3 builds from source on Alpine by @l5yth in <https://github.com/l5yth/potato-mesh/pull/145>
* Support mock serial interface in CI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/143>
* Fix Docker workflow matrix for supported platforms by @l5yth in <https://github.com/l5yth/potato-mesh/pull/142>
* Add clickable role filters to the map legend by @l5yth in <https://github.com/l5yth/potato-mesh/pull/140>
* Rebuild chat log on each refresh by @l5yth in <https://github.com/l5yth/potato-mesh/pull/139>
* Fix: retain alpine runtime libs after removing build deps by @l5yth in <https://github.com/l5yth/potato-mesh/pull/138>
* Fix: support windows ingestor build by @l5yth in <https://github.com/l5yth/potato-mesh/pull/136>
* Fix: use supported ruby image by @l5yth in <https://github.com/l5yth/potato-mesh/pull/135>
* Feat: Add comprehensive Docker support by @trose in <https://github.com/l5yth/potato-mesh/pull/122>
* Chore: bump version to 0.2.1 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/134>
* Fix dark mode tile styling on new map tiles by @l5yth in <https://github.com/l5yth/potato-mesh/pull/132>
* Switch map tiles to OSM HOT and add theme filters by @l5yth in <https://github.com/l5yth/potato-mesh/pull/130>
* Add footer version display by @l5yth in <https://github.com/l5yth/potato-mesh/pull/128>
* Add responsive controls for map legend by @l5yth in <https://github.com/l5yth/potato-mesh/pull/129>
* Update changelog by @l5yth in <https://github.com/l5yth/potato-mesh/pull/119>
## v0.2.0

150
DOCKER.md
View File

@@ -1,103 +1,85 @@
# PotatoMesh Docker Setup
# PotatoMesh Docker Guide
## Quick Start
PotatoMesh publishes ready-to-run container images to the GitHub Packages container
registry (GHCR). You do not need to clone the repository to deploy them—Compose
will pull the latest release images for you.
```bash
./configure.sh
docker-compose up -d
docker-compose logs -f
## Prerequisites
- Docker Engine 24+ or Docker Desktop with the Compose plugin
- Access to `/dev/ttyACM*` (or equivalent) if you plan to attach a Meshtastic
device to the ingestor container
- An API token that authorises the ingestor to post to your PotatoMesh instance
## Images on GHCR
| Service | Image |
|----------|-------------------------------------------------------------------|
| Web UI | `ghcr.io/l5yth/potato-mesh-web-linux-amd64:latest` |
| Ingestor | `ghcr.io/l5yth/potato-mesh-ingestor-linux-amd64:latest` |
Images are published for every tagged release. Replace `latest` with a
specific version tag if you prefer pinned deployments.
## Configure environment
Create a `.env` file alongside your Compose file and populate the variables you
need. At a minimum you must set `API_TOKEN` so the ingestor can authenticate
against the web API.
```env
API_TOKEN=replace-with-a-strong-token
SITE_NAME=My Meshtastic Network
MESH_SERIAL=/dev/ttyACM0
```
The default configuration attaches both services to the host network. This
avoids creating Docker bridge interfaces on platforms where that operation is
blocked. Access the dashboard at `http://127.0.0.1:41447` as soon as the
containers are running. On Docker Desktop (macOS/Windows) or when you prefer
traditional bridged networking, start Compose with the `bridge` profile:
Additional environment variables are optional:
- `DEFAULT_CHANNEL`, `DEFAULT_FREQUENCY`, `MAP_CENTER_LAT`, `MAP_CENTER_LON`,
`MAX_NODE_DISTANCE_KM`, and `MATRIX_ROOM` customise the UI.
- `POTATOMESH_INSTANCE` (defaults to `http://web:41447`) lets the ingestor post
to a remote PotatoMesh instance if you do not run both services together.
- `MESH_CHANNEL_INDEX`, `MESH_SNAPSHOT_SECS`, and `DEBUG` adjust ingestor
behaviour.
## Docker Compose file
Use the `docker-compose.yml` file provided in the repository (or download the
[raw file from GitHub](https://raw.githubusercontent.com/l5yth/potato-mesh/main/docker-compose.yml)).
It already references the published GHCR images, defines persistent volumes for
data and logs, and includes optional bridge-profile services for environments
that require classic port mapping. Place this file in the same directory as
your `.env` file so Compose can pick up both.
## Start the stack
From the directory containing the Compose file:
```bash
COMPOSE_PROFILES=bridge docker-compose up -d
docker compose up -d
```
Access at `http://localhost:41447`
## Configuration
Edit `.env` file or run `./configure.sh` to set:
- `API_TOKEN` - Required for ingestor authentication
- `MESH_SERIAL` - Your Meshtastic device path (e.g., `/dev/ttyACM0`)
- `SITE_NAME` - Your mesh network name
- `MAP_CENTER_LAT/LON` - Map center coordinates
## Device Setup
**Find your device:**
Docker automatically pulls the GHCR images when they are not present locally.
The dashboard becomes available at `http://127.0.0.1:41447`. Use the bridge
profile when you need to map the port explicitly:
```bash
# Linux
ls /dev/ttyACM* /dev/ttyUSB*
# macOS
ls /dev/cu.usbserial-*
# Windows
ls /dev/ttyS*
COMPOSE_PROFILES=bridge docker compose up -d
```
**Set permissions (Linux/macOS):**
## Updating
```bash
sudo chmod 666 /dev/ttyACM0
# Or add user to dialout group
sudo usermod -a -G dialout $USER
```
## Common Commands
```bash
# Start services
docker-compose up -d
# View logs
docker-compose logs -f
# Stop services
docker-compose down
# Stop and remove data
docker-compose down -v
# Update images
docker-compose pull && docker-compose up -d
docker compose pull
docker compose up -d
```
## Troubleshooting
**Device access issues:**
- **Serial device permissions (Linux/macOS):** grant access with `sudo chmod 666
/dev/ttyACM0` or add your user to the `dialout` group.
- **Port already in use:** identify the conflicting service with `sudo lsof -i
:41447`.
- **Viewing logs:** `docker compose logs -f` tails output from both services.
```bash
# Check device exists and permissions
ls -la /dev/ttyACM0
# Fix permissions
sudo chmod 666 /dev/ttyACM0
```
**Port conflicts:**
```bash
# Find what's using port 41447
sudo lsof -i :41447
```
**Container issues:**
```bash
# Check logs
docker-compose logs
# Restart services
docker-compose restart
```
For more Docker help, see [Docker Compose documentation](https://docs.docker.com/compose/).
For general Docker support, consult the [Docker Compose documentation](https://docs.docker.com/compose/).

83
Dockerfile Normal file
View File

@@ -0,0 +1,83 @@
# NOTE: This Dockerfile is kept for backward compatibility. The canonical build
# instructions live in `web/Dockerfile`; keep the two files in sync.
# Main application builder stage
FROM ruby:3.3-alpine AS builder
# Ensure native extensions are built against musl libc rather than
# using glibc precompiled binaries (which fail on Alpine).
ENV BUNDLE_FORCE_RUBY_PLATFORM=true
# Install build dependencies and SQLite3
RUN apk add --no-cache \
build-base \
sqlite-dev \
linux-headers \
pkgconfig
# Set working directory
WORKDIR /app
# Copy Gemfile and install dependencies
COPY web/Gemfile web/Gemfile.lock* ./
# Install gems with SQLite3 support
RUN bundle config set --local force_ruby_platform true && \
bundle config set --local without 'development test' && \
bundle install --jobs=4 --retry=3
# Production stage
FROM ruby:3.3-alpine AS production
# Install runtime dependencies
RUN apk add --no-cache \
sqlite \
tzdata \
curl
# Create non-root user
RUN addgroup -g 1000 -S potatomesh && \
adduser -u 1000 -S potatomesh -G potatomesh
# Set working directory
WORKDIR /app
# Copy installed gems from builder stage
COPY --from=builder /usr/local/bundle /usr/local/bundle
# Copy application code (exclude Dockerfile from web directory)
COPY --chown=potatomesh:potatomesh web/app.rb web/app.sh web/Gemfile web/Gemfile.lock* web/spec/ ./
COPY --chown=potatomesh:potatomesh web/public ./public
COPY --chown=potatomesh:potatomesh web/views/ ./views/
# Copy SQL schema files from data directory
COPY --chown=potatomesh:potatomesh data/*.sql /data/
# Create data directory for SQLite database
RUN mkdir -p /app/data && \
chown -R potatomesh:potatomesh /app/data
# Switch to non-root user
USER potatomesh
# Expose port
EXPOSE 41447
# Default environment variables (can be overridden by host)
ENV APP_ENV=production \
MESH_DB=/app/data/mesh.db \
DB_BUSY_TIMEOUT_MS=5000 \
DB_BUSY_MAX_RETRIES=5 \
DB_BUSY_RETRY_DELAY=0.05 \
MAX_JSON_BODY_BYTES=1048576 \
SITE_NAME="Berlin Mesh Network" \
DEFAULT_CHANNEL="#MediumFast" \
DEFAULT_FREQUENCY="868MHz" \
MAP_CENTER_LAT=52.502889 \
MAP_CENTER_LON=13.404194 \
MAX_NODE_DISTANCE_KM=50 \
MATRIX_ROOM="" \
DEBUG=0
# Start the application
CMD ["ruby", "app.rb", "-p", "41447", "-o", "0.0.0.0"]

View File

@@ -8,7 +8,7 @@
A simple Meshtastic-powered node dashboard for your local community. _No MQTT clutter, just local LoRa aether._
* Web app with chat window and map view showing nodes and messages.
* Web app with chat window and map view showing nodes, neighbors, telemetry, and messages.
* API to POST (authenticated) and to GET nodes and messages.
* Supplemental Python ingestor to feed the POST APIs of the Web app with data remotely.
* Shows new node notifications (first seen) in chat.
@@ -16,25 +16,7 @@ A simple Meshtastic-powered node dashboard for your local community. _No MQTT cl
Live demo for Berlin #MediumFast: [potatomesh.net](https://potatomesh.net)
![screenshot of the third version](./scrot-0.3.png)
## Quick Start with Docker
```bash
./configure.sh # Configure your setup
docker-compose up -d # Start services
docker-compose logs -f # View logs
```
PotatoMesh uses host networking by default so it can run on restricted
systems where Docker cannot create bridged interfaces. The web UI listens on
`http://127.0.0.1:41447` immediately without explicit port mappings. If you
are using Docker Desktop (macOS/Windows) or otherwise require bridged
networking, enable the Compose profile with:
```bash
COMPOSE_PROFILES=bridge docker-compose up -d
```
![screenshot of the fourth version](./scrot-0.4.png)
## Web App
@@ -64,6 +46,27 @@ Puma starting in single mode...
Check [127.0.0.1:41447](http://127.0.0.1:41447/) for the development preview
of the node map. Set `API_TOKEN` required for authorizations on the API's POST endpoints.
### Production
When promoting the app to production, run the server with the minimum required
configuration to ensure secure access and proper routing:
```bash
RACK_ENV="production" \
APP_ENV="production" \
API_TOKEN="SuperSecureTokenReally" \
INSTANCE_DOMAIN="https://potatomesh.net" \
exec ruby app.rb -p 41447 -o 0.0.0.0
```
* `RACK_ENV` and `APP_ENV` must be set to `production` to enable optimized
settings suited for live deployments.
* Bind the server to a production port and all interfaces (`-p 41447 -o 0.0.0.0`)
so that clients can reach the dashboard over the network.
* Provide a strong `API_TOKEN` value to authorize POST requests against the API.
* Configure `INSTANCE_DOMAIN` with the public URL of your deployment so vanity
links and generated metadata resolve correctly.
The web app can be configured with environment variables (defaults shown):
* `SITE_NAME` - title and header shown in the ui (default: "Meshtastic Berlin")
@@ -72,6 +75,8 @@ The web app can be configured with environment variables (defaults shown):
* `MAP_CENTER_LAT` / `MAP_CENTER_LON` - default map center coordinates (default: `52.502889` / `13.404194`)
* `MAX_NODE_DISTANCE_KM` - hide nodes farther than this distance from the center (default: `137`)
* `MATRIX_ROOM` - matrix room id for a footer link (default: `#meshtastic-berlin:matrix.org`)
* `PRIVATE` - set to `1` to hide the chat UI, disable message APIs, and exclude hidden clients (default: unset)
* `PROM_REPORT_IDS` - comma-separated list of Node IDs to report in prometheus metrics, `*` for all (default: unset)
The application derives SEO-friendly document titles, descriptions, and social
preview tags from these existing configuration values and reuses the bundled
@@ -89,10 +94,15 @@ The web app contains an API:
* GET `/api/nodes?limit=100` - returns the latest 100 nodes reported to the app
* GET `/api/positions?limit=100` - returns the latest 100 position data
* GET `/api/messages?limit=100` - returns the latest 100 messages
* GET `/api/messages?limit=100` - returns the latest 100 messages (disabled when `PRIVATE=1`)
* GET `/api/telemetry?limit=100` - returns the latest 100 telemetry data
* GET `/api/neighbors?limit=100` - returns the latest 100 neighbor tuples
* GET `/metrics` - Prometheus endpoint
* POST `/api/nodes` - upserts nodes provided as JSON object mapping node ids to node data (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends positions provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends messages provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/positions` - appends positions provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends messages provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`; disabled when `PRIVATE=1`)
* POST `/api/telemetry` - appends telemetry provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/neighbors` - appends neighbor tuples provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
The `API_TOKEN` environment variable must be set to a non-empty value and match the token supplied in the `Authorization` header for `POST` requests.
@@ -106,7 +116,7 @@ by ID and there will be no duplication.
For convenience, the directory `./data` contains a Python ingestor. It connects to a
Meshtastic node via serial port or to a remote device that exposes the Meshtastic TCP
interface to gather nodes and messages seen by the node.
or Bluetooth (BLE) interfaces to gather nodes and messages seen by the node.
```bash
pacman -S python
@@ -135,12 +145,30 @@ Run the script with `POTATOMESH_INSTANCE` and `API_TOKEN` to keep updating
node records and parsing new incoming messages. Enable debug output with `DEBUG=1`,
specify the serial port with `MESH_SERIAL` (default `/dev/ttyACM0`) or set it to an IP
address (for example `192.168.1.20:4403`) to use the Meshtastic TCP interface.
`MESH_SERIAL` also accepts Bluetooth device addresses (e.g., `ED:4D:9E:95:CF:60`)
and attempts a BLE connection if available.
## Demos
* <https://potatomesh.net/>
* <https://vrs.kdd2105.ru/>
* <https://potatomesh.stratospire.com/>
* <https://es1tem.uk/>
## Docker
Docker images are published on Github for each release:
```bash
docker pull ghcr.io/l5yth/potato-mesh/web:latest
docker pull ghcr.io/l5yth/potato-mesh/ingestor:latest
```
See the [Docker guide](DOCKER.md) for more details and custom deployment instructions.
## License
Apache v2.0, Contact <COM0@l5y.tech>
Join our Matrix to discuss the dashboard or ask for technical support:
[#potatomesh:dod.ngo](https://matrix.to/#/#potatomesh:dod.ngo)

View File

@@ -1,4 +1,17 @@
#!/bin/bash
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# PotatoMesh Configuration Script
# This script helps you configure your PotatoMesh instance with your local settings
@@ -62,6 +75,7 @@ MAP_CENTER_LON=$(grep "^MAP_CENTER_LON=" .env 2>/dev/null | cut -d'=' -f2- | tr
MAX_NODE_DISTANCE_KM=$(grep "^MAX_NODE_DISTANCE_KM=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "50")
MATRIX_ROOM=$(grep "^MATRIX_ROOM=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "")
API_TOKEN=$(grep "^API_TOKEN=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "")
POTATOMESH_IMAGE_ARCH=$(grep "^POTATOMESH_IMAGE_ARCH=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "linux-amd64")
echo "📍 Location Settings"
echo "-------------------"
@@ -81,6 +95,12 @@ echo "💬 Optional Settings"
echo "-------------------"
read_with_default "Matrix Room (optional, e.g., #meshtastic-berlin:matrix.org)" "$MATRIX_ROOM" MATRIX_ROOM
echo ""
echo "🛠 Docker Settings"
echo "------------------"
echo "Specify the Docker image architecture for your host (linux-amd64, linux-arm64, linux-armv7)."
read_with_default "Docker image architecture" "$POTATOMESH_IMAGE_ARCH" POTATOMESH_IMAGE_ARCH
echo ""
echo "🔐 Security Settings"
echo "-------------------"
@@ -124,6 +144,7 @@ update_env "MAP_CENTER_LON" "$MAP_CENTER_LON"
update_env "MAX_NODE_DISTANCE_KM" "$MAX_NODE_DISTANCE_KM"
update_env "MATRIX_ROOM" "\"$MATRIX_ROOM\""
update_env "API_TOKEN" "$API_TOKEN"
update_env "POTATOMESH_IMAGE_ARCH" "$POTATOMESH_IMAGE_ARCH"
# Add other common settings if they don't exist
if ! grep -q "^MESH_SERIAL=" .env; then
@@ -148,6 +169,7 @@ echo " Channel: $DEFAULT_CHANNEL"
echo " Frequency: $DEFAULT_FREQUENCY"
echo " Matrix Room: ${MATRIX_ROOM:-'Not set'}"
echo " API Token: ${API_TOKEN:0:8}..."
echo " Docker Image Arch: $POTATOMESH_IMAGE_ARCH"
echo ""
echo "🚀 You can now start PotatoMesh with:"
echo " docker-compose up -d"

32
data/instances.sql Normal file
View File

@@ -0,0 +1,32 @@
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- Write-ahead logging so concurrent readers are not blocked by writers.
PRAGMA journal_mode=WAL;
-- Registry of federated PotatoMesh instances known to this node.
CREATE TABLE IF NOT EXISTS instances (
id TEXT PRIMARY KEY, -- unique instance identifier
domain TEXT NOT NULL, -- public hostname of the remote instance
pubkey TEXT NOT NULL, -- key used to verify announcement signatures
name TEXT, -- human-readable instance name (optional)
version TEXT, -- reported software version (optional)
channel TEXT, -- mesh channel the instance monitors (optional)
frequency TEXT, -- radio frequency/region label (optional)
latitude REAL, -- map position of the instance (optional)
longitude REAL,
last_update_time INTEGER, -- epoch seconds of the last announcement
is_private BOOLEAN NOT NULL DEFAULT 0, -- 1 when the instance hides messages
signature TEXT -- signature of the announcement payload (optional)
);
-- One row per domain; announcements upsert by domain.
CREATE UNIQUE INDEX IF NOT EXISTS idx_instances_domain ON instances(domain);

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,124 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""High-level API for the potato-mesh ingestor."""
from __future__ import annotations
import signal as signal # re-exported for compatibility
import threading as threading # re-exported for compatibility
import sys
import types
from . import config, daemon, handlers, interfaces, queue, serialization
__all__: list[str] = []
def _reexport(module) -> None:
    """Copy ``module``'s public names into this package's namespace.

    Parameters:
        module: Submodule whose ``__all__`` entries should be re-exported.
    """
    exported = list(getattr(module, "__all__", []))
    for public_name in exported:
        globals()[public_name] = getattr(module, public_name)
    __all__.extend(exported)
def _export_constants() -> None:
    """Expose legacy module aliases (``json``/``urllib``/``glob`` etc.)."""
    aliases = (
        ("json", queue.json),
        ("urllib", queue.urllib),
        ("glob", interfaces.glob),
    )
    for alias, value in aliases:
        globals()[alias] = value
    __all__.extend(["json", "urllib", "glob", "threading", "signal"])
for _module in (daemon, handlers, interfaces, queue, serialization):
_reexport(_module)
_export_constants()
_CONFIG_ATTRS = {
"PORT",
"SNAPSHOT_SECS",
"CHANNEL_INDEX",
"DEBUG",
"INSTANCE",
"API_TOKEN",
"_RECONNECT_INITIAL_DELAY_SECS",
"_RECONNECT_MAX_DELAY_SECS",
"_CLOSE_TIMEOUT_SECS",
"_debug_log",
}
_INTERFACE_ATTRS = {"BLEInterface", "SerialInterface", "TCPInterface"}
_QUEUE_ATTRS = set(queue.__all__)
_HANDLER_ATTRS = set(handlers.__all__)
_DAEMON_ATTRS = set(daemon.__all__)
_SERIALIZATION_ATTRS = set(serialization.__all__)
_INTERFACE_EXPORTS = set(interfaces.__all__)
__all__.extend(sorted(_CONFIG_ATTRS))
__all__.extend(sorted(_INTERFACE_ATTRS))
class _MeshIngestorModule(types.ModuleType):
    """Module proxy that forwards config and interface state.

    Installed as the package module's class so legacy code (and tests) that
    read or monkeypatch attributes on the package see live values from the
    underlying submodules instead of stale copies.
    """
    def __getattr__(self, name: str):  # type: ignore[override]
        """Resolve attributes by delegating to the underlying submodules."""
        # Only called for names NOT already set on the package; config wins
        # over interface exports when a name appears in both sets.
        if name in _CONFIG_ATTRS:
            return getattr(config, name)
        if name in _INTERFACE_ATTRS:
            return getattr(interfaces, name)
        if name in _INTERFACE_EXPORTS:
            return getattr(interfaces, name)
        raise AttributeError(name)
    def __setattr__(self, name: str, value):  # type: ignore[override]
        """Propagate assignments to the appropriate submodule."""
        # Config and interface-class names short-circuit: write through to
        # the owning submodule and mirror locally, then stop.
        if name in _CONFIG_ATTRS:
            setattr(config, name, value)
            super().__setattr__(name, value)
            return
        if name in _INTERFACE_ATTRS:
            setattr(interfaces, name, value)
            super().__setattr__(name, value)
            return
        # Other exported names may belong to several submodules at once
        # (e.g. a helper re-exported by two modules), so fan the write out
        # to every matching module rather than stopping at the first hit.
        handled = False
        if name in _INTERFACE_EXPORTS:
            setattr(interfaces, name, value)
            super().__setattr__(name, getattr(interfaces, name, value))
            handled = True
        if name in _QUEUE_ATTRS:
            setattr(queue, name, value)
            super().__setattr__(name, getattr(queue, name, value))
            handled = True
        if name in _HANDLER_ATTRS:
            setattr(handlers, name, value)
            super().__setattr__(name, getattr(handlers, name, value))
            handled = True
        if name in _DAEMON_ATTRS:
            setattr(daemon, name, value)
            super().__setattr__(name, getattr(daemon, name, value))
            handled = True
        if name in _SERIALIZATION_ATTRS:
            setattr(serialization, name, value)
            super().__setattr__(name, getattr(serialization, name, value))
            handled = True
        if handled:
            return
        # Unknown names fall back to a plain module attribute.
        super().__setattr__(name, value)
sys.modules[__name__].__class__ = _MeshIngestorModule

View File

@@ -0,0 +1,71 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration helpers for the potato-mesh ingestor."""
from __future__ import annotations
import os
import time
# Serial device path, ``host:port`` TCP target, or BLE address of the mesh
# node; ``None`` means auto-detect an interface.
PORT = os.environ.get("MESH_SERIAL")
# Seconds between daemon loop iterations / node snapshot passes.
SNAPSHOT_SECS = int(os.environ.get("MESH_SNAPSHOT_SECS", "60"))
# Meshtastic channel index to ingest from.
CHANNEL_INDEX = int(os.environ.get("MESH_CHANNEL_INDEX", "0"))
# Verbose timestamped logging when DEBUG=1.
DEBUG = os.environ.get("DEBUG") == "1"
# Base URL of the PotatoMesh web instance; trailing slash stripped so
# endpoint paths can be appended directly.
INSTANCE = os.environ.get("POTATOMESH_INSTANCE", "").rstrip("/")
# Bearer token for the web API's POST endpoints.
API_TOKEN = os.environ.get("API_TOKEN", "")
# Periodically disconnect and sleep to save power when ENERGY_SAVING=1.
ENERGY_SAVING = os.environ.get("ENERGY_SAVING") == "1"
# Reconnect backoff (seconds): start at the initial delay, doubling up to
# the configured maximum.
_RECONNECT_INITIAL_DELAY_SECS = float(os.environ.get("MESH_RECONNECT_INITIAL", "5"))
_RECONNECT_MAX_DELAY_SECS = float(os.environ.get("MESH_RECONNECT_MAX", "60"))
# Maximum seconds to wait for interface.close() before continuing shutdown.
_CLOSE_TIMEOUT_SECS = float(os.environ.get("MESH_CLOSE_TIMEOUT", "5"))
# Force a reconnect after this many seconds without received packets
# (default: one hour).
_INACTIVITY_RECONNECT_SECS = float(
    os.environ.get("MESH_INACTIVITY_RECONNECT_SECS", str(60 * 60))
)
# Energy-saving cycle: stay connected this long per session...
_ENERGY_ONLINE_DURATION_SECS = float(
    os.environ.get("ENERGY_ONLINE_DURATION_SECS", "300")
)
# ...then sleep this long between sessions (default: six hours).
_ENERGY_SLEEP_SECS = float(os.environ.get("ENERGY_SLEEP_SECS", str(6 * 60 * 60)))
def _debug_log(message: str) -> None:
    """Print ``message`` with a UTC timestamp when ``DEBUG`` is enabled.

    Parameters:
        message: Text to display when debug logging is active.
    """
    if DEBUG:
        stamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        print(f"[{stamp}] [debug] {message}")
__all__ = [
"PORT",
"SNAPSHOT_SECS",
"CHANNEL_INDEX",
"DEBUG",
"INSTANCE",
"API_TOKEN",
"ENERGY_SAVING",
"_RECONNECT_INITIAL_DELAY_SECS",
"_RECONNECT_MAX_DELAY_SECS",
"_CLOSE_TIMEOUT_SECS",
"_INACTIVITY_RECONNECT_SECS",
"_ENERGY_ONLINE_DURATION_SECS",
"_ENERGY_SLEEP_SECS",
"_debug_log",
]

View File

@@ -0,0 +1,409 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runtime entry point for the mesh ingestor."""
from __future__ import annotations
import inspect
import signal
import threading
import time
from pubsub import pub
from . import config, handlers, interfaces
_RECEIVE_TOPICS = (
"meshtastic.receive",
"meshtastic.receive.text",
"meshtastic.receive.position",
"meshtastic.receive.user",
"meshtastic.receive.POSITION_APP",
"meshtastic.receive.NODEINFO_APP",
"meshtastic.receive.NEIGHBORINFO_APP",
"meshtastic.receive.TEXT_MESSAGE_APP",
"meshtastic.receive.TELEMETRY_APP",
)
def _event_wait_allows_default_timeout() -> bool:
"""Return ``True`` when :meth:`threading.Event.wait` accepts ``timeout``.
The behaviour changed between Python versions; this helper shields the
daemon from ``TypeError`` when the default timeout parameter is absent.
"""
try:
wait_signature = inspect.signature(threading.Event.wait)
except (TypeError, ValueError): # pragma: no cover
return True
parameters = list(wait_signature.parameters.values())
if len(parameters) <= 1:
return True
timeout_parameter = parameters[1]
if timeout_parameter.kind in (
inspect.Parameter.VAR_POSITIONAL,
inspect.Parameter.VAR_KEYWORD,
):
return True
return timeout_parameter.default is not inspect._empty
def _subscribe_receive_topics() -> list[str]:
    """Subscribe the packet handler to all receive-related pubsub topics.

    Returns:
        The topics that were successfully subscribed; failures are logged
        in debug mode and skipped.
    """
    registered: list[str] = []
    for topic in _RECEIVE_TOPICS:
        try:
            pub.subscribe(handlers.on_receive, topic)
        except Exception as exc:  # pragma: no cover
            config._debug_log(f"failed to subscribe to {topic!r}: {exc}")
        else:
            registered.append(topic)
    return registered
def _node_items_snapshot(
nodes_obj, retries: int = 3
) -> list[tuple[str, object]] | None:
"""Snapshot ``nodes_obj`` to avoid iteration errors during updates.
Parameters:
nodes_obj: Meshtastic nodes mapping or iterable.
retries: Number of attempts when encountering "dictionary changed"
runtime errors.
Returns:
A list of ``(node_id, node)`` tuples, ``None`` when retries are
exhausted, or an empty list when no nodes exist.
"""
if not nodes_obj:
return []
items_callable = getattr(nodes_obj, "items", None)
if callable(items_callable):
for _ in range(max(1, retries)):
try:
return list(items_callable())
except RuntimeError as err:
if "dictionary changed size during iteration" not in str(err):
raise
time.sleep(0)
return None
if hasattr(nodes_obj, "__iter__") and hasattr(nodes_obj, "__getitem__"):
for _ in range(max(1, retries)):
try:
keys = list(nodes_obj)
return [(key, nodes_obj[key]) for key in keys]
except RuntimeError as err:
if "dictionary changed size during iteration" not in str(err):
raise
time.sleep(0)
return None
return []
def _close_interface(iface_obj) -> None:
    """Close ``iface_obj`` while respecting configured timeouts.

    Parameters:
        iface_obj: Mesh interface to close; ``None`` is a no-op.
    """
    if iface_obj is None:
        return

    def _do_close() -> None:
        try:
            iface_obj.close()
        except Exception as exc:  # pragma: no cover
            if config.DEBUG:
                config._debug_log(f"error while closing mesh interface: {exc}")

    # Without a usable timeout mechanism, close synchronously.
    if config._CLOSE_TIMEOUT_SECS <= 0 or not _event_wait_allows_default_timeout():
        _do_close()
        return
    # Otherwise close on a daemon thread and abandon it if it hangs.
    worker = threading.Thread(target=_do_close, name="mesh-close", daemon=True)
    worker.start()
    worker.join(config._CLOSE_TIMEOUT_SECS)
    if worker.is_alive():
        print(
            "[warn] mesh interface did not close within "
            f"{config._CLOSE_TIMEOUT_SECS:g}s; continuing shutdown"
        )
def _is_ble_interface(iface_obj) -> bool:
"""Return ``True`` when ``iface_obj`` appears to be a BLE interface."""
if iface_obj is None:
return False
iface_cls = getattr(iface_obj, "__class__", None)
if iface_cls is None:
return False
module_name = getattr(iface_cls, "__module__", "") or ""
return "ble_interface" in module_name
def main() -> None:
    """Run the mesh ingestion daemon until interrupted.

    Loops forever: (re)creates the mesh interface, pushes an initial node
    snapshot, then waits ``SNAPSHOT_SECS`` per iteration while watching for
    energy-saving deadlines and inactivity-triggered reconnects. Terminates
    on SIGINT/SIGTERM or ``SystemExit`` when no interface is available.
    """
    subscribed = _subscribe_receive_topics()
    if config.DEBUG and subscribed:
        config._debug_log(f"subscribed to receive topics: {', '.join(subscribed)}")
    # Connection / retry / energy-saving bookkeeping for the main loop.
    iface = None
    resolved_target = None
    retry_delay = max(0.0, config._RECONNECT_INITIAL_DELAY_SECS)
    stop = threading.Event()
    initial_snapshot_sent = False
    energy_session_deadline = None
    iface_connected_at: float | None = None
    last_seen_packet_monotonic = handlers.last_packet_monotonic()
    last_inactivity_reconnect: float | None = None
    inactivity_reconnect_secs = max(
        0.0, getattr(config, "_INACTIVITY_RECONNECT_SECS", 0.0)
    )
    energy_saving_enabled = config.ENERGY_SAVING
    energy_online_secs = max(0.0, config._ENERGY_ONLINE_DURATION_SECS)
    energy_sleep_secs = max(0.0, config._ENERGY_SLEEP_SECS)
    def _energy_sleep(reason: str) -> None:
        # Interruptible sleep between energy-saving sessions; wakes early
        # when ``stop`` is set.
        if not energy_saving_enabled or energy_sleep_secs <= 0:
            return
        if config.DEBUG:
            config._debug_log(
                f"energy saving: {reason}; sleeping for {energy_sleep_secs:g}s"
            )
        stop.wait(energy_sleep_secs)
    def handle_sigterm(*_args) -> None:
        stop.set()
    def handle_sigint(signum, frame) -> None:
        # Second Ctrl-C falls back to the default handler (KeyboardInterrupt).
        if stop.is_set():
            signal.default_int_handler(signum, frame)
            return
        stop.set()
    signal.signal(signal.SIGINT, handle_sigint)
    signal.signal(signal.SIGTERM, handle_sigterm)
    target = config.INSTANCE or "(no POTATOMESH_INSTANCE)"
    configured_port = config.PORT
    active_candidate = configured_port
    announced_target = False
    print(
        f"Mesh daemon: nodes+messages → {target} | port={configured_port or 'auto'} | channel={config.CHANNEL_INDEX}"
    )
    try:
        while not stop.is_set():
            # Phase 1: (re)create the mesh interface when disconnected.
            if iface is None:
                try:
                    if active_candidate:
                        iface, resolved_target = interfaces._create_serial_interface(
                            active_candidate
                        )
                    else:
                        iface, resolved_target = interfaces._create_default_interface()
                    active_candidate = resolved_target
                    retry_delay = max(0.0, config._RECONNECT_INITIAL_DELAY_SECS)
                    initial_snapshot_sent = False
                    if not announced_target and resolved_target:
                        print(f"[info] using mesh interface: {resolved_target}")
                        announced_target = True
                    if energy_saving_enabled and energy_online_secs > 0:
                        energy_session_deadline = time.monotonic() + energy_online_secs
                    else:
                        energy_session_deadline = None
                    iface_connected_at = time.monotonic()
                    # Seed the inactivity tracking from the connection time so a
                    # reconnect is given a full inactivity window even when the
                    # handler still reports the previous packet timestamp.
                    last_seen_packet_monotonic = iface_connected_at
                    last_inactivity_reconnect = None
                except interfaces.NoAvailableMeshInterface as exc:
                    # No candidate device at all: fatal, exit the daemon.
                    print(f"[error] {exc}")
                    _close_interface(iface)
                    raise SystemExit(1) from exc
                except Exception as exc:
                    candidate_desc = active_candidate or "auto"
                    print(
                        f"[warn] failed to create mesh interface ({candidate_desc}): {exc}"
                    )
                    # When auto-detecting, drop the failed candidate so the
                    # next attempt re-scans.
                    if configured_port is None:
                        active_candidate = None
                        announced_target = False
                    stop.wait(retry_delay)
                    # Exponential backoff capped at the configured maximum.
                    if config._RECONNECT_MAX_DELAY_SECS > 0:
                        retry_delay = min(
                            (
                                retry_delay * 2
                                if retry_delay
                                else config._RECONNECT_INITIAL_DELAY_SECS
                            ),
                            config._RECONNECT_MAX_DELAY_SECS,
                        )
                    continue
            # Phase 2: energy-saving — disconnect after the online window or
            # when the BLE client dropped, then sleep before reconnecting.
            if energy_saving_enabled and iface is not None:
                if (
                    energy_session_deadline is not None
                    and time.monotonic() >= energy_session_deadline
                ):
                    print("[info] energy saving: disconnecting mesh interface")
                    _close_interface(iface)
                    iface = None
                    announced_target = False
                    initial_snapshot_sent = False
                    energy_session_deadline = None
                    _energy_sleep("disconnected after session")
                    continue
                if (
                    _is_ble_interface(iface)
                    and getattr(iface, "client", object()) is None
                ):
                    print(
                        "[info] energy saving: BLE client disconnected; sleeping before retry"
                    )
                    _close_interface(iface)
                    iface = None
                    announced_target = False
                    initial_snapshot_sent = False
                    energy_session_deadline = None
                    _energy_sleep("BLE client disconnected")
                    continue
            # Phase 3: push the interface's node table once per connection.
            if not initial_snapshot_sent:
                try:
                    nodes = getattr(iface, "nodes", {}) or {}
                    node_items = _node_items_snapshot(nodes)
                    if node_items is None:
                        config._debug_log(
                            "skipping node snapshot; nodes changed during iteration"
                        )
                    else:
                        processed_snapshot_item = False
                        for node_id, node in node_items:
                            processed_snapshot_item = True
                            try:
                                handlers.upsert_node(node_id, node)
                            except Exception as exc:
                                # Per-node failures are logged but do not
                                # abort the snapshot.
                                print(
                                    f"[warn] failed to update node snapshot for {node_id}: {exc}"
                                )
                                if config.DEBUG:
                                    config._debug_log(f"node object: {node!r}")
                        if processed_snapshot_item:
                            initial_snapshot_sent = True
                except Exception as exc:
                    # Snapshot-level failure: treat the interface as broken
                    # and reconnect with backoff.
                    print(f"[warn] failed to update node snapshot: {exc}")
                    _close_interface(iface)
                    iface = None
                    stop.wait(retry_delay)
                    if config._RECONNECT_MAX_DELAY_SECS > 0:
                        retry_delay = min(
                            (
                                retry_delay * 2
                                if retry_delay
                                else config._RECONNECT_INITIAL_DELAY_SECS
                            ),
                            config._RECONNECT_MAX_DELAY_SECS,
                        )
                    continue
            # Phase 4: reconnect when the link looks dead (reported
            # disconnected, or no packet within the inactivity window).
            if iface is not None and inactivity_reconnect_secs > 0:
                now_monotonic = time.monotonic()
                iface_activity = handlers.last_packet_monotonic()
                # Clamp stale pre-reconnect timestamps to the connect time.
                if (
                    iface_activity is not None
                    and iface_connected_at is not None
                    and iface_activity < iface_connected_at
                ):
                    iface_activity = iface_connected_at
                if iface_activity is not None and (
                    last_seen_packet_monotonic is None
                    or iface_activity > last_seen_packet_monotonic
                ):
                    last_seen_packet_monotonic = iface_activity
                    last_inactivity_reconnect = None
                latest_activity = iface_activity
                if latest_activity is None and iface_connected_at is not None:
                    latest_activity = iface_connected_at
                if latest_activity is None:
                    latest_activity = now_monotonic
                inactivity_elapsed = now_monotonic - latest_activity
                # ``isConnected`` may be a method, a plain attribute, or absent.
                connected_attr = getattr(iface, "isConnected", None)
                believed_disconnected = False
                if callable(connected_attr):
                    try:
                        believed_disconnected = not bool(connected_attr())
                    except Exception:
                        believed_disconnected = False
                elif connected_attr is not None:
                    believed_disconnected = not bool(connected_attr)
                should_reconnect = believed_disconnected or (
                    inactivity_elapsed >= inactivity_reconnect_secs
                )
                if should_reconnect:
                    # Throttle reconnect attempts to one per inactivity window.
                    if (
                        last_inactivity_reconnect is None
                        or now_monotonic - last_inactivity_reconnect
                        >= inactivity_reconnect_secs
                    ):
                        reason = (
                            "disconnected"
                            if believed_disconnected
                            else f"no data for {inactivity_elapsed:.0f}s"
                        )
                        print(
                            "[warn] mesh interface inactivity detected "
                            f"({reason}); reconnecting"
                        )
                        last_inactivity_reconnect = now_monotonic
                        _close_interface(iface)
                        iface = None
                        announced_target = False
                        initial_snapshot_sent = False
                        energy_session_deadline = None
                        iface_connected_at = None
                        continue
            # Healthy iteration: reset backoff and idle until the next pass.
            retry_delay = max(0.0, config._RECONNECT_INITIAL_DELAY_SECS)
            stop.wait(config.SNAPSHOT_SECS)
    except KeyboardInterrupt:  # pragma: no cover - interactive only
        config._debug_log("received KeyboardInterrupt; shutting down")
        stop.set()
    finally:
        _close_interface(iface)
__all__ = [
"_RECEIVE_TOPICS",
"_event_wait_allows_default_timeout",
"_node_items_snapshot",
"_subscribe_receive_topics",
"_is_ble_interface",
"main",
]

View File

@@ -0,0 +1,877 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Packet handlers that serialise data and push it to the HTTP queue."""
from __future__ import annotations
import base64
import json
import time
from collections.abc import Mapping
from . import config, queue
from .serialization import (
_canonical_node_id,
_coerce_float,
_coerce_int,
_decode_nodeinfo_payload,
_extract_payload_bytes,
_first,
_get,
_iso,
_merge_mappings,
_node_num_from_id,
_node_to_dict,
_nodeinfo_metrics_dict,
_nodeinfo_position_dict,
_nodeinfo_user_dict,
_pkt_to_dict,
upsert_payload,
)
def upsert_node(node_id, node) -> None:
    """Schedule an upsert for a single node.

    Parameters:
        node_id: Canonical identifier for the node in the ``!xxxxxxxx`` format.
        node: Node object or mapping to serialise for the API payload.

    Returns:
        ``None``. The payload is forwarded to the shared HTTP queue.
    """
    payload = upsert_payload(node_id, node)
    _queue_post_json("/api/nodes", payload, priority=queue._NODE_POST_PRIORITY)
    if not config.DEBUG:
        return
    user = _get(payload[node_id], "user") or {}
    short_name = _get(user, "shortName")
    long_name = _get(user, "longName")
    config._debug_log(
        f"upserted node {node_id} shortName={short_name!r} longName={long_name!r}"
    )
def store_position_packet(packet: Mapping, decoded: Mapping) -> None:
    """Persist a decoded position packet.

    Parameters:
        packet: Raw packet metadata emitted by Meshtastic.
        decoded: Decoded payload extracted from ``packet['decoded']``.

    Returns:
        ``None``. The formatted position data is queued for HTTP submission.
    """
    # Identify the sending node; fall back to the decoded ``num`` field.
    node_ref = _first(packet, "fromId", "from_id", "from", default=None)
    if node_ref is None:
        node_ref = _first(decoded, "num", default=None)
    node_id = _canonical_node_id(node_ref)
    if node_id is None:
        return
    node_num = _coerce_int(_first(decoded, "num", default=None))
    if node_num is None:
        node_num = _node_num_from_id(node_id)
    # A packet id is mandatory — it is the primary key for deduplication.
    pkt_id = _coerce_int(_first(packet, "id", "packet_id", "packetId", default=None))
    if pkt_id is None:
        return
    rx_time = _coerce_int(_first(packet, "rxTime", "rx_time", default=time.time()))
    if rx_time is None:
        rx_time = int(time.time())
    to_id = _first(packet, "toId", "to_id", "to", default=None)
    to_id = to_id if to_id not in {"", None} else None
    position_section = decoded.get("position") if isinstance(decoded, Mapping) else None
    if not isinstance(position_section, Mapping):
        position_section = {}
    # Coordinates: prefer float degrees; fall back to Meshtastic's scaled
    # integer fields (degrees * 1e7).
    latitude = _coerce_float(
        _first(position_section, "latitude", "raw.latitude", default=None)
    )
    if latitude is None:
        lat_i = _coerce_int(
            _first(
                position_section,
                "latitudeI",
                "latitude_i",
                "raw.latitude_i",
                default=None,
            )
        )
        if lat_i is not None:
            latitude = lat_i / 1e7
    longitude = _coerce_float(
        _first(position_section, "longitude", "raw.longitude", default=None)
    )
    if longitude is None:
        lon_i = _coerce_int(
            _first(
                position_section,
                "longitudeI",
                "longitude_i",
                "raw.longitude_i",
                default=None,
            )
        )
        if lon_i is not None:
            longitude = lon_i / 1e7
    altitude = _coerce_float(
        _first(position_section, "altitude", "raw.altitude", default=None)
    )
    position_time = _coerce_int(
        _first(position_section, "time", "raw.time", default=None)
    )
    location_source = _first(
        position_section,
        "locationSource",
        "location_source",
        "raw.location_source",
        default=None,
    )
    location_source = (
        str(location_source).strip() if location_source not in {None, ""} else None
    )
    # GNSS quality metadata (all optional, camelCase/snake_case/raw aliases).
    precision_bits = _coerce_int(
        _first(
            position_section,
            "precisionBits",
            "precision_bits",
            "raw.precision_bits",
            default=None,
        )
    )
    sats_in_view = _coerce_int(
        _first(
            position_section,
            "satsInView",
            "sats_in_view",
            "raw.sats_in_view",
            default=None,
        )
    )
    pdop = _coerce_float(
        _first(position_section, "PDOP", "pdop", "raw.PDOP", "raw.pdop", default=None)
    )
    ground_speed = _coerce_float(
        _first(
            position_section,
            "groundSpeed",
            "ground_speed",
            "raw.ground_speed",
            default=None,
        )
    )
    ground_track = _coerce_float(
        _first(
            position_section,
            "groundTrack",
            "ground_track",
            "raw.ground_track",
            default=None,
        )
    )
    # Radio-level metadata from the enclosing packet.
    snr = _coerce_float(_first(packet, "snr", "rx_snr", "rxSnr", default=None))
    rssi = _coerce_int(_first(packet, "rssi", "rx_rssi", "rxRssi", default=None))
    hop_limit = _coerce_int(_first(packet, "hopLimit", "hop_limit", default=None))
    bitfield = _coerce_int(_first(decoded, "bitfield", default=None))
    payload_bytes = _extract_payload_bytes(decoded)
    payload_b64 = base64_payload(payload_bytes)
    # Preserve the raw protobuf view when available (decoded-level first,
    # then the position section's own ``raw``).
    raw_section = decoded.get("raw") if isinstance(decoded, Mapping) else None
    raw_payload = _node_to_dict(raw_section) if raw_section else None
    if raw_payload is None and position_section:
        raw_position = (
            position_section.get("raw")
            if isinstance(position_section, Mapping)
            else None
        )
        if raw_position:
            raw_payload = _node_to_dict(raw_position)
    position_payload = {
        "id": pkt_id,
        "node_id": node_id or node_ref,
        "node_num": node_num,
        "num": node_num,
        "from_id": node_id,
        "to_id": to_id,
        "rx_time": rx_time,
        "rx_iso": _iso(rx_time),
        "latitude": latitude,
        "longitude": longitude,
        "altitude": altitude,
        "position_time": position_time,
        "location_source": location_source,
        "precision_bits": precision_bits,
        "sats_in_view": sats_in_view,
        "pdop": pdop,
        "ground_speed": ground_speed,
        "ground_track": ground_track,
        "snr": snr,
        "rssi": rssi,
        "hop_limit": hop_limit,
        "bitfield": bitfield,
        "payload_b64": payload_b64,
    }
    if raw_payload:
        position_payload["raw"] = raw_payload
    _queue_post_json(
        "/api/positions", position_payload, priority=queue._POSITION_POST_PRIORITY
    )
    if config.DEBUG:
        config._debug_log(
            f"stored position for {node_id} lat={latitude!r} lon={longitude!r}"
        )
def base64_payload(payload_bytes: bytes | None) -> str | None:
"""Encode raw payload bytes for JSON transport.
Parameters:
payload_bytes: Optional payload to encode. ``None`` is returned when
the payload is empty or missing.
Returns:
The Base64 encoded payload string or ``None`` when no payload exists.
"""
if not payload_bytes:
return None
return base64.b64encode(payload_bytes).decode("ascii")
def store_telemetry_packet(packet: Mapping, decoded: Mapping) -> None:
    """Persist telemetry metrics extracted from a packet.

    Parameters:
        packet: Packet metadata received from the radio interface.
        decoded: Meshtastic-decoded view containing telemetry structures.

    Returns:
        ``None``. The telemetry payload is added to the HTTP queue.
    """
    telemetry_section = (
        decoded.get("telemetry") if isinstance(decoded, Mapping) else None
    )
    # Without a telemetry section or a packet id there is nothing to store.
    if not isinstance(telemetry_section, Mapping):
        return
    pkt_id = _coerce_int(_first(packet, "id", "packet_id", "packetId", default=None))
    if pkt_id is None:
        return
    raw_from = _first(packet, "fromId", "from_id", "from", default=None)
    node_id = _canonical_node_id(raw_from)
    node_num = _coerce_int(_first(decoded, "num", "node_num", default=None))
    if node_num is None:
        node_num = _node_num_from_id(node_id or raw_from)
    to_id = _first(packet, "toId", "to_id", "to", default=None)
    raw_rx_time = _first(packet, "rxTime", "rx_time", default=time.time())
    try:
        rx_time = int(raw_rx_time)
    except (TypeError, ValueError):
        rx_time = int(time.time())
    rx_iso = _iso(rx_time)
    telemetry_time = _coerce_int(_first(telemetry_section, "time", default=None))
    # Channel may appear on the decoded view or the packet; default to 0.
    channel = _coerce_int(_first(decoded, "channel", default=None))
    if channel is None:
        channel = _coerce_int(_first(packet, "channel", default=None))
    if channel is None:
        channel = 0
    portnum = _first(decoded, "portnum", default=None)
    portnum = str(portnum) if portnum not in {None, ""} else None
    bitfield = _coerce_int(_first(decoded, "bitfield", default=None))
    snr = _coerce_float(_first(packet, "snr", "rx_snr", "rxSnr", default=None))
    rssi = _coerce_int(_first(packet, "rssi", "rx_rssi", "rxRssi", default=None))
    hop_limit = _coerce_int(_first(packet, "hopLimit", "hop_limit", default=None))
    payload_bytes = _extract_payload_bytes(decoded)
    payload_b64 = base64_payload(payload_bytes) or ""
    # Metric extraction: each value is probed under its camelCase,
    # snake_case, and nested device/environment-metrics aliases.
    battery_level = _coerce_float(
        _first(
            telemetry_section,
            "batteryLevel",
            "battery_level",
            "deviceMetrics.batteryLevel",
            "environmentMetrics.battery_level",
            "deviceMetrics.battery_level",
            default=None,
        )
    )
    voltage = _coerce_float(
        _first(
            telemetry_section,
            "voltage",
            "environmentMetrics.voltage",
            "deviceMetrics.voltage",
            default=None,
        )
    )
    channel_utilization = _coerce_float(
        _first(
            telemetry_section,
            "channelUtilization",
            "channel_utilization",
            "deviceMetrics.channelUtilization",
            "deviceMetrics.channel_utilization",
            default=None,
        )
    )
    air_util_tx = _coerce_float(
        _first(
            telemetry_section,
            "airUtilTx",
            "air_util_tx",
            "deviceMetrics.airUtilTx",
            "deviceMetrics.air_util_tx",
            default=None,
        )
    )
    uptime_seconds = _coerce_int(
        _first(
            telemetry_section,
            "uptimeSeconds",
            "uptime_seconds",
            "deviceMetrics.uptimeSeconds",
            "deviceMetrics.uptime_seconds",
            default=None,
        )
    )
    temperature = _coerce_float(
        _first(
            telemetry_section,
            "temperature",
            "environmentMetrics.temperature",
            default=None,
        )
    )
    relative_humidity = _coerce_float(
        _first(
            telemetry_section,
            "relativeHumidity",
            "relative_humidity",
            "environmentMetrics.relativeHumidity",
            "environmentMetrics.relative_humidity",
            default=None,
        )
    )
    barometric_pressure = _coerce_float(
        _first(
            telemetry_section,
            "barometricPressure",
            "barometric_pressure",
            "environmentMetrics.barometricPressure",
            "environmentMetrics.barometric_pressure",
            default=None,
        )
    )
    telemetry_payload = {
        "id": pkt_id,
        "node_id": node_id,
        "node_num": node_num,
        "from_id": node_id or raw_from,
        "to_id": to_id,
        "rx_time": rx_time,
        "rx_iso": rx_iso,
        "telemetry_time": telemetry_time,
        "channel": channel,
        "portnum": portnum,
        "bitfield": bitfield,
        "snr": snr,
        "rssi": rssi,
        "hop_limit": hop_limit,
        "payload_b64": payload_b64,
    }
    # Only include metrics that were actually present, keeping the payload
    # sparse so the API can distinguish "missing" from zero.
    if battery_level is not None:
        telemetry_payload["battery_level"] = battery_level
    if voltage is not None:
        telemetry_payload["voltage"] = voltage
    if channel_utilization is not None:
        telemetry_payload["channel_utilization"] = channel_utilization
    if air_util_tx is not None:
        telemetry_payload["air_util_tx"] = air_util_tx
    if uptime_seconds is not None:
        telemetry_payload["uptime_seconds"] = uptime_seconds
    if temperature is not None:
        telemetry_payload["temperature"] = temperature
    if relative_humidity is not None:
        telemetry_payload["relative_humidity"] = relative_humidity
    if barometric_pressure is not None:
        telemetry_payload["barometric_pressure"] = barometric_pressure
    _queue_post_json(
        "/api/telemetry", telemetry_payload, priority=queue._TELEMETRY_POST_PRIORITY
    )
    if config.DEBUG:
        config._debug_log(
            f"stored telemetry for {node_id!r} battery={battery_level!r} voltage={voltage!r}"
        )
def store_nodeinfo_packet(packet: Mapping, decoded: Mapping) -> None:
    """Persist node information updates.

    Merges protobuf ``NodeInfo`` fields with the loosely typed ``decoded``
    dict, preferring explicitly-set protobuf fields and falling back to the
    decoded values, then queues an upsert to ``/api/nodes``.

    Parameters:
        packet: Raw packet metadata describing the update.
        decoded: Decoded payload that may include ``user`` and ``position``
            sections.

    Returns:
        ``None``. The node payload is merged into the API queue.
    """
    # Decode the raw protobuf payload when present; helpers tolerate None.
    payload_bytes = _extract_payload_bytes(decoded)
    node_info = _decode_nodeinfo_payload(payload_bytes)
    decoded_user = decoded.get("user")
    user_dict = _nodeinfo_user_dict(node_info, decoded_user)
    # Names of fields explicitly set on the protobuf message, used to tell
    # "set to a default value" apart from "absent".
    node_info_fields = set()
    if node_info:
        node_info_fields = {field_desc.name for field_desc, _ in node_info.ListFields()}
    # Canonical node id: user section first, then the packet sender fields.
    node_id = None
    if isinstance(user_dict, Mapping):
        node_id = _canonical_node_id(user_dict.get("id"))
    if node_id is None:
        node_id = _canonical_node_id(
            _first(packet, "fromId", "from_id", "from", default=None)
        )
    if node_id is None:
        # Without an id there is nothing to upsert.
        return
    node_payload: dict = {}
    if user_dict:
        node_payload["user"] = user_dict
    # Numeric node number: protobuf field, decoded value (with a base-0
    # string parse as last resort), then derived from the canonical id.
    node_num = None
    if node_info and "num" in node_info_fields:
        try:
            node_num = int(node_info.num)
        except (TypeError, ValueError):
            node_num = None
    if node_num is None:
        decoded_num = decoded.get("num")
        if decoded_num is not None:
            try:
                node_num = int(decoded_num)
            except (TypeError, ValueError):
                try:
                    node_num = int(str(decoded_num).strip(), 0)
                except Exception:
                    node_num = None
    if node_num is None:
        node_num = _node_num_from_id(node_id)
    if node_num is not None:
        node_payload["num"] = node_num
    rx_time = int(_first(packet, "rxTime", "rx_time", default=time.time()))
    # lastHeard is clamped so it never lags behind this packet's rx time.
    last_heard = None
    if node_info and "last_heard" in node_info_fields:
        try:
            last_heard = int(node_info.last_heard)
        except (TypeError, ValueError):
            last_heard = None
    if last_heard is None:
        decoded_last_heard = decoded.get("lastHeard")
        if decoded_last_heard is not None:
            try:
                last_heard = int(decoded_last_heard)
            except (TypeError, ValueError):
                last_heard = None
    if last_heard is None or last_heard < rx_time:
        last_heard = rx_time
    node_payload["lastHeard"] = last_heard
    # SNR: protobuf value wins, otherwise radio metadata from the packet.
    snr = None
    if node_info and "snr" in node_info_fields:
        try:
            snr = float(node_info.snr)
        except (TypeError, ValueError):
            snr = None
    if snr is None:
        snr = _first(packet, "snr", "rx_snr", "rxSnr", default=None)
        if snr is not None:
            try:
                snr = float(snr)
            except (TypeError, ValueError):
                snr = None
    if snr is not None:
        node_payload["snr"] = snr
    hops = None
    if node_info and "hops_away" in node_info_fields:
        try:
            hops = int(node_info.hops_away)
        except (TypeError, ValueError):
            hops = None
    if hops is None:
        hops = decoded.get("hopsAway")
        if hops is not None:
            try:
                hops = int(hops)
            except (TypeError, ValueError):
                hops = None
    if hops is not None:
        node_payload["hopsAway"] = hops
    if node_info and "channel" in node_info_fields:
        try:
            node_payload["channel"] = int(node_info.channel)
        except (TypeError, ValueError):
            pass
    # Boolean flags are forwarded only when explicitly present.
    if node_info and "via_mqtt" in node_info_fields:
        node_payload["viaMqtt"] = bool(node_info.via_mqtt)
    if node_info and "is_favorite" in node_info_fields:
        node_payload["isFavorite"] = bool(node_info.is_favorite)
    elif "isFavorite" in decoded:
        node_payload["isFavorite"] = bool(decoded.get("isFavorite"))
    if node_info and "is_ignored" in node_info_fields:
        node_payload["isIgnored"] = bool(node_info.is_ignored)
    if node_info and "is_key_manually_verified" in node_info_fields:
        node_payload["isKeyManuallyVerified"] = bool(node_info.is_key_manually_verified)
    # Device metrics / position: decoded-dict values overlay protobuf ones.
    metrics = _nodeinfo_metrics_dict(node_info)
    decoded_metrics = decoded.get("deviceMetrics")
    if isinstance(decoded_metrics, Mapping):
        metrics = _merge_mappings(metrics, _node_to_dict(decoded_metrics))
    if metrics:
        node_payload["deviceMetrics"] = metrics
    position = _nodeinfo_position_dict(node_info)
    decoded_position = decoded.get("position")
    if isinstance(decoded_position, Mapping):
        position = _merge_mappings(position, _node_to_dict(decoded_position))
    if position:
        node_payload["position"] = position
    hop_limit = _first(packet, "hopLimit", "hop_limit", default=None)
    if hop_limit is not None and "hopLimit" not in node_payload:
        try:
            node_payload["hopLimit"] = int(hop_limit)
        except (TypeError, ValueError):
            pass
    _queue_post_json(
        "/api/nodes", {node_id: node_payload}, priority=queue._NODE_POST_PRIORITY
    )
    if config.DEBUG:
        short = None
        long_name = None
        if isinstance(user_dict, Mapping):
            short = user_dict.get("shortName")
            long_name = user_dict.get("longName")
        config._debug_log(
            f"stored nodeinfo for {node_id} shortName={short!r} longName={long_name!r}"
        )
def store_neighborinfo_packet(packet: Mapping, decoded: Mapping) -> None:
    """Persist neighbour information gathered from a packet.

    Parameters:
        packet: Raw Meshtastic packet metadata.
        decoded: Decoded view containing the neighbour information section.

    Returns:
        ``None``. The neighbour snapshot is queued for submission.
    """
    neighbor_section = (
        decoded.get("neighborinfo") if isinstance(decoded, Mapping) else None
    )
    if not isinstance(neighbor_section, Mapping):
        return
    # Reporting node: the section's own nodeId, else the packet sender.
    node_ref = _first(
        neighbor_section,
        "nodeId",
        "node_id",
        default=_first(packet, "fromId", "from_id", "from", default=None),
    )
    node_id = _canonical_node_id(node_ref)
    if node_id is None:
        return
    node_num = _coerce_int(_first(neighbor_section, "nodeId", "node_id", default=None))
    if node_num is None:
        node_num = _node_num_from_id(node_id)
    node_broadcast_interval = _coerce_int(
        _first(
            neighbor_section,
            "nodeBroadcastIntervalSecs",
            "node_broadcast_interval_secs",
            default=None,
        )
    )
    last_sent_by_ref = _first(
        neighbor_section,
        "lastSentById",
        "last_sent_by_id",
        default=None,
    )
    last_sent_by_id = _canonical_node_id(last_sent_by_ref)
    # Fall back to "now" when the packet carries no usable rx time.
    rx_time = _coerce_int(_first(packet, "rxTime", "rx_time", default=time.time()))
    if rx_time is None:
        rx_time = int(time.time())
    neighbors_payload = neighbor_section.get("neighbors")
    neighbors_iterable = (
        neighbors_payload if isinstance(neighbors_payload, list) else []
    )
    neighbor_entries: list[dict] = []
    for entry in neighbors_iterable:
        if not isinstance(entry, Mapping):
            continue
        neighbor_ref = _first(entry, "nodeId", "node_id", default=None)
        neighbor_id = _canonical_node_id(neighbor_ref)
        if neighbor_id is None:
            # Skip entries whose id cannot be canonicalised.
            continue
        neighbor_num = _coerce_int(_first(entry, "nodeId", "node_id", default=None))
        if neighbor_num is None:
            neighbor_num = _node_num_from_id(neighbor_id)
        snr = _coerce_float(_first(entry, "snr", default=None))
        entry_rx_time = _coerce_int(_first(entry, "rxTime", "rx_time", default=None))
        if entry_rx_time is None:
            # Entries without their own timestamp inherit the packet's.
            entry_rx_time = rx_time
        neighbor_entries.append(
            {
                "neighbor_id": neighbor_id,
                "neighbor_num": neighbor_num,
                "snr": snr,
                "rx_time": entry_rx_time,
                "rx_iso": _iso(entry_rx_time),
            }
        )
    payload = {
        "node_id": node_id,
        "node_num": node_num,
        "neighbors": neighbor_entries,
        "rx_time": rx_time,
        "rx_iso": _iso(rx_time),
    }
    # Optional fields are omitted entirely rather than posted as null.
    if node_broadcast_interval is not None:
        payload["node_broadcast_interval_secs"] = node_broadcast_interval
    if last_sent_by_id is not None:
        payload["last_sent_by_id"] = last_sent_by_id
    _queue_post_json("/api/neighbors", payload, priority=queue._NEIGHBOR_POST_PRIORITY)
    if config.DEBUG:
        config._debug_log(
            f"stored neighborinfo for {node_id} neighbors={len(neighbor_entries)}"
        )
def store_packet_dict(packet: Mapping) -> None:
    """Route a decoded packet to the appropriate storage handler.

    Dispatch order: telemetry, nodeinfo, position, neighborinfo, then text
    messages (plain or encrypted). Unrecognised packets are dropped.

    Parameters:
        packet: Packet dictionary emitted by the mesh interface.

    Returns:
        ``None``. Side-effects depend on the specific handler invoked.
    """
    decoded = packet.get("decoded") or {}
    portnum_raw = _first(decoded, "portnum", default=None)
    # Port numbers arrive either as enum names or integers; normalise both.
    portnum = str(portnum_raw).upper() if portnum_raw is not None else None
    portnum_int = _coerce_int(portnum_raw)
    telemetry_section = (
        decoded.get("telemetry") if isinstance(decoded, Mapping) else None
    )
    # NOTE(review): the numeric fallbacks below (65 for telemetry, "5" for
    # nodeinfo, "4" for position) appear not to match the upstream Meshtastic
    # PortNum enum (TELEMETRY_APP=67, NODEINFO_APP=4, POSITION_APP=3) —
    # confirm against meshtastic.portnums_pb2. In practice the library
    # usually delivers enum *names*, so the string checks dominate.
    if (
        portnum == "TELEMETRY_APP"
        or portnum_int == 65
        or isinstance(telemetry_section, Mapping)
    ):
        store_telemetry_packet(packet, decoded)
        return
    if portnum in {"5", "NODEINFO_APP"}:
        store_nodeinfo_packet(packet, decoded)
        return
    if portnum in {"4", "POSITION_APP"}:
        store_position_packet(packet, decoded)
        return
    neighborinfo_section = (
        decoded.get("neighborinfo") if isinstance(decoded, Mapping) else None
    )
    if portnum == "NEIGHBORINFO_APP" or isinstance(neighborinfo_section, Mapping):
        store_neighborinfo_packet(packet, decoded)
        return
    # Everything else is treated as a (possibly encrypted) text message.
    text = _first(decoded, "payload.text", "text", default=None)
    encrypted = _first(decoded, "payload.encrypted", "encrypted", default=None)
    if encrypted is None:
        encrypted = _first(packet, "encrypted", default=None)
    if not text and not encrypted:
        return
    if portnum and portnum not in {"1", "TEXT_MESSAGE_APP"}:
        return
    channel = _first(decoded, "channel", default=None)
    if channel is None:
        channel = _first(packet, "channel", default=0)
    try:
        channel = int(channel)
    except Exception:
        channel = 0
    pkt_id = _first(packet, "id", "packet_id", "packetId", default=None)
    if pkt_id is None:
        # Messages are keyed by packet id; without one we cannot store them.
        return
    rx_time = int(_first(packet, "rxTime", "rx_time", default=time.time()))
    from_id = _first(packet, "fromId", "from_id", "from", default=None)
    to_id = _first(packet, "toId", "to_id", "to", default=None)
    if (from_id is None or str(from_id) == "") and config.DEBUG:
        # Dump the full packet for diagnosis when the sender is missing.
        try:
            raw = json.dumps(packet, default=str)
        except Exception:
            raw = str(packet)
        config._debug_log(f"packet missing from_id: {raw}")
    snr = _first(packet, "snr", "rx_snr", "rxSnr", default=None)
    rssi = _first(packet, "rssi", "rx_rssi", "rxRssi", default=None)
    hop = _first(packet, "hopLimit", "hop_limit", default=None)
    message_payload = {
        "id": int(pkt_id),
        "rx_time": rx_time,
        "rx_iso": _iso(rx_time),
        "from_id": from_id,
        "to_id": to_id,
        "channel": channel,
        "portnum": str(portnum) if portnum is not None else None,
        "text": text,
        "encrypted": encrypted,
        "snr": float(snr) if snr is not None else None,
        "rssi": int(rssi) if rssi is not None else None,
        "hop_limit": int(hop) if hop is not None else None,
    }
    _queue_post_json(
        "/api/messages", message_payload, priority=queue._MESSAGE_POST_PRIORITY
    )
    if config.DEBUG:
        from_label = _canonical_node_id(from_id) or from_id
        to_label = _canonical_node_id(to_id) or to_id
        payload_desc = "Encrypted" if text is None and encrypted else text
        config._debug_log(
            f"stored message from {from_label!r} to {to_label!r} ch={channel} text={payload_desc!r}"
        )
# time.monotonic() value of the most recently processed packet, or None
# until the first packet arrives (set by _mark_packet_seen).
_last_packet_monotonic: float | None = None
def last_packet_monotonic() -> float | None:
    """Return the monotonic timestamp of the most recent packet.

    Returns:
        The ``time.monotonic()`` value recorded by ``_mark_packet_seen``, or
        ``None`` when no packet has been processed yet.
    """
    return _last_packet_monotonic
def _mark_packet_seen() -> None:
    """Record that a packet has been processed.

    Updates the module-level ``_last_packet_monotonic`` with the current
    ``time.monotonic()`` reading.
    """
    global _last_packet_monotonic
    _last_packet_monotonic = time.monotonic()
def on_receive(packet, interface) -> None:
    """Callback registered with Meshtastic to capture incoming packets.

    Parameters:
        packet: Packet payload supplied by the Meshtastic pubsub topic.
        interface: Interface instance that produced the packet. Only used for
            compatibility with Meshtastic's callback signature.

    Returns:
        ``None``. Packets are serialised and enqueued asynchronously.
    """
    if isinstance(packet, dict):
        # De-duplicate: the same dict instance is marked so a second
        # delivery of it is ignored.
        if packet.get("_potatomesh_seen"):
            return
        packet["_potatomesh_seen"] = True
    _mark_packet_seen()
    packet_dict = None
    try:
        packet_dict = _pkt_to_dict(packet)
        store_packet_dict(packet_dict)
    except Exception as exc:
        # Never let a malformed packet kill the receive loop; warn and
        # include the keys (or type) to aid debugging.
        info = (
            list(packet_dict.keys()) if isinstance(packet_dict, dict) else type(packet)
        )
        print(f"[warn] failed to store packet: {exc} | info: {info}")
# Public surface of this module, including handlers exercised by tests.
__all__ = [
    "_queue_post_json",
    "last_packet_monotonic",
    "on_receive",
    "store_neighborinfo_packet",
    "store_nodeinfo_packet",
    "store_packet_dict",
    "store_position_packet",
    "store_telemetry_packet",
    "upsert_node",
]
# Re-export the shared priority-queue submitter under this module's
# namespace (bound late so it follows the queue module's definition).
_queue_post_json = queue._queue_post_json

View File

@@ -0,0 +1,397 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mesh interface discovery helpers for interacting with Meshtastic hardware."""
from __future__ import annotations
import glob
import ipaddress
import re
import urllib.parse
from collections.abc import Mapping
from typing import TYPE_CHECKING
from meshtastic.serial_interface import SerialInterface
from meshtastic.tcp_interface import TCPInterface
from . import config, serialization
if TYPE_CHECKING: # pragma: no cover - import only used for type checking
from meshtastic.ble_interface import BLEInterface as _BLEInterface
BLEInterface = None
def _patch_meshtastic_nodeinfo_handler() -> None:
    """Ensure Meshtastic nodeinfo packets always include an ``id`` field.

    Wraps ``meshtastic._onNodeInfoReceive`` so that packets lacking an
    ``id`` get one derived from the user section or the sender fields,
    preventing ``KeyError: 'id'`` inside the upstream handler.
    """
    try:
        import meshtastic  # type: ignore
    except Exception:  # pragma: no cover - dependency optional in tests
        return
    original = getattr(meshtastic, "_onNodeInfoReceive", None)
    if not callable(original):
        return
    # Idempotence: never wrap our own wrapper a second time.
    if getattr(original, "_potato_mesh_safe_wrapper", False):
        return
    def _safe_on_node_info_receive(iface, packet):  # type: ignore[override]
        # Accept both mapping packets and objects exposing a __dict__.
        candidate_mapping: Mapping | None = None
        if isinstance(packet, Mapping):
            candidate_mapping = packet
        elif hasattr(packet, "__dict__") and isinstance(packet.__dict__, Mapping):
            candidate_mapping = packet.__dict__
        node_id = None
        if candidate_mapping is not None:
            # Resolution order: explicit id, user.id, then sender-ish keys.
            node_id = serialization._canonical_node_id(candidate_mapping.get("id"))
            if node_id is None:
                user_section = candidate_mapping.get("user")
                if isinstance(user_section, Mapping):
                    node_id = serialization._canonical_node_id(user_section.get("id"))
            if node_id is None:
                for key in ("fromId", "from_id", "from", "num", "nodeId", "node_id"):
                    node_id = serialization._canonical_node_id(
                        candidate_mapping.get(key)
                    )
                    if node_id:
                        break
        if node_id:
            if not isinstance(candidate_mapping, dict):
                # Copy read-only mappings so the id can be injected.
                try:
                    candidate_mapping = dict(candidate_mapping)
                except Exception:
                    candidate_mapping = {
                        k: candidate_mapping[k] for k in candidate_mapping
                    }
            if candidate_mapping.get("id") != node_id:
                candidate_mapping["id"] = node_id
            packet = candidate_mapping
        try:
            return original(iface, packet)
        except KeyError as exc:  # pragma: no cover - defensive only
            # Swallow only the specific KeyError('id') this patch targets.
            if exc.args and exc.args[0] == "id":
                return None
            raise
    _safe_on_node_info_receive._potato_mesh_safe_wrapper = True  # type: ignore[attr-defined]
    meshtastic._onNodeInfoReceive = _safe_on_node_info_receive
# Install the patch at import time.
_patch_meshtastic_nodeinfo_handler()
def _patch_meshtastic_ble_receive_loop() -> None:
    """Prevent ``UnboundLocalError`` crashes in Meshtastic's BLE reader.

    Replaces ``BLEInterface._receiveFromRadioImpl`` with a loop that always
    initialises ``payload`` before use and tolerates disconnects reported by
    Bleak.
    """
    try:
        from meshtastic import ble_interface as _ble_interface_module  # type: ignore
    except Exception:  # pragma: no cover - dependency optional in tests
        return
    ble_class = getattr(_ble_interface_module, "BLEInterface", None)
    if ble_class is None:
        return
    original = getattr(ble_class, "_receiveFromRadioImpl", None)
    if not callable(original):
        return
    # Idempotence: skip if our wrapper is already installed.
    if getattr(original, "_potato_mesh_safe_wrapper", False):
        return
    # Resolve the module-level collaborators the replacement loop relies on;
    # bail out rather than install a wrapper that would crash.
    FROMRADIO_UUID = getattr(_ble_interface_module, "FROMRADIO_UUID", None)
    BleakDBusError = getattr(_ble_interface_module, "BleakDBusError", ())
    BleakError = getattr(_ble_interface_module, "BleakError", ())
    logger = getattr(_ble_interface_module, "logger", None)
    time = getattr(_ble_interface_module, "time", None)
    if not FROMRADIO_UUID or logger is None or time is None:
        return
    def _safe_receive_from_radio(self):  # type: ignore[override]
        while self._want_receive:
            if self.should_read:
                self.should_read = False
                retries: int = 0
                while self._want_receive:
                    if self.client is None:
                        logger.debug("BLE client is None, shutting down")
                        self._want_receive = False
                        continue
                    # Always initialised before use (the upstream fix).
                    payload: bytes = b""
                    try:
                        payload = bytes(self.client.read_gatt_char(FROMRADIO_UUID))
                    except BleakDBusError as exc:
                        logger.debug("Device disconnected, shutting down %s", exc)
                        self._want_receive = False
                        payload = b""
                    except BleakError as exc:
                        if "Not connected" in str(exc):
                            logger.debug("Device disconnected, shutting down %s", exc)
                            self._want_receive = False
                            payload = b""
                        else:
                            raise ble_class.BLEError("Error reading BLE") from exc
                    if not payload:
                        if not self._want_receive:
                            break
                        # Brief back-off on empty reads, up to five retries.
                        if retries < 5:
                            time.sleep(0.1)
                            retries += 1
                            continue
                        break
                    logger.debug("FROMRADIO read: %s", payload.hex())
                    self._handleFromRadio(payload)
            else:
                time.sleep(0.01)
    _safe_receive_from_radio._potato_mesh_safe_wrapper = True  # type: ignore[attr-defined]
    ble_class._receiveFromRadioImpl = _safe_receive_from_radio
# Install the patch at import time.
_patch_meshtastic_ble_receive_loop()
# TCP port used when a network target does not specify one.
_DEFAULT_TCP_PORT = 4403
# Fallback target probed when no serial device can be opened.
_DEFAULT_TCP_TARGET = "http://127.0.0.1"
# Glob patterns covering common Linux and macOS serial device names.
_DEFAULT_SERIAL_PATTERNS = (
    "/dev/ttyACM*",
    "/dev/ttyUSB*",
    "/dev/tty.usbmodem*",
    "/dev/tty.usbserial*",
    "/dev/cu.usbmodem*",
    "/dev/cu.usbserial*",
)
# Matches colon-separated BLE MAC addresses such as ``AA:BB:CC:DD:EE:FF``.
_BLE_ADDRESS_RE = re.compile(r"^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$")
class _DummySerialInterface:
"""In-memory replacement for ``meshtastic.serial_interface.SerialInterface``."""
def __init__(self) -> None:
self.nodes: dict = {}
def close(self) -> None: # pragma: no cover - nothing to close
pass
def _parse_ble_target(value: str) -> str | None:
    """Return an uppercase BLE MAC address when ``value`` matches the format.

    Parameters:
        value: User-provided target string.

    Returns:
        The normalised MAC address or ``None`` when validation fails.
    """
    candidate = value.strip() if value else ""
    if candidate and _BLE_ADDRESS_RE.fullmatch(candidate):
        return candidate.upper()
    return None
def _parse_network_target(value: str) -> tuple[str, int] | None:
    """Return ``(host, port)`` when ``value`` is an IP address string.

    Parameters:
        value: Hostname or URL describing the TCP interface.

    Returns:
        A ``(host, port)`` tuple or ``None`` when parsing fails. Only
        literal IP addresses pass the ``ipaddress`` check; DNS names are
        rejected.
    """
    if not value:
        return None
    value = value.strip()
    if not value:
        return None
    def _validated_result(host: str | None, port: int | None) -> tuple[str, int] | None:
        # Accept only literal IPs, defaulting the Meshtastic TCP port.
        # NOTE: an explicit port of 0 falls through to the default because
        # ``or`` treats 0 as falsy.
        if not host:
            return None
        try:
            ipaddress.ip_address(host)
        except ValueError:
            return None
        return host, port or _DEFAULT_TCP_PORT
    # URL-style parses first: "scheme://host[:port]" and bare "host[:port]".
    parsed_values = []
    if "://" in value:
        parsed_values.append(urllib.parse.urlparse(value, scheme="tcp"))
    parsed_values.append(urllib.parse.urlparse(f"//{value}", scheme="tcp"))
    for parsed in parsed_values:
        try:
            port = parsed.port
        except ValueError:
            # Non-numeric or out-of-range port text.
            port = None
        result = _validated_result(parsed.hostname, port)
        if result:
            return result
    # Manual "host:port" split for values urlparse could not handle
    # (exactly one colon and not an IPv6 bracket form).
    if value.count(":") == 1 and not value.startswith("["):
        host, _, port_text = value.partition(":")
        try:
            port = int(port_text) if port_text else None
        except ValueError:
            port = None
        result = _validated_result(host, port)
        if result:
            return result
    # Last resort: treat the whole value as a bare address.
    return _validated_result(value, None)
def _load_ble_interface():
    """Return :class:`meshtastic.ble_interface.BLEInterface` when available.

    The resolved class is cached in the module-level ``BLEInterface`` global
    after the first successful import.

    Returns:
        The resolved BLE interface class.

    Raises:
        RuntimeError: If the BLE dependencies are not installed.
    """
    global BLEInterface
    if BLEInterface is not None:
        # Already resolved on a previous call.
        return BLEInterface
    try:
        from meshtastic.ble_interface import BLEInterface as _resolved_interface
    except ImportError as exc:  # pragma: no cover - exercised in non-BLE envs
        raise RuntimeError(
            "BLE interface requested but the Meshtastic BLE dependencies are not installed. "
            "Install the 'meshtastic[ble]' extra to enable BLE support."
        ) from exc
    BLEInterface = _resolved_interface
    # Mirror the resolved class onto already-imported facade modules so
    # their ``BLEInterface`` attribute stops being ``None``.
    try:
        import sys
        for module_name in ("data.mesh_ingestor", "data.mesh"):
            mesh_module = sys.modules.get(module_name)
            if mesh_module is not None:
                setattr(mesh_module, "BLEInterface", BLEInterface)
    except Exception:  # pragma: no cover - defensive only
        pass
    return _resolved_interface
def _create_serial_interface(port: str) -> tuple[object, str]:
    """Return an appropriate mesh interface for ``port``.

    Dispatch order: mock sentinels, BLE MAC addresses, literal IP targets,
    and finally a serial device path.

    Parameters:
        port: User-supplied port string which may represent serial, BLE or TCP.

    Returns:
        ``(interface, resolved_target)`` describing the created interface.
    """
    port_value = (port or "").strip()
    # Sentinel values disable hardware access entirely.
    if port_value.lower() in {"", "mock", "none", "null", "disabled"}:
        config._debug_log(f"using dummy serial interface for port={port_value!r}")
        return _DummySerialInterface(), "mock"
    ble_target = _parse_ble_target(port_value)
    if ble_target:
        config._debug_log(f"using BLE interface for address={ble_target}")
        return _load_ble_interface()(address=ble_target), ble_target
    network_target = _parse_network_target(port_value)
    if network_target:
        host, tcp_port = network_target
        config._debug_log(f"using TCP interface for host={host!r} port={tcp_port!r}")
        return (
            TCPInterface(hostname=host, portNumber=tcp_port),
            f"tcp://{host}:{tcp_port}",
        )
    # Anything else is assumed to be a serial device path.
    config._debug_log(f"using serial interface for port={port_value!r}")
    return SerialInterface(devPath=port_value), port_value
class NoAvailableMeshInterface(RuntimeError):
    """Raised when no default mesh interface can be created.

    The message summarises every failed connection attempt; the raiser
    chains the most recent underlying error via ``raise ... from``.
    """
def _default_serial_targets() -> list[str]:
    """Return candidate serial device paths for auto-discovery.

    Globs the known serial patterns in order, de-duplicating while keeping
    first-seen order, and guarantees ``/dev/ttyACM0`` appears as a final
    fallback when globbing did not already produce it.
    """
    # A dict doubles as an ordered set here.
    ordered: dict[str, None] = {}
    for pattern in _DEFAULT_SERIAL_PATTERNS:
        for device in sorted(glob.glob(pattern)):
            ordered.setdefault(device, None)
    ordered.setdefault("/dev/ttyACM0", None)
    return list(ordered)
def _create_default_interface() -> tuple[object, str]:
    """Attempt to create the default mesh interface, raising on failure.

    Tries each discovered serial candidate in order, then the default TCP
    target, collecting every failure along the way.

    Returns:
        ``(interface, resolved_target)`` for the discovered connection.

    Raises:
        NoAvailableMeshInterface: When no usable connection can be created.
    """
    errors: list[tuple[str, Exception]] = []
    for candidate in _default_serial_targets():
        try:
            return _create_serial_interface(candidate)
        except Exception as exc:  # pragma: no cover - hardware dependent
            errors.append((candidate, exc))
            config._debug_log(f"failed to open serial candidate {candidate!r}: {exc}")
    try:
        return _create_serial_interface(_DEFAULT_TCP_TARGET)
    except Exception as exc:  # pragma: no cover - network dependent
        errors.append((_DEFAULT_TCP_TARGET, exc))
        config._debug_log(f"failed to open TCP fallback {_DEFAULT_TCP_TARGET!r}: {exc}")
    if errors:
        # Summarise every failed target; chain the most recent error.
        summary = "; ".join(f"{target}: {error}" for target, error in errors)
        raise NoAvailableMeshInterface(
            f"no mesh interface available ({summary})"
        ) from errors[-1][1]
    raise NoAvailableMeshInterface("no mesh interface available")
# Public surface of this module, including discovery helpers used by tests.
__all__ = [
    "BLEInterface",
    "NoAvailableMeshInterface",
    "_DummySerialInterface",
    "_DEFAULT_TCP_PORT",
    "_DEFAULT_TCP_TARGET",
    "_create_default_interface",
    "_create_serial_interface",
    "_default_serial_targets",
    "_load_ble_interface",
    "_parse_ble_target",
    "_parse_network_target",
    "SerialInterface",
    "TCPInterface",
]

188
data/mesh_ingestor/queue.py Normal file
View File

@@ -0,0 +1,188 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Priority queue for POST operations."""
from __future__ import annotations

import heapq
import itertools
import json
import threading
import urllib.request
from dataclasses import dataclass, field
from typing import Callable, Iterable, Iterator, Tuple

from . import config
# Drain order for queued POSTs: lower numbers are sent first, so live chat
# messages take precedence over bulk node refreshes.
_MESSAGE_POST_PRIORITY = 10
_NEIGHBOR_POST_PRIORITY = 20
_POSITION_POST_PRIORITY = 30
_TELEMETRY_POST_PRIORITY = 40
_NODE_POST_PRIORITY = 50
# Used when callers do not specify a priority explicitly.
_DEFAULT_POST_PRIORITY = 90
@dataclass
class QueueState:
    """Mutable state for the HTTP POST priority queue.

    Attributes:
        lock: Guards all access to ``queue``, ``counter`` and ``active``.
        queue: Min-heap of ``(priority, insertion_index, path, payload)``.
        counter: Monotonic tie-breaker so equal priorities drain FIFO.
        active: True while a drain loop is running, preventing re-entry.
    """

    lock: threading.Lock = field(default_factory=threading.Lock)
    queue: list[tuple[int, int, str, dict]] = field(default_factory=list)
    # ``itertools.count()`` is consumed with ``next()``, so the accurate
    # annotation is Iterator[int] (Iterable does not guarantee ``__next__``).
    counter: Iterator[int] = field(default_factory=itertools.count)
    active: bool = False
# Shared module-level queue used by the default arguments below.
STATE = QueueState()
def _post_json(
    path: str,
    payload: dict,
    *,
    instance: str | None = None,
    api_token: str | None = None,
) -> None:
    """Send a JSON payload to the configured web API.

    Delivery is best-effort: failures are logged through
    ``config._debug_log`` and otherwise swallowed.

    Parameters:
        path: API path relative to the configured instance root.
        payload: JSON-serialisable body to transmit.
        instance: Optional override for :data:`config.INSTANCE`.
        api_token: Optional override for :data:`config.API_TOKEN`.
    """
    if instance is None:
        instance = config.INSTANCE
    if api_token is None:
        api_token = config.API_TOKEN
    if not instance:
        # No API endpoint configured; silently drop the payload.
        return
    url = f"{instance}{path}"
    data = json.dumps(payload).encode("utf-8")
    req = urllib.request.Request(
        url, data=data, headers={"Content-Type": "application/json"}
    )
    if api_token:
        req.add_header("Authorization", f"Bearer {api_token}")
    try:
        # The 10s timeout keeps a stalled server from blocking the drain loop.
        with urllib.request.urlopen(req, timeout=10) as resp:
            resp.read()
    except Exception as exc:  # pragma: no cover - exercised in production
        config._debug_log(f"[warn] POST {url} failed: {exc}")
def _enqueue_post_json(
    path: str,
    payload: dict,
    priority: int,
    *,
    state: QueueState = STATE,
) -> None:
    """Store a POST request in the priority queue.

    Parameters:
        path: API path for the queued request.
        payload: JSON-serialisable body.
        priority: Lower values execute first.
        state: Shared queue state, injectable for testing.
    """
    with state.lock:
        # The counter value breaks ties so equal priorities drain FIFO.
        heapq.heappush(
            state.queue, (priority, next(state.counter), path, payload)
        )
def _drain_post_queue(
    state: QueueState = STATE, send: Callable[[str, dict], None] | None = None
) -> None:
    """Process queued POST requests in priority order.

    Each item is popped while holding the lock, but ``send`` runs outside
    it so a slow HTTP request does not block producers from enqueueing.

    Parameters:
        state: Queue container holding pending items.
        send: Optional callable used to transmit requests.
    """
    if send is None:
        send = _post_json
    try:
        while True:
            with state.lock:
                if not state.queue:
                    return
                _priority, _idx, path, payload = heapq.heappop(state.queue)
            send(path, payload)
    finally:
        # Always clear the active flag, even if send() raised.
        with state.lock:
            state.active = False
def _queue_post_json(
    path: str,
    payload: dict,
    *,
    priority: int = _DEFAULT_POST_PRIORITY,
    state: QueueState = STATE,
    send: Callable[[str, dict], None] | None = None,
) -> None:
    """Queue a POST request and start processing if idle.

    Parameters:
        path: API path for the request.
        payload: JSON payload to send.
        priority: Scheduling priority where lower values run first.
        state: Queue container used to store pending requests.
        send: Optional transport override, primarily for tests.
    """
    if send is None:
        send = _post_json
    _enqueue_post_json(path, payload, priority, state=state)
    with state.lock:
        if state.active:
            # A drain loop is already running; it will pick up the new item.
            return
        state.active = True
    # Drain synchronously on the calling thread; the active flag prevents a
    # concurrent caller from starting a second drain.
    _drain_post_queue(state, send=send)
def _clear_post_queue(state: QueueState = STATE) -> None:
    """Clear the pending POST queue.

    Parameters:
        state: Queue state to reset. Defaults to the global queue.
    """
    with state.lock:
        # Empty the heap in place and mark the drain loop as idle.
        del state.queue[:]
        state.active = False
# Public surface of the queue module, including test-only helpers.
__all__ = [
    "STATE",
    "QueueState",
    "_DEFAULT_POST_PRIORITY",
    "_MESSAGE_POST_PRIORITY",
    "_NEIGHBOR_POST_PRIORITY",
    "_NODE_POST_PRIORITY",
    "_POSITION_POST_PRIORITY",
    "_TELEMETRY_POST_PRIORITY",
    "_clear_post_queue",
    "_drain_post_queue",
    "_enqueue_post_json",
    "_post_json",
    "_queue_post_json",
]

View File

@@ -0,0 +1,613 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for converting Meshtastic structures into JSON-friendly forms.
The helpers normalise loosely structured Meshtastic packets so they can be
forwarded to the web application using predictable field names and types.
"""
from __future__ import annotations
import base64
import dataclasses
import json
import math
import time
from collections.abc import Mapping
from google.protobuf.json_format import MessageToDict
from google.protobuf.message import DecodeError
from google.protobuf.message import Message as ProtoMessage
def _get(obj, key, default=None):
"""Return ``obj[key]`` or ``getattr(obj, key)`` when available.
Parameters:
obj: Mapping or object supplying attributes.
key: Name of the attribute or mapping key to retrieve.
default: Fallback value when ``key`` is not present.
Returns:
The resolved value or ``default`` if the lookup fails.
"""
if isinstance(obj, dict):
return obj.get(key, default)
return getattr(obj, key, default)
def _node_to_dict(n) -> dict:
    """Convert ``n`` into a JSON-serialisable mapping.

    Parameters:
        n: Arbitrary data structure, commonly a protobuf message, dataclass or
            nested containers produced by Meshtastic.

    Returns:
        A plain dictionary containing recursively converted values.
    """
    def _convert(value):
        if isinstance(value, dict):
            return {k: _convert(v) for k, v in value.items()}
        if isinstance(value, (list, tuple, set)):
            # All sequence-like containers collapse to lists.
            return [_convert(v) for v in value]
        if dataclasses.is_dataclass(value):
            return {k: _convert(getattr(value, k)) for k in value.__dataclass_fields__}
        if isinstance(value, ProtoMessage):
            try:
                return MessageToDict(
                    value,
                    preserving_proto_field_name=True,
                    use_integers_for_enums=False,
                )
            except Exception:
                # Fallback chain: to_dict(), JSON round-trip, then str().
                if hasattr(value, "to_dict"):
                    try:
                        return value.to_dict()
                    except Exception:
                        pass
                try:
                    return json.loads(json.dumps(value, default=str))
                except Exception:
                    return str(value)
        if isinstance(value, bytes):
            # Prefer UTF-8 text; fall back to a hex representation.
            try:
                return value.decode()
            except Exception:
                return value.hex()
        if isinstance(value, (str, int, float, bool)) or value is None:
            return value
        # Unknown objects: JSON round-trip coerces via str().
        try:
            return json.loads(json.dumps(value, default=str))
        except Exception:
            return str(value)
    return _convert(n)
def upsert_payload(node_id, node) -> dict:
    """Return the payload expected by ``/api/nodes`` upsert requests.

    Parameters:
        node_id: Canonical node identifier.
        node: Node representation to convert with :func:`_node_to_dict`.

    Returns:
        A mapping keyed by ``node_id`` describing the node.
    """
    return {node_id: _node_to_dict(node)}
def _iso(ts: int | float) -> str:
"""Convert ``ts`` into an ISO-8601 timestamp in UTC."""
import datetime
return (
datetime.datetime.fromtimestamp(int(ts), datetime.UTC)
.isoformat()
.replace("+00:00", "Z")
)
def _first(d, *names, default=None):
"""Return the first matching attribute or key from ``d``.
Parameters:
d: Mapping or object providing nested attributes.
*names: Candidate names, optionally using ``dot.separated`` notation
for nested lookups.
default: Value returned when no candidates succeed.
Returns:
The first non-empty value encountered or ``default``.
"""
def _mapping_get(obj, key):
if isinstance(obj, Mapping) and key in obj:
return True, obj[key]
if hasattr(obj, "__getitem__"):
try:
return True, obj[key]
except Exception:
pass
if hasattr(obj, key):
return True, getattr(obj, key)
return False, None
for name in names:
cur = d
ok = True
for part in name.split("."):
ok, cur = _mapping_get(cur, part)
if not ok:
break
if ok:
if cur is None:
continue
if isinstance(cur, str) and cur == "":
continue
return cur
return default
def _coerce_int(value):
"""Best-effort conversion of ``value`` to an integer.
Parameters:
value: Any type supported by Meshtastic payloads.
Returns:
An integer or ``None`` when conversion is not possible.
"""
if value is None:
return None
if isinstance(value, bool):
return int(value)
if isinstance(value, int):
return value
if isinstance(value, float):
return int(value) if math.isfinite(value) else None
if isinstance(value, (str, bytes, bytearray)):
text = value.decode() if isinstance(value, (bytes, bytearray)) else value
stripped = text.strip()
if not stripped:
return None
try:
if stripped.lower().startswith("0x"):
return int(stripped, 16)
return int(stripped, 10)
except ValueError:
try:
return int(float(stripped))
except ValueError:
return None
try:
return int(value)
except (TypeError, ValueError):
return None
def _coerce_float(value):
"""Best-effort conversion of ``value`` to a float.
Parameters:
value: Any type supported by Meshtastic payloads.
Returns:
A float or ``None`` when conversion fails or results in ``NaN``.
"""
if value is None:
return None
if isinstance(value, bool):
return float(value)
if isinstance(value, (int, float)):
result = float(value)
return result if math.isfinite(result) else None
if isinstance(value, (str, bytes, bytearray)):
text = value.decode() if isinstance(value, (bytes, bytearray)) else value
stripped = text.strip()
if not stripped:
return None
try:
result = float(stripped)
except ValueError:
return None
return result if math.isfinite(result) else None
try:
result = float(value)
except (TypeError, ValueError):
return None
return result if math.isfinite(result) else None
def _pkt_to_dict(packet) -> dict:
    """Normalise a packet into a plain dictionary.

    Parameters:
        packet: Packet object or mapping emitted by Meshtastic.

    Returns:
        A dictionary representation suitable for downstream processing.
        NOTE(review): when every conversion path fails, the JSON round-trip
        below may also return a non-dict scalar — confirm callers tolerate
        the ``{"_unparsed": ...}`` / scalar fallback shapes.
    """
    # Mappings are already in the desired shape.
    if isinstance(packet, dict):
        return packet
    if isinstance(packet, ProtoMessage):
        try:
            # Keep snake_case field names and symbolic enum labels so the
            # output matches the rest of the ingestion pipeline.
            return MessageToDict(
                packet, preserving_proto_field_name=True, use_integers_for_enums=False
            )
        except Exception:
            # Some wrappers expose their own converter; try it before the
            # generic JSON round-trip below.
            if hasattr(packet, "to_dict"):
                try:
                    return packet.to_dict()
                except Exception:
                    pass
    try:
        # Last resort: JSON round-trip, stringifying unserialisable values.
        return json.loads(json.dumps(packet, default=lambda o: str(o)))
    except Exception:
        return {"_unparsed": str(packet)}
def _canonical_node_id(value) -> str | None:
"""Convert node identifiers into the canonical ``!xxxxxxxx`` format.
Parameters:
value: Input identifier which may be an int, float or string.
Returns:
The canonical identifier or ``None`` if conversion fails.
"""
if value is None:
return None
if isinstance(value, (int, float)):
try:
num = int(value)
except (TypeError, ValueError):
return None
if num < 0:
return None
return f"!{num & 0xFFFFFFFF:08x}"
if not isinstance(value, str):
return None
trimmed = value.strip()
if not trimmed:
return None
if trimmed.startswith("^"):
return trimmed
if trimmed.startswith("!"):
body = trimmed[1:]
elif trimmed.lower().startswith("0x"):
body = trimmed[2:]
elif trimmed.isdigit():
try:
return f"!{int(trimmed, 10) & 0xFFFFFFFF:08x}"
except ValueError:
return None
else:
body = trimmed
if not body:
return None
try:
return f"!{int(body, 16) & 0xFFFFFFFF:08x}"
except ValueError:
return None
def _node_num_from_id(node_id) -> int | None:
"""Extract the numeric node ID from a canonical identifier.
Parameters:
node_id: Identifier value accepted by :func:`_canonical_node_id`.
Returns:
The numeric node ID or ``None`` when parsing fails.
"""
if node_id is None:
return None
if isinstance(node_id, (int, float)):
try:
num = int(node_id)
except (TypeError, ValueError):
return None
return num if num >= 0 else None
if not isinstance(node_id, str):
return None
trimmed = node_id.strip()
if not trimmed:
return None
if trimmed.startswith("!"):
trimmed = trimmed[1:]
if trimmed.lower().startswith("0x"):
trimmed = trimmed[2:]
try:
return int(trimmed, 16)
except ValueError:
try:
return int(trimmed, 10)
except ValueError:
return None
def _merge_mappings(base, extra):
    """Merge two mapping-like objects recursively.

    Parameters:
        base: Existing mapping or mapping-like structure.
        extra: Mapping or compatible object whose entries should overlay
            ``base``.

    Returns:
        A new dictionary containing the merged values.
    """
    # Normalise ``base`` to a fresh dict so the caller's copy is never
    # mutated; falsy bases simply start from an empty dict.
    if isinstance(base, Mapping):
        merged = dict(base)
    elif base:
        as_mapping = _node_to_dict(base)
        merged = dict(as_mapping) if isinstance(as_mapping, Mapping) else {}
    else:
        merged = {}
    # ``extra`` must be a mapping to contribute anything; try to convert
    # non-mapping inputs once before giving up.
    if not isinstance(extra, Mapping):
        converted = _node_to_dict(extra)
        if not isinstance(converted, Mapping):
            return merged
        extra = converted
    for key, value in extra.items():
        if isinstance(value, Mapping):
            # Nested mappings are merged recursively rather than replaced.
            merged[key] = _merge_mappings(merged.get(key), value)
        else:
            merged[key] = _node_to_dict(value)
    return merged
def _extract_payload_bytes(decoded_section: Mapping) -> bytes | None:
"""Return raw payload bytes from ``decoded_section`` when available.
Parameters:
decoded_section: Mapping that may include a ``payload`` entry.
Returns:
Raw payload bytes or ``None`` when the payload is missing or invalid.
"""
if not isinstance(decoded_section, Mapping):
return None
payload = decoded_section.get("payload")
if isinstance(payload, Mapping):
data = payload.get("__bytes_b64__") or payload.get("bytes")
if isinstance(data, str):
try:
return base64.b64decode(data)
except Exception:
return None
if isinstance(payload, (bytes, bytearray)):
return bytes(payload)
if isinstance(payload, str):
try:
return base64.b64decode(payload)
except Exception:
return None
return None
def _decode_nodeinfo_payload(payload_bytes):
"""Decode ``NodeInfo`` protobuf payloads from raw bytes.
Parameters:
payload_bytes: Serialized protobuf data from a NODEINFO packet.
Returns:
A :class:`meshtastic.protobuf.mesh_pb2.NodeInfo` instance or ``None``.
"""
if not payload_bytes:
return None
try:
from meshtastic.protobuf import mesh_pb2
except Exception:
return None
node_info = mesh_pb2.NodeInfo()
try:
node_info.ParseFromString(payload_bytes)
return node_info
except DecodeError:
try:
user_msg = mesh_pb2.User()
user_msg.ParseFromString(payload_bytes)
except DecodeError:
return None
node_info = mesh_pb2.NodeInfo()
node_info.user.CopyFrom(user_msg)
return node_info
def _nodeinfo_metrics_dict(node_info) -> dict | None:
"""Extract device metric fields from a NodeInfo message.
Parameters:
node_info: Parsed NodeInfo protobuf message.
Returns:
A dictionary containing selected metric fields, or ``None`` when no
metrics are present.
"""
if not node_info:
return None
metrics_field_names = {f[0].name for f in node_info.ListFields()}
if "device_metrics" not in metrics_field_names:
return None
metrics = {}
for field_desc, value in node_info.device_metrics.ListFields():
name = field_desc.name
if name == "battery_level":
metrics["batteryLevel"] = float(value)
elif name == "voltage":
metrics["voltage"] = float(value)
elif name == "channel_utilization":
metrics["channelUtilization"] = float(value)
elif name == "air_util_tx":
metrics["airUtilTx"] = float(value)
elif name == "uptime_seconds":
metrics["uptimeSeconds"] = int(value)
elif name == "humidity":
metrics["humidity"] = float(value)
elif name == "temperature":
metrics["temperature"] = float(value)
elif name == "barometric_pressure":
metrics["barometricPressure"] = float(value)
return metrics or None
def _nodeinfo_position_dict(node_info) -> dict | None:
"""Return a dictionary view of positional data from NodeInfo.
Parameters:
node_info: Parsed NodeInfo protobuf message.
Returns:
A dictionary of positional fields or ``None`` if no data exists.
"""
if not node_info:
return None
fields = {f[0].name for f in node_info.ListFields()}
if "position" not in fields:
return None
result = {}
latitude_i = None
longitude_i = None
for field_desc, value in node_info.position.ListFields():
name = field_desc.name
if name == "latitude_i":
latitude_i = int(value)
result["latitudeI"] = latitude_i
elif name == "longitude_i":
longitude_i = int(value)
result["longitudeI"] = longitude_i
elif name == "latitude":
result["latitude"] = float(value)
elif name == "longitude":
result["longitude"] = float(value)
elif name == "altitude":
result["altitude"] = int(value)
elif name == "time":
result["time"] = int(value)
elif name == "ground_speed":
result["groundSpeed"] = float(value)
elif name == "ground_track":
result["groundTrack"] = float(value)
elif name == "precision_bits":
result["precisionBits"] = int(value)
elif name == "location_source":
# Preserve the raw enum value to allow downstream formatting.
result["locationSource"] = int(value)
if "latitude" not in result and latitude_i is not None:
result["latitude"] = latitude_i / 1e7
if "longitude" not in result and longitude_i is not None:
result["longitude"] = longitude_i / 1e7
return result or None
def _nodeinfo_user_dict(node_info, decoded_user):
    """Combine protobuf and decoded user information into a mapping.

    Parameters:
        node_info: Parsed NodeInfo protobuf message that may contain a
            ``user`` field.
        decoded_user: Mapping or protobuf message representing decoded user
            data from the packet payload.

    Returns:
        A merged mapping of user information or ``None`` when no data
        exists.
    """
    merged = None
    # Start from the NodeInfo user block when one is present.
    if node_info:
        present = {descriptor[0].name for descriptor in node_info.ListFields()}
        if "user" in present:
            try:
                merged = MessageToDict(
                    node_info.user,
                    preserving_proto_field_name=False,
                    use_integers_for_enums=False,
                )
            except Exception:
                merged = None
    # Normalise the decoded payload into a mapping as well.
    if isinstance(decoded_user, ProtoMessage):
        try:
            decoded_user = MessageToDict(
                decoded_user,
                preserving_proto_field_name=False,
                use_integers_for_enums=False,
            )
        except Exception:
            decoded_user = _node_to_dict(decoded_user)
    if isinstance(decoded_user, Mapping):
        # Decoded packet data overlays the NodeInfo snapshot.
        merged = _merge_mappings(merged, decoded_user)
    if isinstance(merged, Mapping):
        canonical = _canonical_node_id(merged.get("id"))
        if canonical:
            # Copy before rewriting so callers never see a mutated input.
            merged = dict(merged)
            merged["id"] = canonical
    return merged
# Public re-export surface for this helpers module. Underscore-prefixed
# names are exported deliberately so sibling ingestor modules can share
# them via ``from ... import *`` without re-declaring each helper.
__all__ = [
    "_canonical_node_id",
    "_coerce_float",
    "_coerce_int",
    "_decode_nodeinfo_payload",
    "_extract_payload_bytes",
    "_first",
    "_get",
    "_iso",
    "_merge_mappings",
    "_node_num_from_id",
    "_node_to_dict",
    "_nodeinfo_metrics_dict",
    "_nodeinfo_position_dict",
    "_nodeinfo_user_dict",
    "_pkt_to_dict",
    "DecodeError",
    "MessageToDict",
    "ProtoMessage",
    "upsert_payload",
]

26
data/neighbors.sql Normal file
View File

@@ -0,0 +1,26 @@
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
CREATE TABLE IF NOT EXISTS neighbors (
node_id TEXT NOT NULL,
neighbor_id TEXT NOT NULL,
snr REAL,
rx_time INTEGER NOT NULL,
PRIMARY KEY (node_id, neighbor_id),
FOREIGN KEY (node_id) REFERENCES nodes(node_id) ON DELETE CASCADE,
FOREIGN KEY (neighbor_id) REFERENCES nodes(node_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_neighbors_rx_time ON neighbors(rx_time);
CREATE INDEX IF NOT EXISTS idx_neighbors_neighbor_id ON neighbors(neighbor_id);

View File

@@ -36,6 +36,7 @@ CREATE TABLE IF NOT EXISTS nodes (
uptime_seconds INTEGER,
position_time INTEGER,
location_source TEXT,
precision_bits INTEGER,
latitude REAL,
longitude REAL,
altitude REAL

43
data/telemetry.sql Normal file
View File

@@ -0,0 +1,43 @@
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
CREATE TABLE IF NOT EXISTS telemetry (
id INTEGER PRIMARY KEY,
node_id TEXT,
node_num INTEGER,
from_id TEXT,
to_id TEXT,
rx_time INTEGER NOT NULL,
rx_iso TEXT NOT NULL,
telemetry_time INTEGER,
channel INTEGER,
portnum TEXT,
hop_limit INTEGER,
snr REAL,
rssi INTEGER,
bitfield INTEGER,
payload_b64 TEXT,
battery_level REAL,
voltage REAL,
channel_utilization REAL,
air_util_tx REAL,
uptime_seconds INTEGER,
temperature REAL,
relative_humidity REAL,
barometric_pressure REAL
);
CREATE INDEX IF NOT EXISTS idx_telemetry_rx_time ON telemetry(rx_time);
CREATE INDEX IF NOT EXISTS idx_telemetry_node_id ON telemetry(node_id);
CREATE INDEX IF NOT EXISTS idx_telemetry_time ON telemetry(telemetry_time);

View File

@@ -2,6 +2,8 @@
services:
web:
build:
context: .
dockerfile: web/Dockerfile
target: production
environment:
DEBUG: 0
@@ -9,6 +11,8 @@ services:
web-bridge:
build:
context: .
dockerfile: web/Dockerfile
target: production
environment:
DEBUG: 0
@@ -16,6 +20,8 @@ services:
ingestor:
build:
context: .
dockerfile: data/Dockerfile
target: production
environment:
DEBUG: 0
@@ -23,6 +29,8 @@ services:
ingestor-bridge:
build:
context: .
dockerfile: data/Dockerfile
target: production
environment:
DEBUG: 0

View File

@@ -1,6 +1,8 @@
x-web-base: &web-base
image: ghcr.io/l5yth/potato-mesh-web-linux-amd64:latest
image: ghcr.io/l5yth/potato-mesh-web-${POTATOMESH_IMAGE_ARCH:-linux-amd64}:latest
environment:
APP_ENV: ${APP_ENV:-production}
RACK_ENV: ${RACK_ENV:-production}
SITE_NAME: ${SITE_NAME:-My Meshtastic Network}
DEFAULT_CHANNEL: ${DEFAULT_CHANNEL:-#MediumFast}
DEFAULT_FREQUENCY: ${DEFAULT_FREQUENCY:-868MHz}
@@ -10,6 +12,7 @@ x-web-base: &web-base
MATRIX_ROOM: ${MATRIX_ROOM:-}
API_TOKEN: ${API_TOKEN}
DEBUG: ${DEBUG:-0}
command: ["ruby", "app.rb", "-p", "41447", "-o", "0.0.0.0"]
volumes:
- potatomesh_data:/app/data
- potatomesh_logs:/app/logs
@@ -24,7 +27,7 @@ x-web-base: &web-base
cpus: '0.25'
x-ingestor-base: &ingestor-base
image: ghcr.io/l5yth/potato-mesh-ingestor-linux-amd64:latest
image: ghcr.io/l5yth/potato-mesh-ingestor-${POTATOMESH_IMAGE_ARCH:-linux-amd64}:latest
environment:
MESH_SERIAL: ${MESH_SERIAL:-/dev/ttyACM0}
MESH_SNAPSHOT_SECS: ${MESH_SNAPSHOT_SECS:-60}

BIN
scrot-0.4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 907 KiB

View File

@@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interactive debugging helpers for live Meshtastic sessions."""
import time, json, base64, threading
from pubsub import pub # comes with meshtastic
from meshtastic.serial_interface import SerialInterface
@@ -28,7 +30,14 @@ stop = threading.Event()
def to_jsonable(obj):
"""Recursively convert protobuf/bytes/etc. into JSON-serializable structures."""
"""Recursively convert complex objects into JSON-serialisable structures.
Parameters:
obj: Any Meshtastic-related payload or protobuf message.
Returns:
A structure composed of standard Python types.
"""
if obj is None:
return None
if isinstance(obj, ProtoMessage):
@@ -49,7 +58,14 @@ def to_jsonable(obj):
def extract_text(d):
"""Best-effort pull of decoded text from a dict produced by to_jsonable()."""
"""Best-effort pull of decoded text from :func:`to_jsonable` output.
Parameters:
d: Mapping derived from :func:`to_jsonable`.
Returns:
The decoded text when available, otherwise ``None``.
"""
dec = d.get("decoded") or {}
# Text packets usually at decoded.payload.text
payload = dec.get("payload") or {}
@@ -62,6 +78,12 @@ def extract_text(d):
def on_receive(packet, interface):
"""Display human-readable output for each received packet.
Parameters:
packet: Packet instance supplied by Meshtastic.
interface: Interface that produced the packet.
"""
global packet_count, last_rx_ts
packet_count += 1
last_rx_ts = time.time()
@@ -86,14 +108,20 @@ def on_receive(packet, interface):
def on_connected(interface, *args, **kwargs):
"""Log when a connection is established."""
print("[info] connection established")
def on_disconnected(interface, *args, **kwargs):
"""Log when the interface disconnects."""
print("[info] disconnected")
def main():
"""Run the interactive debugging loop."""
print(f"Opening Meshtastic on {PORT}")
# Use PubSub topics (reliable in current meshtastic)

View File

@@ -1,9 +1,31 @@
#!/usr/bin/env python3
import json, os, signal, sys, time, threading
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility script to dump Meshtastic traffic for offline analysis."""
from __future__ import annotations
import json
import os
import signal
import sys
import time
from datetime import datetime, timezone
from meshtastic.serial_interface import SerialInterface
from meshtastic.mesh_interface import MeshInterface
from meshtastic.serial_interface import SerialInterface
from pubsub import pub
PORT = os.environ.get("MESH_SERIAL", "/dev/ttyACM0")
@@ -13,11 +35,20 @@ OUT = os.environ.get("MESH_DUMP_FILE", "meshtastic-dump.ndjson")
f = open(OUT, "a", buffering=1, encoding="utf-8")
def now():
def now() -> str:
"""Return the current UTC timestamp in ISO 8601 format."""
return datetime.now(timezone.utc).isoformat()
def write(kind, payload):
def write(kind: str, payload: dict) -> None:
"""Append a JSON record to the dump file.
Parameters:
kind: Logical record type such as ``"packet"`` or ``"node"``.
payload: Serializable payload containing the record body.
"""
rec = {"ts": now(), "kind": kind, **payload}
f.write(json.dumps(rec, ensure_ascii=False, default=str) + "\n")
@@ -28,12 +59,26 @@ iface: MeshInterface = SerialInterface(PORT)
# Packet callback: every RF/Mesh packet the node receives/decodes lands here
def on_packet(packet, iface):
"""Write packet metadata whenever the radio receives a frame.
Parameters:
packet: Meshtastic packet object or dictionary.
iface: Interface instance delivering the packet.
"""
# 'packet' already includes decoded fields when available (portnum, payload, position, telemetry, etc.)
write("packet", {"packet": packet})
# Node callback: topology/metadata updates (nodeinfo, hops, lastHeard, etc.)
def on_node(node, iface):
"""Write node metadata updates produced by Meshtastic.
Parameters:
node: Meshtastic node object or mapping.
iface: Interface instance emitting the update.
"""
write("node", {"node": node})
@@ -57,6 +102,8 @@ except Exception as e:
# Keep the process alive until Ctrl-C
def _stop(signum, frame):
"""Handle termination signals by flushing buffers and exiting."""
write("meta", {"event": "stopping"})
try:
try:

20
tests/neighbors.json Normal file
View File

@@ -0,0 +1,20 @@
[
{
"node_id": "!7c5b0920",
"rx_time": 1758884186,
"node_broadcast_interval_secs": 1800,
"last_sent_by": "!9e99f8c0",
"neighbors": [
{ "node_id": "!2b22accc", "snr": -6.5, "rx_time": 1758884106 },
{ "node_id": "!43ba26d0", "snr": -5.0, "rx_time": 1758884120 },
{ "node_id": "!69ba6f71", "snr": -13.0, "rx_time": 1758884135 },
{ "node_id": "!fa848384", "snr": -14.75, "rx_time": 1758884150 },
{ "node_id": "!da6a35b4", "snr": -6.5, "rx_time": 1758884165 }
]
},
{
"node_id": "!cafebabe",
"rx_time": 1758883200,
"neighbors": []
}
]

File diff suppressed because it is too large Load Diff

84
tests/telemetry.json Normal file
View File

@@ -0,0 +1,84 @@
[
{
"id": 1256091342,
"node_id": "!9e95cf60",
"from_id": "!9e95cf60",
"to_id": "^all",
"rx_time": 1758024300,
"rx_iso": "2025-09-16T12:05:00Z",
"telemetry_time": 1758024300,
"channel": 0,
"portnum": "TELEMETRY_APP",
"battery_level": 101,
"bitfield": 1,
"payload_b64": "DTVr0mgSFQhlFQIrh0AdJb8YPyXYFSA9KJTPEg==",
"device_metrics": {
"batteryLevel": 101,
"voltage": 4.224,
"channelUtilization": 0.59666663,
"airUtilTx": 0.03908333,
"uptimeSeconds": 305044
},
"raw": {
"device_metrics": {
"battery_level": 101,
"voltage": 4.224,
"channel_utilization": 0.59666663,
"air_util_tx": 0.03908333,
"uptime_seconds": 305044
}
}
},
{
"id": 2817720548,
"node_id": "!2a2a2a2a",
"from_id": "!2a2a2a2a",
"to_id": "^all",
"rx_time": 1758024400,
"rx_iso": "2025-09-16T12:06:40Z",
"telemetry_time": 1758024390,
"channel": 0,
"portnum": "TELEMETRY_APP",
"bitfield": 1,
"environment_metrics": {
"temperature": 21.98,
"relativeHumidity": 39.475586,
"barometricPressure": 1017.8353
},
"raw": {
"environment_metrics": {
"temperature": 21.98,
"relative_humidity": 39.475586,
"barometric_pressure": 1017.8353
}
}
},
{
"id": 345678901,
"node_id": "!1234abcd",
"from_id": "!1234abcd",
"node_num": 305441741,
"to_id": "^all",
"rx_time": 1758024500,
"rx_iso": "2025-09-16T12:08:20Z",
"telemetry_time": 1758024450,
"channel": 1,
"portnum": "TELEMETRY_APP",
"payload_b64": "AAEC",
"device_metrics": {
"battery_level": 58.5,
"voltage": 3.92,
"channel_utilization": 0.284,
"air_util_tx": 0.051,
"uptime_seconds": 86400
},
"local_stats": {
"numPacketsTx": 1280,
"numPacketsRx": 1425,
"numClients": 6,
"numNodes": 18,
"freeHeap": 21344,
"heapLowWater": 19876
}
}
]

View File

@@ -1,7 +1,24 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import importlib
import sys
import types
"""End-to-end tests covering the mesh ingestion package."""
from dataclasses import dataclass
from pathlib import Path
from types import SimpleNamespace
@@ -11,7 +28,7 @@ import pytest
@pytest.fixture
def mesh_module(monkeypatch):
"""Import data.mesh with stubbed dependencies."""
"""Import :mod:`data.mesh` with stubbed dependencies."""
repo_root = Path(__file__).resolve().parents[1]
monkeypatch.syspath_prepend(str(repo_root))
@@ -48,9 +65,21 @@ def mesh_module(monkeypatch):
tcp_interface_mod.TCPInterface = DummyTCPInterface
ble_interface_mod = types.ModuleType("meshtastic.ble_interface")
class DummyBLEInterface:
def __init__(self, *_, **__):
self.closed = False
def close(self):
self.closed = True
ble_interface_mod.BLEInterface = DummyBLEInterface
meshtastic_mod = types.ModuleType("meshtastic")
meshtastic_mod.serial_interface = serial_interface_mod
meshtastic_mod.tcp_interface = tcp_interface_mod
meshtastic_mod.ble_interface = ble_interface_mod
if real_protobuf is not None:
meshtastic_mod.protobuf = real_protobuf
@@ -59,6 +88,7 @@ def mesh_module(monkeypatch):
sys.modules, "meshtastic.serial_interface", serial_interface_mod
)
monkeypatch.setitem(sys.modules, "meshtastic.tcp_interface", tcp_interface_mod)
monkeypatch.setitem(sys.modules, "meshtastic.ble_interface", ble_interface_mod)
if real_protobuf is not None:
monkeypatch.setitem(sys.modules, "meshtastic.protobuf", real_protobuf)
@@ -117,7 +147,7 @@ def mesh_module(monkeypatch):
monkeypatch.setitem(sys.modules, "google.protobuf.json_format", json_format_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.message", message_mod)
module_name = "data.mesh"
module_name = "data.mesh_ingestor"
if module_name in sys.modules:
module = importlib.reload(sys.modules[module_name])
else:
@@ -144,8 +174,9 @@ def test_snapshot_interval_defaults_to_60_seconds(mesh_module):
def test_create_serial_interface_allows_mock(mesh_module, value):
mesh = mesh_module
iface = mesh._create_serial_interface(value)
iface, resolved = mesh._create_serial_interface(value)
assert resolved == "mock"
assert isinstance(iface.nodes, dict)
iface.close()
@@ -161,9 +192,10 @@ def test_create_serial_interface_uses_serial_module(mesh_module, monkeypatch):
monkeypatch.setattr(mesh, "SerialInterface", fake_interface)
iface = mesh._create_serial_interface("/dev/ttyTEST")
iface, resolved = mesh._create_serial_interface("/dev/ttyTEST")
assert created["devPath"] == "/dev/ttyTEST"
assert resolved == "/dev/ttyTEST"
assert iface.nodes == {"!foo": sentinel}
@@ -178,9 +210,10 @@ def test_create_serial_interface_uses_tcp_for_ip(mesh_module, monkeypatch):
monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
iface = mesh._create_serial_interface("192.168.1.25:4500")
iface, resolved = mesh._create_serial_interface("192.168.1.25:4500")
assert created == {"hostname": "192.168.1.25", "portNumber": 4500}
assert resolved == "tcp://192.168.1.25:4500"
assert iface.nodes == {}
@@ -195,10 +228,11 @@ def test_create_serial_interface_defaults_tcp_port(mesh_module, monkeypatch):
monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
mesh._create_serial_interface("tcp://10.20.30.40")
_, resolved = mesh._create_serial_interface("tcp://10.20.30.40")
assert created["hostname"] == "10.20.30.40"
assert created["portNumber"] == mesh._DEFAULT_TCP_PORT
assert resolved == "tcp://10.20.30.40:4403"
def test_create_serial_interface_plain_ip(mesh_module, monkeypatch):
@@ -212,10 +246,67 @@ def test_create_serial_interface_plain_ip(mesh_module, monkeypatch):
monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
mesh._create_serial_interface(" 192.168.50.10 ")
_, resolved = mesh._create_serial_interface(" 192.168.50.10 ")
assert created["hostname"] == "192.168.50.10"
assert created["portNumber"] == mesh._DEFAULT_TCP_PORT
assert resolved == "tcp://192.168.50.10:4403"
def test_create_serial_interface_ble(mesh_module, monkeypatch):
mesh = mesh_module
created = {}
def fake_ble_interface(*, address=None, **_):
created["address"] = address
return SimpleNamespace(nodes={}, close=lambda: None)
monkeypatch.setattr(mesh, "BLEInterface", fake_ble_interface)
iface, resolved = mesh._create_serial_interface("ed:4d:9e:95:cf:60")
assert created["address"] == "ED:4D:9E:95:CF:60"
assert resolved == "ED:4D:9E:95:CF:60"
assert iface.nodes == {}
def test_create_default_interface_falls_back_to_tcp(mesh_module, monkeypatch):
mesh = mesh_module
attempts = []
def fake_targets():
return ["/dev/ttyFAIL"]
def fake_create(port):
attempts.append(port)
if port.startswith("/dev/tty"):
raise RuntimeError("missing serial device")
return SimpleNamespace(nodes={}, close=lambda: None), "tcp://127.0.0.1:4403"
monkeypatch.setattr(mesh, "_default_serial_targets", fake_targets)
monkeypatch.setattr(mesh, "_create_serial_interface", fake_create)
iface, resolved = mesh._create_default_interface()
assert attempts == ["/dev/ttyFAIL", mesh._DEFAULT_TCP_TARGET]
assert resolved == "tcp://127.0.0.1:4403"
assert iface.nodes == {}
def test_create_default_interface_raises_when_unavailable(mesh_module, monkeypatch):
mesh = mesh_module
monkeypatch.setattr(mesh, "_default_serial_targets", lambda: ["/dev/ttyFAIL"])
def always_fail(port):
raise RuntimeError(f"boom for {port}")
monkeypatch.setattr(mesh, "_create_serial_interface", always_fail)
with pytest.raises(mesh.NoAvailableMeshInterface) as exc_info:
mesh._create_default_interface()
assert "/dev/ttyFAIL" in str(exc_info.value)
def test_node_to_dict_handles_nested_structures(mesh_module):
@@ -368,6 +459,58 @@ def test_store_packet_dict_posts_position(mesh_module, monkeypatch):
assert payload["raw"]["time"] == 1_758_624_189
def test_store_packet_dict_posts_neighborinfo(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
packet = {
"id": 2049886869,
"rxTime": 1_758_884_186,
"fromId": "!7c5b0920",
"decoded": {
"portnum": "NEIGHBORINFO_APP",
"neighborinfo": {
"nodeId": 0x7C5B0920,
"lastSentById": 0x9E3AA2F0,
"nodeBroadcastIntervalSecs": 1800,
"neighbors": [
{"nodeId": 0x2B2A4D51, "snr": -6.5},
{"nodeId": 0x437FE3E0, "snr": -2.75, "rxTime": 1_758_884_150},
{"nodeId": "!0badc0de", "snr": None},
],
},
},
}
mesh.store_packet_dict(packet)
assert captured, "Expected POST to be triggered for neighbor info"
path, payload, priority = captured[0]
assert path == "/api/neighbors"
assert priority == mesh._NEIGHBOR_POST_PRIORITY
assert payload["node_id"] == "!7c5b0920"
assert payload["node_num"] == 0x7C5B0920
assert payload["rx_time"] == 1_758_884_186
assert payload["node_broadcast_interval_secs"] == 1800
assert payload["last_sent_by_id"] == "!9e3aa2f0"
neighbors = payload["neighbors"]
assert len(neighbors) == 3
assert neighbors[0]["neighbor_id"] == "!2b2a4d51"
assert neighbors[0]["neighbor_num"] == 0x2B2A4D51
assert neighbors[0]["rx_time"] == 1_758_884_186
assert neighbors[0]["snr"] == pytest.approx(-6.5)
assert neighbors[1]["neighbor_id"] == "!437fe3e0"
assert neighbors[1]["rx_time"] == 1_758_884_150
assert neighbors[1]["snr"] == pytest.approx(-2.75)
assert neighbors[2]["neighbor_id"] == "!0badc0de"
assert neighbors[2]["neighbor_num"] == 0x0BAD_C0DE
def test_store_packet_dict_handles_nodeinfo_packet(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
@@ -811,8 +954,9 @@ def test_main_retries_interface_creation(mesh_module, monkeypatch):
attempts.append(port)
if len(attempts) < 3:
raise RuntimeError("boom")
return iface
return iface, port
monkeypatch.setattr(mesh, "PORT", "/dev/ttyTEST")
monkeypatch.setattr(mesh, "_create_serial_interface", fake_create)
monkeypatch.setattr(mesh.threading, "Event", DummyEvent)
monkeypatch.setattr(mesh.signal, "signal", lambda *_, **__: None)
@@ -866,13 +1010,14 @@ def test_main_recreates_interface_after_snapshot_error(mesh_module, monkeypatch)
interface = FlakyInterface(fail_first)
interfaces.append(interface)
return interface
return interface, port
upsert_calls = []
def record_upsert(node_id, node):
upsert_calls.append(node_id)
monkeypatch.setattr(mesh, "PORT", "/dev/ttyTEST")
monkeypatch.setattr(mesh, "_create_serial_interface", fake_create)
monkeypatch.setattr(mesh, "upsert_node", record_upsert)
monkeypatch.setattr(mesh.threading, "Event", DummyEvent)
@@ -888,6 +1033,22 @@ def test_main_recreates_interface_after_snapshot_error(mesh_module, monkeypatch)
assert upsert_calls == ["!node"]
def test_main_exits_when_defaults_unavailable(mesh_module, monkeypatch):
mesh = mesh_module
def fail_default():
raise mesh.NoAvailableMeshInterface("no interface available")
monkeypatch.setattr(mesh, "PORT", None)
monkeypatch.setattr(mesh, "_create_default_interface", fail_default)
monkeypatch.setattr(mesh.signal, "signal", lambda *_, **__: None)
with pytest.raises(SystemExit) as exc_info:
mesh.main()
assert exc_info.value.code == 1
def test_store_packet_dict_uses_top_level_channel(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
@@ -979,6 +1140,104 @@ def test_store_packet_dict_includes_encrypted_payload(mesh_module, monkeypatch):
assert priority == mesh._MESSAGE_POST_PRIORITY
def test_store_packet_dict_handles_telemetry_packet(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
packet = {
"id": 1_256_091_342,
"rxTime": 1_758_024_300,
"fromId": "!9e95cf60",
"toId": "^all",
"decoded": {
"portnum": "TELEMETRY_APP",
"bitfield": 1,
"telemetry": {
"time": 1_758_024_300,
"deviceMetrics": {
"batteryLevel": 101,
"voltage": 4.224,
"channelUtilization": 0.59666663,
"airUtilTx": 0.03908333,
"uptimeSeconds": 305044,
},
"localStats": {
"numPacketsTx": 1280,
"numPacketsRx": 1425,
},
},
"payload": {
"__bytes_b64__": "DTVr0mgSFQhlFQIrh0AdJb8YPyXYFSA9KJTPEg==",
},
},
}
mesh.store_packet_dict(packet)
assert captured
path, payload, priority = captured[0]
assert path == "/api/telemetry"
assert priority == mesh._TELEMETRY_POST_PRIORITY
assert payload["id"] == 1_256_091_342
assert payload["node_id"] == "!9e95cf60"
assert payload["from_id"] == "!9e95cf60"
assert payload["rx_time"] == 1_758_024_300
assert payload["telemetry_time"] == 1_758_024_300
assert payload["channel"] == 0
assert payload["bitfield"] == 1
assert payload["payload_b64"] == "DTVr0mgSFQhlFQIrh0AdJb8YPyXYFSA9KJTPEg=="
assert payload["battery_level"] == pytest.approx(101.0)
assert payload["voltage"] == pytest.approx(4.224)
assert payload["channel_utilization"] == pytest.approx(0.59666663)
assert payload["air_util_tx"] == pytest.approx(0.03908333)
assert payload["uptime_seconds"] == 305044
def test_store_packet_dict_handles_environment_telemetry(mesh_module, monkeypatch):
    """Environment-metrics telemetry is flattened and the numeric sender canonicalised."""
    mesh = mesh_module
    captured = []
    # Intercept the outbound queue so the test performs no network I/O.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    # Sender is given as a raw integer ("from"), not a string id.
    packet = {
        "id": 2_817_720_548,
        "rxTime": 1_758_024_400,
        "from": 3_698_627_780,
        "decoded": {
            "portnum": "TELEMETRY_APP",
            "telemetry": {
                "time": 1_758_024_390,
                "environmentMetrics": {
                    "temperature": 21.98,
                    "relativeHumidity": 39.475586,
                    "barometricPressure": 1017.8353,
                },
            },
        },
    }
    mesh.store_packet_dict(packet)
    assert captured
    path, payload, priority = captured[0]
    assert path == "/api/telemetry"
    assert payload["id"] == 2_817_720_548
    # 3_698_627_780 == 0xdc7494c4, canonicalised to the "!hex" form.
    assert payload["node_id"] == "!dc7494c4"
    assert payload["from_id"] == "!dc7494c4"
    assert payload["telemetry_time"] == 1_758_024_390
    # Environment metrics are flattened into snake_case top-level fields.
    assert payload["temperature"] == pytest.approx(21.98)
    assert payload["relative_humidity"] == pytest.approx(39.475586)
    assert payload["barometric_pressure"] == pytest.approx(1017.8353)
def test_post_queue_prioritises_messages(mesh_module, monkeypatch):
mesh = mesh_module
mesh._clear_post_queue()
@@ -1048,3 +1307,494 @@ def test_node_items_snapshot_handles_empty_input(mesh_module):
assert mesh._node_items_snapshot(None) == []
assert mesh._node_items_snapshot({}) == []
def test_debug_log_emits_when_enabled(mesh_module, monkeypatch, capsys):
    """``_debug_log`` writes a ``[debug]``-prefixed line to stdout when DEBUG is on."""
    module = mesh_module
    monkeypatch.setattr(module, "DEBUG", True)
    module._debug_log("hello world")
    output = capsys.readouterr().out
    assert "[debug] hello world" in output
def test_event_wait_allows_default_timeout_handles_short_signature(
    mesh_module, monkeypatch
):
    """An ``Event.wait`` patched to take no timeout argument is still accepted."""
    mesh = mesh_module

    def wait_without_timeout(self):
        # Signature deliberately omits the ``timeout`` parameter.
        return True

    monkeypatch.setattr(
        mesh.threading.Event, "wait", wait_without_timeout, raising=False
    )
    assert mesh._event_wait_allows_default_timeout() is True
def test_event_wait_allows_default_timeout_handles_varargs(mesh_module, monkeypatch):
    """An ``Event.wait`` implemented via ``*args`` counts as timeout-capable."""
    mesh = mesh_module

    def wait_with_varargs(self, *args):
        # Varargs signatures cannot be introspected for a named timeout,
        # so the probe must treat them as accepting one.
        return False

    monkeypatch.setattr(mesh.threading.Event, "wait", wait_with_varargs, raising=False)
    assert mesh._event_wait_allows_default_timeout() is True
def test_parse_ble_target_rejects_invalid_values(mesh_module):
    """Blank and malformed BLE targets all resolve to ``None``."""
    module = mesh_module
    for candidate in ("", " ", "zz:zz:zz:zz:zz:zz"):
        assert module._parse_ble_target(candidate) is None
def test_parse_network_target_additional_cases(mesh_module):
    """Edge cases for TCP target parsing: blanks, hostnames, bad ports, explicit ports."""
    module = mesh_module
    # Blank strings and hostname URLs are rejected outright.
    for rejected in ("", " ", "tcp://example.com"):
        assert module._parse_network_target(rejected) is None
    # A malformed port falls back to the default TCP port.
    assert module._parse_network_target("tcp://10.1.2.3:abc") == (
        "10.1.2.3",
        module._DEFAULT_TCP_PORT,
    )
    # An explicit numeric port is honoured.
    assert module._parse_network_target("10.1.2.3:9001") == ("10.1.2.3", 9001)
def test_load_ble_interface_sets_global(monkeypatch):
    """``_load_ble_interface`` resolves BLEInterface lazily and caches it on the module."""
    # Make the repository root importable so ``data.mesh`` resolves.
    repo_root = Path(__file__).resolve().parents[1]
    monkeypatch.syspath_prepend(str(repo_root))
    # Build stub meshtastic submodules so importing data.mesh needs no hardware libs.
    serial_interface_mod = types.ModuleType("meshtastic.serial_interface")

    class DummySerial:
        def __init__(self, *_, **__):
            pass

    serial_interface_mod.SerialInterface = DummySerial
    tcp_interface_mod = types.ModuleType("meshtastic.tcp_interface")
    tcp_interface_mod.TCPInterface = DummySerial
    ble_interface_mod = types.ModuleType("meshtastic.ble_interface")

    class DummyBLE:
        def __init__(self, *_, **__):
            pass

    ble_interface_mod.BLEInterface = DummyBLE
    meshtastic_mod = types.ModuleType("meshtastic")
    meshtastic_mod.serial_interface = serial_interface_mod
    meshtastic_mod.tcp_interface = tcp_interface_mod
    meshtastic_mod.ble_interface = ble_interface_mod
    # Register every stub in sys.modules so any import path hits them.
    monkeypatch.setitem(sys.modules, "meshtastic", meshtastic_mod)
    monkeypatch.setitem(
        sys.modules, "meshtastic.serial_interface", serial_interface_mod
    )
    monkeypatch.setitem(sys.modules, "meshtastic.tcp_interface", tcp_interface_mod)
    monkeypatch.setitem(sys.modules, "meshtastic.ble_interface", ble_interface_mod)
    # Import (or reload) data.mesh against the stubbed meshtastic package.
    module_name = "data.mesh"
    module = (
        importlib.import_module(module_name)
        if module_name not in sys.modules
        else importlib.reload(sys.modules[module_name])
    )
    # Clear the cached class so the loader has to resolve it again.
    monkeypatch.setattr(module, "BLEInterface", None)
    resolved = module._load_ble_interface()
    assert resolved is ble_interface_mod.BLEInterface
    # The loader caches the resolved class in the module global.
    assert module.BLEInterface is ble_interface_mod.BLEInterface
def test_default_serial_targets_deduplicates(mesh_module, monkeypatch):
    """Duplicate glob matches collapse to one entry while defaults stay present."""
    module = mesh_module

    def fake_glob(pattern):
        # Return a duplicated USB hit and one ACM device; anything else is empty.
        matches = {
            "/dev/ttyUSB*": ["/dev/ttyUSB0", "/dev/ttyUSB0"],
            "/dev/ttyACM*": ["/dev/ttyACM1"],
        }
        return matches.get(pattern, [])

    monkeypatch.setattr(module.interfaces.glob, "glob", fake_glob)
    targets = module._default_serial_targets()
    assert targets.count("/dev/ttyUSB0") == 1
    assert "/dev/ttyACM1" in targets
    assert "/dev/ttyACM0" in targets
def test_post_json_logs_failures(mesh_module, monkeypatch, capsys):
    """Failed POSTs are logged as warnings instead of raising."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "INSTANCE", "https://example.invalid")
    monkeypatch.setattr(mesh, "DEBUG", True)

    def boom(*_, **__):
        raise RuntimeError("offline")

    # Force the underlying HTTP call to fail.
    monkeypatch.setattr(mesh.queue.urllib.request, "urlopen", boom)
    mesh._post_json("/api/test", {"foo": "bar"})
    captured = capsys.readouterr()
    assert "[warn] POST https://example.invalid/api/test failed" in captured.out
def test_queue_post_json_skips_when_active(mesh_module, monkeypatch):
    """Queueing while the worker is marked active leaves the flag untouched.

    NOTE(review): despite the name, the payload is still enqueued here —
    confirm against ``_queue_post_json`` whether "skip" refers only to not
    spawning another worker.
    """
    mesh = mesh_module
    mesh._clear_post_queue()
    mesh.STATE.active = True
    mesh._queue_post_json("/api/test", {"id": 1})
    assert mesh.STATE.active is True
    assert mesh.STATE.queue
    # Leave the shared queue clean for subsequent tests.
    mesh._clear_post_queue()
def test_node_to_dict_handles_proto_fallback(mesh_module, monkeypatch):
    """When every proto conversion path fails, ``str(proto)`` is the fallback."""
    mesh = mesh_module

    class FailingProto(mesh.ProtoMessage):
        def to_dict(self):
            raise RuntimeError("boom")

        def __str__(self):
            return "proto"

    def fail_message_to_dict(*_, **__):
        raise RuntimeError("nope")

    # Break MessageToDict and json.dumps so only __str__ remains usable.
    monkeypatch.setattr(mesh, "MessageToDict", fail_message_to_dict)
    monkeypatch.setattr(
        mesh.json, "dumps", lambda *_, **__: (_ for _ in ()).throw(TypeError())
    )
    converted = mesh._node_to_dict({"value": FailingProto()})
    assert converted["value"] == "proto"
def test_upsert_node_logs_in_debug(mesh_module, monkeypatch, capsys):
    """``upsert_node`` queues the node payload and emits a debug log line."""
    module = mesh_module
    monkeypatch.setattr(module, "DEBUG", True)
    calls = []
    monkeypatch.setattr(
        module,
        "_queue_post_json",
        lambda path, payload, *, priority: calls.append((path, payload, priority)),
    )
    module.upsert_node("!node", {"user": {"shortName": "SN", "longName": "LN"}})
    assert calls
    assert "upserted node !node" in capsys.readouterr().out
def test_coerce_int_and_float_cover_edge_cases(mesh_module):
    """``_coerce_int``/``_coerce_float`` edge cases: None, bools, inf/nan, strings, dunders."""
    mesh = mesh_module
    assert mesh._coerce_int(None) is None
    assert mesh._coerce_int(True) == 1
    assert mesh._coerce_int(7) == 7
    assert mesh._coerce_int(3.2) == 3
    # Non-finite floats are rejected rather than coerced.
    assert mesh._coerce_int(float("inf")) is None
    # Hex string literals are honoured, with surrounding whitespace stripped.
    assert mesh._coerce_int(" 0x10 ") == 16
    assert mesh._coerce_int(" ") is None
    assert mesh._coerce_int("7.0") == 7
    assert mesh._coerce_int("nan") is None

    class Intable:
        def __int__(self):
            return 9

    class BadInt:
        def __int__(self):
            raise TypeError

    # Objects with __int__ are coerced; conversion failures degrade to None.
    assert mesh._coerce_int(Intable()) == 9
    assert mesh._coerce_int(BadInt()) is None
    assert mesh._coerce_float(None) is None
    assert mesh._coerce_float(True) == 1.0
    assert mesh._coerce_float(3) == 3.0
    assert mesh._coerce_float(float("inf")) is None
    assert mesh._coerce_float(" 1.5 ") == 1.5
    assert mesh._coerce_float(" ") is None
    assert mesh._coerce_float("nan") is None

    class Floatable:
        def __float__(self):
            return 2.5

    class BadFloat:
        def __float__(self):
            raise TypeError

    # Same dunder handling for floats via __float__.
    assert mesh._coerce_float(Floatable()) == 2.5
    assert mesh._coerce_float(BadFloat()) is None
def test_canonical_node_id_variants(mesh_module):
    """``_canonical_node_id`` normalises ints, hex strings and decimal strings."""
    module = mesh_module
    expectations = [
        (None, None),
        (0x1234, "!00001234"),
        (" ", None),
        ("!deadbeef", "!deadbeef"),
        ("0xCAFEBABE", "!cafebabe"),
        # Decimal strings are parsed as base 10: 12345 == 0x3039.
        ("12345", "!00003039"),
        ("nothex", None),
    ]
    for value, expected in expectations:
        assert module._canonical_node_id(value) == expected
def test_node_num_from_id_variants(mesh_module):
    """``_node_num_from_id`` accepts ints and hex-ish strings, rejecting negatives/blanks."""
    module = mesh_module
    assert module._node_num_from_id(None) is None
    assert module._node_num_from_id(-1) is None
    assert module._node_num_from_id(" ") is None
    assert module._node_num_from_id(42) == 42
    assert module._node_num_from_id("!00ff") == 255
    assert module._node_num_from_id("0x10") == 16
    # Bare strings are interpreted as hexadecimal, not decimal.
    assert module._node_num_from_id("123") == 0x123
    assert module._node_num_from_id("bad") == 0xBAD
def test_merge_mappings_handles_non_mappings(mesh_module):
    """``_merge_mappings`` merges attribute-bearing objects as if they were dicts."""
    mesh = mesh_module

    @dataclass
    class UserBase:
        id: str

    @dataclass
    class UserExtra:
        name: str

    @dataclass
    class Holder:
        user: object

    base = Holder(UserBase("!1"))
    extra = Holder(UserExtra("Node"))
    merged = mesh._merge_mappings(base, extra)
    # Nested non-mapping values are converted and deep-merged key by key.
    assert merged == {"user": {"id": "!1", "name": "Node"}}
def test_extract_payload_bytes_edge_cases(mesh_module):
    """``_extract_payload_bytes`` handles missing, invalid, raw and base64 payloads."""
    module = mesh_module
    assert module._extract_payload_bytes(None) is None
    # A base64 marker with undecodable content yields None.
    bad_b64 = {"payload": {"__bytes_b64__": "invalid"}}
    assert module._extract_payload_bytes(bad_b64) is None
    # Raw bytes pass through; base64 strings are decoded.
    assert module._extract_payload_bytes({"payload": b"data"}) == b"data"
    assert module._extract_payload_bytes({"payload": "ZGF0YQ=="}) == b"data"
def test_decode_nodeinfo_payload_handles_user(mesh_module, monkeypatch):
    """When NodeInfo decoding fails, the payload is retried as a bare User proto."""
    mesh = mesh_module
    from meshtastic.protobuf import mesh_pb2

    user = mesh_pb2.User()
    user.id = "!01020304"
    payload = user.SerializeToString()

    def raise_decode(self, *_):
        raise mesh.DecodeError("fail")

    # Force the NodeInfo parse to fail so the User fallback path runs.
    monkeypatch.setattr(
        mesh_pb2.NodeInfo, "ParseFromString", raise_decode, raising=False
    )
    node_info = mesh._decode_nodeinfo_payload(payload)
    assert node_info is not None
    assert node_info.user.id == "!01020304"
def test_nodeinfo_helpers_cover_fallbacks(mesh_module, monkeypatch):
    """Metric/position/user helpers tolerate enum-name and proto-conversion failures."""
    mesh = mesh_module
    from meshtastic.protobuf import mesh_pb2

    node_info = mesh_pb2.NodeInfo()
    node_info.device_metrics.battery_level = 50
    node_info.position.latitude_i = int(1.23 * 1e7)
    node_info.position.longitude_i = int(4.56 * 1e7)
    # 99 is not a known LocSource value; combined with the patched Name()
    # below this exercises the raw-integer fallback.
    node_info.position.location_source = 99
    monkeypatch.setattr(
        mesh_pb2.Position.LocSource,
        "Name",
        lambda value: (_ for _ in ()).throw(RuntimeError()),
        raising=False,
    )
    metrics = mesh._nodeinfo_metrics_dict(node_info)
    position = mesh._nodeinfo_position_dict(node_info)
    assert metrics["batteryLevel"] == 50.0
    assert position["locationSource"] == 99

    class DummyProto(mesh.ProtoMessage):
        def __init__(self):
            self.id = "!11223344"

        def __str__(self):
            return "dummy-proto"

        def to_dict(self):
            return {"id": self.id}

    def raise_message_to_dict(*_, **__):
        raise RuntimeError()

    # With MessageToDict broken, the user dict must come from to_dict().
    monkeypatch.setattr(mesh, "MessageToDict", raise_message_to_dict)
    user = mesh._nodeinfo_user_dict(node_info, DummyProto())
    assert user["id"] == "!11223344"
def test_store_position_packet_defaults(mesh_module, monkeypatch):
    """Position packets with blank or malformed fields fall back to safe defaults."""
    mesh = mesh_module
    captured = []
    # Intercept the outbound queue so the test performs no network I/O.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    # Empty rxTime/to and a short hex sender exercise the default branches.
    packet = {"id": "7", "rxTime": "", "from": "!abcd", "to": "", "decoded": {}}
    mesh.store_position_packet(packet, {})
    assert captured
    _, payload, _ = captured[0]
    # The sender id is zero-padded to the canonical 8-hex-digit form.
    assert payload["node_id"] == "!0000abcd"
    assert payload["node_num"] == int("abcd", 16)
    assert payload["to_id"] is None
    # Missing position data becomes explicit None values, not omissions.
    assert payload["latitude"] is None
    assert payload["longitude"] is None
def test_store_nodeinfo_packet_debug(mesh_module, monkeypatch, capsys):
    """A NODEINFO_APP packet is decoded from protobuf bytes and logged in debug mode."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "DEBUG", True)
    monkeypatch.setattr(mesh, "_queue_post_json", lambda *_, **__: None)
    from meshtastic.protobuf import mesh_pb2

    # Build a populated NodeInfo proto and embed it base64-encoded, mirroring
    # how the radio layer delivers raw payload bytes.
    node_info = mesh_pb2.NodeInfo()
    user = node_info.user
    user.id = "!01020304"
    user.short_name = "A"
    user.long_name = "B"
    node_info.channel = 1
    node_info.via_mqtt = True
    node_info.is_ignored = True
    node_info.is_key_manually_verified = True
    payload = {
        "__bytes_b64__": base64.b64encode(node_info.SerializeToString()).decode()
    }
    packet = {
        "id": 1,
        "rxTime": 1,
        "decoded": {"portnum": "NODEINFO_APP", "payload": payload},
    }
    mesh.store_packet_dict(packet)
    out = capsys.readouterr().out
    assert "stored nodeinfo" in out
def test_store_neighborinfo_packet_debug(mesh_module, monkeypatch, capsys):
    """A NEIGHBORINFO_APP packet is queued and logged in debug mode."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "DEBUG", True)
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append(payload),
    )
    packet = {
        "id": 1,
        "rxTime": 2,
        "fromId": "!12345678",
        "decoded": {
            "portnum": "NEIGHBORINFO_APP",
            "neighborinfo": {
                "nodeId": 0x12345678,
                # An empty neighbour list is still stored.
                "neighbors": [],
            },
        },
    }
    mesh.store_packet_dict(packet)
    assert captured
    out = capsys.readouterr().out
    assert "stored neighborinfo" in out
def test_store_packet_dict_debug_message(mesh_module, monkeypatch, capsys):
    """A TEXT_MESSAGE_APP packet is queued and produces a debug log line."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "DEBUG", True)
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append(payload),
    )
    packet = {
        "id": 2,
        "rxTime": 10,
        "fromId": "!abc",
        "decoded": {"payload": {"text": "hi"}, "portnum": "TEXT_MESSAGE_APP"},
    }
    mesh.store_packet_dict(packet)
    assert captured
    out = capsys.readouterr().out
    assert "stored message" in out
def test_on_receive_skips_seen_packets(mesh_module):
    """Packets already tagged as seen are ignored without clearing the marker."""
    module = mesh_module
    marked_packet = {"_potatomesh_seen": True}
    module.on_receive(marked_packet, interface=None)
    assert marked_packet["_potatomesh_seen"] is True

View File

@@ -1,3 +1,5 @@
# syntax=docker/dockerfile:1.6
# Main application builder stage
FROM ruby:3.3-alpine AS builder
@@ -43,7 +45,8 @@ WORKDIR /app
COPY --from=builder /usr/local/bundle /usr/local/bundle
# Copy application code (exclude Dockerfile from web directory)
COPY --chown=potatomesh:potatomesh web/app.rb web/app.sh web/Gemfile web/Gemfile.lock* web/public/ web/spec/ ./
COPY --chown=potatomesh:potatomesh web/app.rb web/app.sh web/Gemfile web/Gemfile.lock* web/spec/ ./
COPY --chown=potatomesh:potatomesh web/public ./public
COPY --chown=potatomesh:potatomesh web/views/ ./views/
# Copy SQL schema files from data directory
@@ -60,7 +63,8 @@ USER potatomesh
EXPOSE 41447
# Default environment variables (can be overridden by host)
ENV APP_ENV=production \
ENV RACK_ENV=production \
APP_ENV=production \
MESH_DB=/app/data/mesh.db \
DB_BUSY_TIMEOUT_MS=5000 \
DB_BUSY_MAX_RETRIES=5 \

View File

@@ -18,6 +18,7 @@ gem "sinatra", "~> 4.0"
gem "sqlite3", "~> 1.7"
gem "rackup", "~> 2.2"
gem "puma", "~> 7.0"
gem "prometheus-client"
group :test do
gem "rspec", "~> 3.12"

2514
web/app.rb

File diff suppressed because it is too large Load Diff

12
web/package-lock.json generated Normal file
View File

@@ -0,0 +1,12 @@
{
"name": "potato-mesh",
"version": "0.5.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "potato-mesh",
"version": "0.5.0"
}
}
}

9
web/package.json Normal file
View File

@@ -0,0 +1,9 @@
{
"name": "potato-mesh",
"version": "0.5.0",
"type": "module",
"private": true,
"scripts": {
"test": "mkdir -p reports coverage && NODE_V8_COVERAGE=coverage node --test --experimental-test-coverage --test-reporter=junit --test-reporter-destination=reports/javascript-junit.xml && node ./scripts/export-coverage.js"
}
}

View File

@@ -0,0 +1,85 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { documentStub, resetDocumentStub } from './document-stub.js';
import { readAppConfig } from '../config.js';
import { DEFAULT_CONFIG, mergeConfig } from '../settings.js';
test('readAppConfig returns an empty object when the configuration element is missing', () => {
  resetDocumentStub();
  assert.deepEqual(readAppConfig(), {});
});

test('readAppConfig returns an empty object when the attribute is empty', () => {
  resetDocumentStub();
  documentStub.setConfigElement({ getAttribute: () => '' });
  assert.deepEqual(readAppConfig(), {});
});

test('readAppConfig parses configuration JSON from the DOM attribute', () => {
  resetDocumentStub();
  const data = { refreshMs: 5000, chatEnabled: false };
  documentStub.setConfigElement({
    getAttribute: name => (name === 'data-app-config' ? JSON.stringify(data) : null)
  });
  assert.deepEqual(readAppConfig(), data);
});

test('readAppConfig returns an empty object and logs on parse failure', () => {
  resetDocumentStub();
  let called = false;
  const originalError = console.error;
  console.error = () => {
    called = true;
  };
  // Restore console.error even when an assertion throws; previously a
  // failing assertion left the stub in place and silenced error output
  // for every subsequent test in the run.
  try {
    documentStub.setConfigElement({
      getAttribute: name => (name === 'data-app-config' ? 'not-json' : null)
    });
    assert.deepEqual(readAppConfig(), {});
    assert.equal(called, true);
  } finally {
    console.error = originalError;
  }
});

test('mergeConfig applies default values when fields are missing', () => {
  const result = mergeConfig({});
  // Nested objects must be fresh copies, not references into DEFAULT_CONFIG.
  assert.deepEqual(result, {
    ...DEFAULT_CONFIG,
    mapCenter: { ...DEFAULT_CONFIG.mapCenter },
    tileFilters: { ...DEFAULT_CONFIG.tileFilters }
  });
});

test('mergeConfig coerces numeric values and nested objects', () => {
  const result = mergeConfig({
    refreshIntervalSeconds: '30',
    refreshMs: '45000',
    mapCenter: { lat: '10.5', lon: '20.1' },
    tileFilters: { dark: 'contrast(2)' },
    chatEnabled: 0,
    defaultChannel: '#Custom',
    defaultFrequency: '915MHz',
    maxNodeDistanceKm: '55.5'
  });
  assert.equal(result.refreshIntervalSeconds, 30);
  assert.equal(result.refreshMs, 45000);
  assert.deepEqual(result.mapCenter, { lat: 10.5, lon: 20.1 });
  // Partial tileFilters overrides keep the untouched default entries.
  assert.deepEqual(result.tileFilters, { light: DEFAULT_CONFIG.tileFilters.light, dark: 'contrast(2)' });
  assert.equal(result.chatEnabled, false);
  assert.equal(result.defaultChannel, '#Custom');
  assert.equal(result.defaultFrequency, '915MHz');
  assert.equal(result.maxNodeDistanceKm, 55.5);
});

test('mergeConfig falls back to defaults for invalid numeric values', () => {
  const result = mergeConfig({
    refreshIntervalSeconds: 'NaN',
    refreshMs: 'NaN',
    maxNodeDistanceKm: 'oops'
  });
  assert.equal(result.refreshIntervalSeconds, DEFAULT_CONFIG.refreshIntervalSeconds);
  assert.equal(result.refreshMs, DEFAULT_CONFIG.refreshMs);
  assert.equal(result.maxNodeDistanceKm, DEFAULT_CONFIG.maxNodeDistanceKm);
});

View File

@@ -0,0 +1,39 @@
/**
 * Minimal stand-in for the browser `document` object used by unit tests.
 * Only the members the application code touches are implemented: a single
 * `[data-app-config]` lookup and a one-handler-per-event listener registry.
 */
class DocumentStub {
  constructor() {
    this.reset();
  }

  /** Clear the stored config element and forget all registered listeners. */
  reset() {
    this.configElement = null;
    this.listeners = new Map();
  }

  /** Register the element returned for `[data-app-config]` queries. */
  setConfigElement(element) {
    this.configElement = element;
  }

  /** Resolve only the config selector; every other selector yields null. */
  querySelector(selector) {
    return selector === '[data-app-config]' ? this.configElement : null;
  }

  /** Remember the latest handler registered for an event name. */
  addEventListener(event, handler) {
    this.listeners.set(event, handler);
  }

  /** Invoke the handler registered for the event name, if one exists. */
  dispatchEvent(event) {
    const handler = this.listeners.get(event);
    if (!handler) {
      return;
    }
    handler();
  }
}
/** Shared singleton stub consumed by every test module. */
export const documentStub = new DocumentStub();

/** Restore the shared stub to its pristine state between tests. */
export function resetDocumentStub() {
  documentStub.reset();
}

// Install the stub as the global `document` so browser-oriented modules can
// be imported under Node's test runner without a real DOM.
globalThis.document = documentStub;

View File

@@ -0,0 +1,45 @@
/*
* Copyright (C) 2025 l5yth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * CSS selector matching the element that carries the serialized app config.
 *
 * @type {string}
 */
const CONFIG_SELECTOR = '[data-app-config]';

/**
 * Read and parse the serialized application configuration from the DOM.
 *
 * @returns {Object<string, *>} Parsed configuration object, or an empty
 *     object when the element, attribute, or JSON payload is unusable.
 */
export function readAppConfig() {
  const element = document.querySelector(CONFIG_SELECTOR);
  if (!element) {
    return {};
  }
  const serialized = element.getAttribute('data-app-config') || '';
  if (!serialized) {
    return {};
  }
  try {
    const parsed = JSON.parse(serialized);
    if (typeof parsed === 'object' && parsed !== null) {
      return parsed;
    }
    // Scalars like numbers or strings are not a usable configuration.
    return {};
  } catch (err) {
    console.error('Failed to parse application configuration', err);
    return {};
  }
}

View File

@@ -0,0 +1,33 @@
/*
* Copyright (C) 2025 l5yth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { readAppConfig } from './config.js';
import { initializeApp } from './main.js';
import { DEFAULT_CONFIG, mergeConfig } from './settings.js';

// Re-exported so tests and other entry points can reach the settings
// helpers through this module.
export { DEFAULT_CONFIG, mergeConfig } from './settings.js';

/**
 * Bootstraps the application once the DOM is ready by reading configuration
 * data and delegating to ``initializeApp``.
 *
 * @returns {void}
 */
document.addEventListener('DOMContentLoaded', () => {
  const rawConfig = readAppConfig();
  const config = mergeConfig(rawConfig);
  initializeApp(config);
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,61 @@
/**
 * Default configuration values applied when the server omits a field.
 *
 * @type {{
 *   refreshMs: number,
 *   refreshIntervalSeconds: number,
 *   chatEnabled: boolean,
 *   defaultChannel: string,
 *   defaultFrequency: string,
 *   mapCenter: { lat: number, lon: number },
 *   maxNodeDistanceKm: number,
 *   tileFilters: { light: string, dark: string }
 * }}
 */
export const DEFAULT_CONFIG = {
  refreshMs: 60_000,
  refreshIntervalSeconds: 60,
  chatEnabled: true,
  defaultChannel: '#MediumFast',
  defaultFrequency: '868MHz',
  mapCenter: { lat: 52.502889, lon: 13.404194 },
  maxNodeDistanceKm: 137,
  tileFilters: {
    light: 'grayscale(1) saturate(0) brightness(0.92) contrast(1.05)',
    dark: 'grayscale(1) invert(1) brightness(0.9) contrast(1.08)'
  }
};

/**
 * Coerce a candidate value to a finite number.
 *
 * @param {*} candidate Value to convert via ``Number``.
 * @param {number} fallback Returned when conversion yields NaN or Infinity.
 * @returns {number} A finite number.
 */
function finiteNumber(candidate, fallback) {
  const value = Number(candidate);
  return Number.isFinite(value) ? value : fallback;
}

/**
 * Merge raw configuration data from the DOM with the defaults.
 *
 * @param {Object<string, *>} raw Partial configuration read from ``readAppConfig``.
 * @returns {typeof DEFAULT_CONFIG} Fully populated configuration object.
 */
export function mergeConfig(raw) {
  const source = raw || {};
  const config = { ...DEFAULT_CONFIG, ...source };
  // Rebuild nested objects so defaults are copied, never shared by reference.
  config.mapCenter = {
    lat: Number(source.mapCenter?.lat ?? DEFAULT_CONFIG.mapCenter.lat),
    lon: Number(source.mapCenter?.lon ?? DEFAULT_CONFIG.mapCenter.lon)
  };
  config.tileFilters = {
    light: source.tileFilters?.light || DEFAULT_CONFIG.tileFilters.light,
    dark: source.tileFilters?.dark || DEFAULT_CONFIG.tileFilters.dark
  };
  config.refreshIntervalSeconds = finiteNumber(
    source.refreshIntervalSeconds ?? DEFAULT_CONFIG.refreshIntervalSeconds,
    DEFAULT_CONFIG.refreshIntervalSeconds
  );
  // An explicit refreshMs wins; otherwise it is derived from the interval.
  config.refreshMs = finiteNumber(
    source.refreshMs ?? config.refreshIntervalSeconds * 1000,
    DEFAULT_CONFIG.refreshMs
  );
  config.chatEnabled = Boolean(source.chatEnabled ?? DEFAULT_CONFIG.chatEnabled);
  config.defaultChannel = source.defaultChannel || DEFAULT_CONFIG.defaultChannel;
  config.defaultFrequency = source.defaultFrequency || DEFAULT_CONFIG.defaultFrequency;
  config.maxNodeDistanceKm = finiteNumber(
    source.maxNodeDistanceKm ?? DEFAULT_CONFIG.maxNodeDistanceKm,
    DEFAULT_CONFIG.maxNodeDistanceKm
  );
  return config;
}

View File

@@ -0,0 +1,96 @@
/*
* Copyright (C) 2025 l5yth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () {
  'use strict';

  /**
   * Resolve the page background colour from the `--bg` CSS custom property,
   * falling back to fixed light/dark colours when the property is unset.
   *
   * @returns {?string} CSS colour string, or ``null`` when `document.body`
   *     is not yet available.
   */
  function resolveBackgroundColor() {
    var body = document.body;
    if (!body) {
      return null;
    }
    var color = '';
    try {
      var computed = window.getComputedStyle(body);
      color = computed ? computed.getPropertyValue('--bg') : '';
      if (color) {
        color = color.trim();
      }
    } catch (err) {
      // getComputedStyle can throw in unusual environments; use the fallback.
      color = '';
    }
    if (color) {
      return color;
    }
    return body.classList.contains('dark') ? '#0e1418' : '#f6f3ee';
  }

  /**
   * Paint the resolved colour onto both `<html>` and `<body>`, clearing any
   * background image so the plain colour shows through.
   *
   * @returns {void}
   */
  function applyBackground() {
    var color = resolveBackgroundColor();
    if (!color) {
      return;
    }
    var targets = [document.documentElement, document.body];
    for (var i = 0; i < targets.length; i++) {
      targets[i].style.backgroundColor = color;
      targets[i].style.backgroundImage = 'none';
    }
  }

  /**
   * Perform the initial paint once the DOM is ready.
   *
   * @returns {void}
   */
  function init() {
    applyBackground();
  }

  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', init);
  } else {
    init();
  }

  // Repaint whenever the theme toggles.
  window.addEventListener('themechange', applyBackground);

  /**
   * Testing hooks exposing background helpers.
   *
   * @type {{
   *   applyBackground: function(): void,
   *   resolveBackgroundColor: function(): (?string)
   * }}
   */
  window.__potatoBackground = {
    applyBackground: applyBackground,
    resolveBackgroundColor: resolveBackgroundColor
  };
})();

View File

@@ -0,0 +1,130 @@
/*
* Copyright (C) 2025 l5yth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () {
  /**
   * Number of seconds theme preferences should persist in the cookie store.
   *
   * @type {number}
   */
  var THEME_COOKIE_MAX_AGE = 60 * 60 * 24 * 7;

  /**
   * Retrieve a cookie value by name.
   *
   * @param {string} name Cookie identifier.
   * @returns {?string} Decoded cookie value or ``null`` when absent.
   */
  function getCookie(name) {
    // Escape regex metacharacters in the name before building the matcher.
    var matcher = new RegExp(
      '(?:^|; )' + name.replace(/([.$?*|{}()\[\]\\/+^])/g, '\\$1') + '=([^;]*)'
    );
    var match = document.cookie.match(matcher);
    return match ? decodeURIComponent(match[1]) : null;
  }

  /**
   * Persist a cookie with optional attributes.
   *
   * @param {string} name Cookie identifier.
   * @param {string} value Value to store.
   * @param {Object<string, *>} [opts] Additional cookie attributes.
   * @returns {void}
   */
  function setCookie(name, value, opts) {
    var options = Object.assign(
      { path: '/', 'max-age': THEME_COOKIE_MAX_AGE, SameSite: 'Lax' },
      opts || {}
    );
    var updated = encodeURIComponent(name) + '=' + encodeURIComponent(value);
    for (var k in options) {
      if (!Object.prototype.hasOwnProperty.call(options, k)) continue;
      // Boolean `true` attributes are emitted without a value (e.g. Secure).
      updated += '; ' + k + (options[k] === true ? '' : '=' + options[k]);
    }
    document.cookie = updated;
  }

  /**
   * Store the user's preferred theme selection.
   *
   * @param {string} value Theme identifier to persist.
   * @returns {void}
   */
  function persistTheme(value) {
    setCookie('theme', value, { 'max-age': THEME_COOKIE_MAX_AGE });
  }

  /**
   * Apply a theme to the document root and body.
   *
   * @param {string} value Requested theme; anything other than ``'dark'``
   *     is treated as ``'light'``.
   * @returns {boolean} ``true`` when the dark theme was applied.
   */
  function applyTheme(value) {
    var themeValue = value === 'dark' ? 'dark' : 'light';
    var root = document.documentElement;
    var isDark = themeValue === 'dark';
    if (root) {
      root.setAttribute('data-theme', themeValue);
    }
    if (document.body) {
      document.body.classList.toggle('dark', isDark);
      document.body.setAttribute('data-theme', themeValue);
    }
    return isDark;
  }

  // Resolve the persisted theme (defaulting to dark), then re-persist it so
  // the cookie's expiry window is refreshed on every page load, and apply it
  // immediately to avoid a flash of the wrong theme.
  var theme = getCookie('theme');
  if (theme !== 'dark' && theme !== 'light') {
    theme = 'dark';
  }
  persistTheme(theme);
  applyTheme(theme);

  /**
   * Sync the toggle button label and map tiles once the DOM is ready.
   *
   * @returns {void}
   */
  function handleReady() {
    // Re-apply in case <body> did not exist when the script first ran.
    var isDark = applyTheme(theme);
    var btn = document.getElementById('themeToggle');
    if (btn) {
      btn.textContent = isDark ? '☀️' : '🌙';
    }
    if (typeof window.applyFiltersToAllTiles === 'function') {
      window.applyFiltersToAllTiles();
    }
  }

  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', handleReady);
  } else {
    handleReady();
  }

  /**
   * Testing hooks exposing cookie helpers for integration tests.
   *
   * @type {{
   *   getCookie: function(string): (?string),
   *   setCookie: function(string, string, Object<string, *>=): void,
   *   persistTheme: function(string): void,
   *   maxAge: number
   * }}
   */
  window.__themeCookie = {
    getCookie: getCookie,
    setCookie: setCookie,
    persistTheme: persistTheme,
    maxAge: THEME_COOKIE_MAX_AGE
  };
})();

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,45 @@
import { promises as fs } from 'node:fs';
import path from 'node:path';

/** Directory where the Node test runner writes V8 coverage JSON files. */
const coverageDir = 'coverage';
/** Directory receiving CI-facing reports. */
const reportsDir = 'reports';
/** Destination path for the exported coverage report. */
const outputPath = path.join(reportsDir, 'javascript-coverage.json');

/**
 * Create the reports directory if needed; exit with status 1 on failure.
 *
 * @returns {Promise<void>}
 */
async function ensureReportsDir() {
  try {
    await fs.mkdir(reportsDir, { recursive: true });
  } catch (error) {
    console.error('Failed to ensure reports directory', error);
    process.exit(1);
  }
}

/**
 * Copy the lexicographically newest coverage JSON into the reports directory.
 * Missing coverage data is logged and skipped rather than treated as fatal.
 *
 * @returns {Promise<void>}
 */
async function copyLatestCoverage() {
  let entries;
  try {
    entries = await fs.readdir(coverageDir);
  } catch (error) {
    if (error.code === 'ENOENT') {
      console.warn('Coverage directory not found; skipping export.');
      return;
    }
    throw error;
  }
  // Sorted order makes the "latest" pick deterministic across runs.
  const coverageFiles = entries.filter(name => name.endsWith('.json')).sort();
  if (coverageFiles.length === 0) {
    console.warn('No coverage files generated; skipping export.');
    return;
  }
  const latest = coverageFiles[coverageFiles.length - 1];
  await fs.copyFile(path.join(coverageDir, latest), outputPath);
  console.log(`Copied coverage report to ${outputPath}`);
}

await ensureReportsDir();
await copyLatestCoverage();

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff