Compare commits

..

61 Commits

Author SHA1 Message Date
l5y dcb512636c update readme for 0.2 (#118)
* update readme for 0.2

* update readme for 0.2

* update readme for 0.2

* update readme for 0.2
2025-09-17 10:23:36 +02:00
l5y 7c6bf801e9 Add PotatoMesh logo to header and favicon (#117)
* Add PotatoMesh logo to header and favicon

* Ensure header logo remains visible

* update svg
2025-09-17 10:12:35 +02:00
l5y 71e9f89aae Harden API auth and request limits (#116)
* Harden API auth and request limits

* run rufo
2025-09-17 08:00:25 +02:00
l5y 0936c6087b Add sortable node table columns (#114) 2025-09-17 07:06:13 +02:00
l5y 95e3e8723a Add short name overlay for node details (#111)
* Add node details overlay for short names

* Simplify short info overlay layout
2025-09-16 23:22:41 +02:00
l5y 671a910936 Adjust python ingestor interval to 60 seconds (#112) 2025-09-16 21:07:53 +02:00
l5y 3b64e829a8 Hide location columns on medium screens (#109) 2025-09-16 19:43:31 +02:00
l5y 84ed739a61 Handle message updates based on sender info (#108)
* Handle message updates based on sender info

* run rufo
2025-09-16 19:41:56 +02:00
l5y cffdb7dca6 Prioritize node posts in queued API updates (#107)
* Prioritize node posts in queued API updates

* run black
2025-09-16 19:30:38 +02:00
l5y 4182a9f83c Add auto-refresh toggle (#105) 2025-09-16 19:21:54 +02:00
l5y 9873f6105d Adjust Leaflet popup styling for dark mode (#104)
* Adjust Leaflet popup styling for dark mode

* some css fixing
2025-09-16 17:14:36 +00:00
l5y 8d3829cc4e feat: add site info overlay (#103) 2025-09-16 19:00:31 +02:00
l5y e424485761 Add long name tooltip to short name badge (#102) 2025-09-16 18:58:29 +02:00
l5y baf7f5d137 Ensure node numeric aliases are derived from canonical IDs (#101)
* Derive node numeric aliases when missing

* Preserve raw message senders when storing payloads

* Normalize packet message sender ids when available

* run rufo
2025-09-16 18:41:49 +02:00
l5y 3edf60c625 chore: clean up repository (#96)
* chore: clean up repository

* Fix message spec node lookup for numeric IDs (#98)

* Fix message spec node lookup for numeric IDs

* run rufo

* Fix message node fallback lookup (#99)
2025-09-16 15:25:12 +02:00
l5y 1beb343501 Handle SQLite busy errors when upserting nodes (#100) 2025-09-16 15:24:01 +02:00
l5y 0c0f877b13 Configure Sinatra logging level from DEBUG flag (#97)
* Configure Sinatra logging level

* Fix logger level helper invocation

* Fix Sinatra logger helper definition syntax
2025-09-16 14:46:50 +02:00
l5y f7a1b5c5ad Add penetration tests for authentication and SQL injection (#95) 2025-09-16 13:13:57 +02:00
l5y 051d09dcaf Document Python and Ruby source modules (#94) 2025-09-16 13:13:12 +02:00
l5y eb900aecb6 Add tests covering mesh helper edge cases (#93)
* test: expand coverage for mesh helpers

* run black
2025-09-16 12:48:01 +02:00
l5y f16393eafd fix py code cov (#92) 2025-09-16 12:10:17 +02:00
l5y 49dcfebfb3 Add Codecov coverage and test analytics for Python CI (#91) 2025-09-16 12:04:46 +02:00
l5y 1c13b99f3b Skip null fields when choosing packet identifiers (#88) 2025-09-16 11:56:02 +02:00
l5y 54a1eb5b42 create python yml ga (#90)
* Create python.yml

* ci: add black

* run an actual formatter

* also add rufo

* fix pytest

* run black
2025-09-16 11:50:33 +02:00
l5y 2818c6d2b8 Add unit tests for mesh ingestor script (#89) 2025-09-16 11:44:28 +02:00
l5y f4aa5d3873 Add coverage for debug logging on messages without sender (#86)
* Add debug logging spec for messages without sender

* Route debug logging through Kernel.warn

* Relax debug log matchers
2025-09-16 11:33:03 +02:00
l5y 542f4dd0e2 Handle concurrent node snapshot updates (#85) 2025-09-16 11:10:11 +02:00
l5y 4a72cdda75 Fix extraction of packet sender ids (#84) 2025-09-16 10:35:11 +02:00
l5y 4b9d581448 Add coverage for API authentication and payload edge cases (#83) 2025-09-16 10:18:10 +02:00
l5y 1d3b3f11e9 Add Codecov test analytics to Ruby workflow (#82) 2025-09-16 10:12:25 +02:00
l5y e97824fd0b Configure SimpleCov for Codecov coverage (#81) 2025-09-16 09:58:44 +02:00
l5y 1cd9058685 update codecov job (#80)
* update codecov job

* add codecov condif
2025-09-16 09:55:53 +02:00
l5y 47e23ea14c fix readme badges (#79)
* fix readme badges

* fix readme badges
2025-09-16 09:46:44 +02:00
l5y afd18794c7 Add Codecov upload step to Ruby workflow (#78) 2025-09-16 09:43:09 +02:00
l5y 203bd623bd Add Apache license headers to source files (#77)
* Add Apache license headers to source files

* fix formatting
2025-09-16 09:39:28 +02:00
l5y 2b6b44a31d Add integration specs for node and message APIs (#76) 2025-09-16 09:29:31 +02:00
l5y 0059a6aab3 docs: update for 0.2.0 release (#75)
* docs: update for 0.2.0 release

* docs: add scrot 0.2
2025-09-16 09:23:11 +02:00
l5y fc30a080ff create ruby workflow (#74)
* create ruby workflow

* add step for dependencies

* bump ruby version

* Set up Ruby action in web directory
2025-09-16 08:52:33 +02:00
l5y 7399c02be9 Add RSpec tests for app boot and database setup (#73) 2025-09-16 08:25:13 +02:00
l5y 02e985d2a8 Align refresh controls with status text (#72)
* Align refresh controls with status text

* Improve mobile alignment for refresh controls
2025-09-16 08:21:15 +02:00
l5y 954352809f spec: update testdata 2025-09-16 08:11:11 +02:00
l5y 7eb36a5a3d remove duplication 2025-09-15 21:35:59 +02:00
l5y 0768b4d91a Improve mobile layout (#68)
* Improve mobile layout

* styling tweaks
2025-09-15 21:32:56 +02:00
l5y be1306c9c0 Normalize message sender IDs using node numbers (#67) 2025-09-15 21:04:29 +02:00
l5y 7904717597 style: simplify node table (#65) 2025-09-15 18:16:36 +02:00
l5y e2c19e1611 Add debug logging for missing from_id (#64) 2025-09-15 18:15:46 +02:00
l5y b230e79ab0 Handle nested dataclasses in node snapshots (#63) 2025-09-15 14:59:23 +02:00
l5y 31727e35bb add placeholder for default frequency 2025-09-15 14:48:12 +02:00
l5y 22127bbfb4 ignore log files 2025-09-15 14:44:32 +02:00
l5y 413278544a Log node object on snapshot update failure (#62) 2025-09-15 14:34:56 +02:00
l5y 580a588df7 Run schema initialization only when database or tables are missing (#61) 2025-09-15 14:05:01 +02:00
l5y b39b83fb51 Send mesh data to Potatomesh API (#60)
* feat: post mesh data to API

* Serialize node objects before posting

* don't put raw json in api/db
2025-09-15 14:00:48 +02:00
l5y 6d948603c9 Convert boolean flags to integers for SQLite (#59) 2025-09-15 13:37:30 +02:00
l5y 648bcc9b92 Use packet id as message primary key (#58)
* Use packet id as message primary key

* fix query

* fix query
2025-09-15 13:34:59 +02:00
l5y 4dc1227be7 Add POST /api/messages and enforce API token (#56) 2025-09-15 13:13:47 +02:00
l5y 3b097feaae Update README.md 2025-09-15 12:17:45 +02:00
l5y da2e5fbde1 feat: parameterize community info (#55)
* feat: parameterize community info

* chore: restore test data and document env defaults

* also make default channel configurable
2025-09-15 12:15:51 +02:00
l5y 003db7c36a feat: add dark mode toggle (#54)
* feat: add dark mode toggle

* fix chat colors in dark mode
2025-09-15 11:53:49 +02:00
l5y 9aa640338d Update README.md 2025-09-15 11:44:44 +02:00
l5y 3c24b71f16 ignore copies 2025-09-15 11:42:27 +02:00
l5y eee6738a9c add changelog 2025-09-15 08:49:12 +02:00
34 changed files with 11485 additions and 4280 deletions
+6
View File
@@ -0,0 +1,6 @@
coverage:
status:
project:
default:
target: 99%
threshold: 1%
+4 -9
View File
@@ -1,15 +1,10 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "ruby" # See documentation for possible values
directory: "/web" # Location of package manifests
- package-ecosystem: "ruby"
directory: "/web"
schedule:
interval: "weekly"
- package-ecosystem: "python" # See documentation for possible values
directory: "/data" # Location of package manifests
- package-ecosystem: "python"
directory: "/"
schedule:
interval: "weekly"
+1 -61
View File
@@ -1,14 +1,3 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"
on:
@@ -20,20 +9,10 @@ on:
jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
# required for all workflows
security-events: write
# required to fetch internal or private CodeQL packs
packages: read
# only required for workflows in private repositories
actions: read
contents: read
@@ -47,53 +26,14 @@ jobs:
build-mode: none
- language: javascript-typescript
build-mode: none
# CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'rust', 'swift'
# Use `c-cpp` to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Add any setup steps before running the `github/codeql-action/init` action.
# This includes steps like installing compilers or runtimes (`actions/setup-node`
# or others). This is typically only required for manual builds.
# - name: Setup runtime (example)
# uses: actions/setup-example@v1
# Initializes the CodeQL tools for scanning.
uses: actions/checkout@v5
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
+47
View File
@@ -0,0 +1,47 @@
name: Python
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Python 3.13
uses: actions/setup-python@v3
with:
python-version: "3.13"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install black pytest pytest-cov meshtastic
- name: Lint with black
run: |
black --check ./
- name: Test with pytest and coverage
run: |
mkdir -p reports
pytest --cov=data --cov-report=term --cov-report=xml:reports/python-coverage.xml --junitxml=reports/python-junit.xml
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: reports/python-coverage.xml
flags: python-ingestor
name: python-ingestor
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: reports/python-junit.xml
flags: python-ingestor
+55
View File
@@ -0,0 +1,55 @@
name: Ruby
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
permissions:
contents: read
jobs:
test:
defaults:
run:
working-directory: ./web
runs-on: ubuntu-latest
strategy:
matrix:
ruby-version: ['3.3', '3.4']
steps:
- uses: actions/checkout@v5
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby-version }}
bundler-cache: true
working-directory: ./web
- name: Set up dependencies
run: bundle install
- name: Run rufo
run: bundle exec rufo --check .
- name: Run tests
run: |
mkdir -p tmp/test-results
bundle exec rspec \
--require rspec_junit_formatter \
--format progress \
--format RspecJunitFormatter \
--out tmp/test-results/rspec.xml
- name: Upload test results to Codecov
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./web/tmp/test-results/rspec.xml
flags: ruby-${{ matrix.ruby-version }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
flags: ruby-${{ matrix.ruby-version }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+5
View File
@@ -57,3 +57,8 @@ Gemfile.lock
# Python cache directories
__pycache__/
.coverage
coverage/
coverage.xml
htmlcov/
reports/
+40
View File
@@ -0,0 +1,40 @@
# CHANGELOG
## v0.2.0
## v0.1.0
* Show daily node count in title and header by @l5yth in <https://github.com/l5yth/potato-mesh/pull/49>
* Add daily date separators to chat log by @l5yth in <https://github.com/l5yth/potato-mesh/pull/47>
* Feat: make frontend responsive for mobile by @l5yth in <https://github.com/l5yth/potato-mesh/pull/46>
* Harden mesh utilities by @l5yth in <https://github.com/l5yth/potato-mesh/pull/45>
* Filter out distant nodes from Berlin map view by @l5yth in <https://github.com/l5yth/potato-mesh/pull/43>
* Display filtered active node counts in #MediumFast subheading by @l5yth in <https://github.com/l5yth/potato-mesh/pull/44>
* Limit chat log and highlight short names by role by @l5yth in <https://github.com/l5yth/potato-mesh/pull/42>
* Fix string/integer comparison in node query by @l5yth in <https://github.com/l5yth/potato-mesh/pull/40>
* Escape chat message and node entries by @l5yth in <https://github.com/l5yth/potato-mesh/pull/39>
* Sort chat entries by timestamp by @l5yth in <https://github.com/l5yth/potato-mesh/pull/38>
* Feat: append messages to chat log by @l5yth in <https://github.com/l5yth/potato-mesh/pull/36>
* Normalize future timestamps for nodes by @l5yth in <https://github.com/l5yth/potato-mesh/pull/35>
* Optimize web frontend and Ruby app by @l5yth in <https://github.com/l5yth/potato-mesh/pull/32>
* Add messages API endpoint with node details by @l5yth in <https://github.com/l5yth/potato-mesh/pull/33>
* Clamp node timestamps and sync last_heard with position time by @l5yth in <https://github.com/l5yth/potato-mesh/pull/31>
* Refactor: replace deprecated utcfromtimestamp by @l5yth in <https://github.com/l5yth/potato-mesh/pull/30>
* Add optional debug logging for node and message operations by @l5yth in <https://github.com/l5yth/potato-mesh/pull/29>
* Data: enable serial collection of messages on channel 0 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/25>
* Add first_heard timestamp by @l5yth in <https://github.com/l5yth/potato-mesh/pull/23>
* Add persistent footer with contact information by @l5yth in <https://github.com/l5yth/potato-mesh/pull/22>
* Sort initial chat entries by last-heard by @l5yth in <https://github.com/l5yth/potato-mesh/pull/20>
* Display position time in relative 'time ago' format by @l5yth in <https://github.com/l5yth/potato-mesh/pull/19>
* Adjust marker size and map tile opacity by @l5yth in <https://github.com/l5yth/potato-mesh/pull/18>
* Add chat box for node notifications by @l5yth in <https://github.com/l5yth/potato-mesh/pull/17>
* Color markers by role with grayscale map by @l5yth in <https://github.com/l5yth/potato-mesh/pull/16>
* Default missing node role to client by @l5yth in <https://github.com/l5yth/potato-mesh/pull/15>
* Show live node count in nodes page titles by @l5yth in <https://github.com/l5yth/potato-mesh/pull/14>
* Filter stale nodes and add live search by @l5yth in <https://github.com/l5yth/potato-mesh/pull/13>
* Remove raw node JSON column by @l5yth in <https://github.com/l5yth/potato-mesh/pull/12>
* Add JSON ingest API for node updates by @l5yth in <https://github.com/l5yth/potato-mesh/pull/11>
* Ignore Python __pycache__ directories by @l5yth in <https://github.com/l5yth/potato-mesh/pull/10>
* Feat: load nodes from json for tests by @l5yth in <https://github.com/l5yth/potato-mesh/pull/8>
* Handle dataclass fields in node snapshots by @l5yth in <https://github.com/l5yth/potato-mesh/pull/6>
* Add index page and /nodes route for node map by @l5yth in <https://github.com/l5yth/potato-mesh/pull/4>
+2 -2
View File
@@ -33,7 +33,7 @@
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
Object form, made available under the Licen2se, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Copyright (C) 2025 l5yth
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
+85 -62
View File
@@ -1,70 +1,37 @@
# potato-mesh
# 🥔 PotatoMesh
a simple meshtastic node dashboard for your local community. here: berlin mediumfast.
[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/l5yth/potato-mesh/ruby.yml?branch=main)](https://github.com/l5yth/potato-mesh/actions)
[![GitHub release (latest by date)](https://img.shields.io/github/v/release/l5yth/potato-mesh)](https://github.com/l5yth/potato-mesh/releases)
[![codecov](https://codecov.io/gh/l5yth/potato-mesh/branch/main/graph/badge.svg?token=FS7252JVZT)](https://codecov.io/gh/l5yth/potato-mesh)
[![Open-Source License](https://img.shields.io/github/license/l5yth/potato-mesh)](LICENSE)
[![Contributions Welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/l5yth/potato-mesh/issues)
![screenshot of the first version](./scrot-0.1.png)
A simple Meshtastic-powered node dashboard for your local community. _No MQTT clutter, just local LoRa aether._
## status
* Web app with chat window and map view showing nodes and messages.
* API to POST (authenticated) and to GET nodes and messages.
* Supplemental Python ingestor to feed the POST APIs of the Web app with data remotely.
* Shows new node notifications (first seen) in chat.
* Allows searching and filtering for nodes in map and table view.
_in active development._
Live demo for Berlin #MediumFast: [potatomesh.net](https://potatomesh.net)
what works:
![screenshot of the second version](./scrot-0.2.png)
* updating nodes from a locally connected meshtastic device (via serial)
* awaiting messages on default channel (0) from a local meshtastic device
* storing nodes and messages in a local database (sqlite3)
* displaying nodes ordered by last seen in a web app table view
* displaying nodes by geographic coordinates on a map layer, coloured by device role
* displaying new node notifications and chat messages in default channel in chat box
* displaying active node count and filtering nodes by name
* exposing nodes and messages to api endpoints
## Web App
what does not work _(yet):_
* posting nodes and messages to the api endpoints _(wip)_
## requirements
requires a meshtastic node connected (via serial) to gather mesh data and the meshtastic cli.
requires the meshtastic python api for the database.
Requires Ruby for the Sinatra web app and SQLite3 for the app's database.
```bash
python -m venv .venv
source .venv/bin/activate
pip install -U meshtastic
```
requires latest ruby and ruby gems for the sinatra web app.
```bash
gem install bundler
pacman -S ruby sqlite3
gem install sinatra sqlite3 rackup puma rspec rack-test rufo
cd ./web
bundle install
```
### database
### Run
uses python meshtastic library to ingest mesh data into an sqlite3 database locally
run `mesh.sh` in `data/` to keep updating node records and parsing new incoming messages.
```bash
MESH_SERIAL=/dev/ttyACM0 DEBUG=1 ./mesh.sh
[...]
[debug] upserted node !849b7154 shortName='7154'
[debug] upserted node !ba653ae8 shortName='3ae8'
[debug] upserted node !16ced364 shortName='Pat'
[debug] stored message from '!9ee71c38' to '^all' ch=0 text='Guten Morgen!'
```
enable debug output with `DEBUG=1`, specify the serial port with `MESH_SERIAL` (default `/dev/ttyACM0`).
### web app
uses a ruby sinatra webapp to display data from the sqlite database
run `app.sh` in `web/` to run the sinatra webserver and check
[127.0.0.1:41447](http://127.0.0.1:41447/) for the correct node map.
Check out the `app.sh` run script in `./web` directory.
```bash
API_TOKEN="1eb140fd-cab4-40be-b862-41c607762246" ./app.sh
@@ -76,17 +43,73 @@ Puma starting in single mode...
* Listening on http://127.0.0.1:41447
```
set `API_TOKEN` required for authorizations on the api post-endpoints (wip).
Check [127.0.0.1:41447](http://127.0.0.1:41447/) for the development preview
of the node map. Set `API_TOKEN` required for authorizations on the API's POST endpoints.
## api
The web app can be configured with environment variables (defaults shown):
the web app contains an api:
* `SITE_NAME` - title and header shown in the ui (default: "Meshtastic Berlin")
* `DEFAULT_CHANNEL` - default channel shown in the ui (default: "#MediumFast")
* `DEFAULT_FREQUENCY` - default channel shown in the ui (default: "868MHz")
* `MAP_CENTER_LAT` / `MAP_CENTER_LON` - default map center coordinates (default: `52.502889` / `13.404194`)
* `MAX_NODE_DISTANCE_KM` - hide nodes farther than this distance from the center (default: `137`)
* `MATRIX_ROOM` - matrix room id for a footer link (default: `#meshtastic-berlin:matrix.org`)
* GET `/api/nodes?limit=1000` - returns the latest 1000 nodes reported to the app
* GET `/api/messages?limit=1000` - returns the latest 1000 messages
Example:
the `POST` apis are _currently being worked on (tm)._
```bash
SITE_NAME="Meshtastic Berlin" MAP_CENTER_LAT=52.502889 MAP_CENTER_LON=13.404194 MAX_NODE_DISTANCE_KM=137 MATRIX_ROOM="#meshtastic-berlin:matrix.org" ./app.sh
```
## license
### API
apache v2.0
The web app contains an API:
* GET `/api/nodes?limit=100` - returns the latest 100 nodes reported to the app
* GET `/api/messages?limit=100` - returns the latest 100 messages
* POST `/api/nodes` - upserts nodes provided as JSON object mapping node ids to node data (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends messages provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
The `API_TOKEN` environment variable must be set to a non-empty value and match the token supplied in the `Authorization` header for `POST` requests.
## Python Ingestor
The web app is not meant to be run locally connected to a Meshtastic node but rather
on a remote host without access to a physical Meshtastic device. Therefore, it only
accepts data through the API POST endpoints. Benefit is, here multiple nodes across the
community can feed the dashboard with data. The web app handles messages and nodes
by ID and there will be no duplication.
For convenience, the directory `./data` contains a Python ingestor. It connects to a local
Meshtastic node via serial port to gather nodes and messages seen by the node.
```bash
pacman -S python
cd ./data
python -m venv .venv
source .venv/bin/activate
pip install -U meshtastic
```
It uses the Meshtastic Python library to ingest mesh data and post nodes and messages
to the configured potato-mesh instance.
Check out `mesh.sh` ingestor script in the `./data` directory.
```bash
POTATOMESH_INSTANCE=http://127.0.0.1:41447 API_TOKEN=1eb140fd-cab4-40be-b862-41c607762246 MESH_SERIAL=/dev/ttyACM0 DEBUG=1 ./mesh.sh
Mesh daemon: nodes+messages → http://127.0.0.1 | port=41447 | channel=0
[...]
[debug] upserted node !849b7154 shortName='7154'
[debug] upserted node !ba653ae8 shortName='3ae8'
[debug] upserted node !16ced364 shortName='Pat'
[debug] stored message from '!9ee71c38' to '^all' ch=0 text='Guten Morgen!'
```
Run the script with `POTATOMESH_INSTANCE` and `API_TOKEN` to keep updating
node records and parsing new incoming messages. Enable debug output with `DEBUG=1`,
specify the serial port with `MESH_SERIAL` (default `/dev/ttyACM0`), etc.
## License
Apache v2.0, Contact <COM0@l5y.tech>
+2
View File
@@ -2,3 +2,5 @@
*.db-wal
*.db-shm
*.backup
*.copy
*.log
+19
View File
@@ -0,0 +1,19 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data utilities for the Potato Mesh synchronisation daemon.
The ``data.mesh`` module exposes helpers for reading Meshtastic node and
message information before forwarding it to the accompanying web application.
"""
+328 -110
View File
@@ -1,104 +1,211 @@
#!/usr/bin/env python3
import json, os, sqlite3, time, threading, signal
from pathlib import Path
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mesh daemon helpers for synchronising Meshtastic data.
This module wraps the Meshtastic serial interface and exposes helper
functions that serialise nodes and text messages to JSON before forwarding
them to the accompanying web API. It also provides the long-running daemon
entry point that performs these synchronisation tasks.
"""
import dataclasses
import heapq
import itertools
import json, os, time, threading, signal, urllib.request, urllib.error
from collections.abc import Mapping
from meshtastic.serial_interface import SerialInterface
from meshtastic.mesh_interface import MeshInterface
from pubsub import pub
from google.protobuf.json_format import MessageToDict
from google.protobuf.message import Message as ProtoMessage
# --- Config (env overrides) ---------------------------------------------------
DB = os.environ.get("MESH_DB", "mesh.db")
PORT = os.environ.get("MESH_SERIAL", "/dev/ttyACM0")
SNAPSHOT_SECS = int(os.environ.get("MESH_SNAPSHOT_SECS", "30"))
SNAPSHOT_SECS = int(os.environ.get("MESH_SNAPSHOT_SECS", "60"))
CHANNEL_INDEX = int(os.environ.get("MESH_CHANNEL_INDEX", "0"))
DEBUG = os.environ.get("DEBUG") == "1"
INSTANCE = os.environ.get("POTATOMESH_INSTANCE", "").rstrip("/")
API_TOKEN = os.environ.get("API_TOKEN", "")
# --- DB setup -----------------------------------------------------------------
nodeSchema = Path(__file__).with_name("nodes.sql").read_text()
conn = sqlite3.connect(DB, check_same_thread=False)
conn.executescript(nodeSchema)
msgSchema = Path(__file__).with_name("messages.sql").read_text()
conn.executescript(msgSchema)
conn.commit()
DB_LOCK = threading.Lock()
# --- POST queue ----------------------------------------------------------------
_POST_QUEUE_LOCK = threading.Lock()
_POST_QUEUE = []
_POST_QUEUE_COUNTER = itertools.count()
_POST_QUEUE_ACTIVE = False
_NODE_POST_PRIORITY = 0
_MESSAGE_POST_PRIORITY = 10
_DEFAULT_POST_PRIORITY = 50
def _get(obj, key, default=None):
"""Return value for key/attribute from dicts or objects."""
"""Return a key or attribute value from ``obj``.
Args:
obj: Mapping or object containing the desired value.
key: Key or attribute name to look up.
default: Value returned when the key is missing.
Returns:
The resolved value if present, otherwise ``default``.
"""
if isinstance(obj, dict):
return obj.get(key, default)
return getattr(obj, key, default)
# --- Node upsert --------------------------------------------------------------
def upsert_node(node_id, n):
user = _get(n, "user") or {}
met = _get(n, "deviceMetrics") or {}
pos = _get(n, "position") or {}
lh = _get(n, "lastHeard")
pt = _get(pos, "time")
now = int(time.time())
if pt is not None and pt > now:
pt = None
if lh is not None and lh > now:
lh = now
if pt is not None and (lh is None or lh < pt):
lh = pt
row = (
node_id,
_get(n, "num"),
_get(user, "shortName"),
_get(user, "longName"),
_get(user, "macaddr"),
_get(user, "hwModel") or _get(n, "hwModel"),
_get(user, "role"),
_get(user, "publicKey"),
_get(user, "isUnmessagable"),
_get(n, "isFavorite"),
_get(n, "hopsAway"),
_get(n, "snr"),
lh,
lh,
_get(met, "batteryLevel"),
_get(met, "voltage"),
_get(met, "channelUtilization"),
_get(met, "airUtilTx"),
_get(met, "uptimeSeconds"),
pt,
_get(pos, "locationSource"),
_get(pos, "latitude"),
_get(pos, "longitude"),
_get(pos, "altitude"),
# --- HTTP helpers -------------------------------------------------------------
def _post_json(path: str, payload: dict):
"""Send a JSON payload to the configured web API.
Args:
path: API path relative to the configured ``INSTANCE``.
payload: Mapping serialised to JSON for the request body.
"""
if not INSTANCE:
return
url = f"{INSTANCE}{path}"
data = json.dumps(payload).encode("utf-8")
req = urllib.request.Request(
url, data=data, headers={"Content-Type": "application/json"}
)
with DB_LOCK:
conn.execute(
"""
INSERT INTO nodes(node_id,num,short_name,long_name,macaddr,hw_model,role,public_key,is_unmessagable,is_favorite,
hops_away,snr,last_heard,first_heard,battery_level,voltage,channel_utilization,air_util_tx,uptime_seconds,
position_time,location_source,latitude,longitude,altitude)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(node_id) DO UPDATE SET
num=excluded.num, short_name=excluded.short_name, long_name=excluded.long_name, macaddr=excluded.macaddr,
hw_model=excluded.hw_model, role=excluded.role, public_key=excluded.public_key, is_unmessagable=excluded.is_unmessagable,
is_favorite=excluded.is_favorite, hops_away=excluded.hops_away, snr=excluded.snr, last_heard=excluded.last_heard,
battery_level=excluded.battery_level, voltage=excluded.voltage, channel_utilization=excluded.channel_utilization,
air_util_tx=excluded.air_util_tx, uptime_seconds=excluded.uptime_seconds, position_time=excluded.position_time,
location_source=excluded.location_source, latitude=excluded.latitude, longitude=excluded.longitude,
altitude=excluded.altitude
""",
row,
if API_TOKEN:
req.add_header("Authorization", f"Bearer {API_TOKEN}")
try:
with urllib.request.urlopen(req, timeout=10) as resp:
resp.read()
except Exception as e:
if DEBUG:
print(f"[warn] POST {url} failed: {e}")
def _enqueue_post_json(path: str, payload: dict, priority: int):
    """Insert a pending POST request into the shared priority queue.

    Args:
        path: API path the request should eventually be sent to.
        payload: JSON-serialisable request body.
        priority: Lower values are drained before higher ones; the counter
            breaks ties in FIFO order.
    """
    with _POST_QUEUE_LOCK:
        entry = (priority, next(_POST_QUEUE_COUNTER), path, payload)
        heapq.heappush(_POST_QUEUE, entry)
def _drain_post_queue():
    """Send queued POST requests one by one in priority order.

    The queue lock is held only while popping; the HTTP request itself runs
    outside the lock so producers are never blocked on network I/O. When the
    queue empties, the active flag is cleared and the loop exits.
    """
    global _POST_QUEUE_ACTIVE
    while True:
        with _POST_QUEUE_LOCK:
            if len(_POST_QUEUE) == 0:
                _POST_QUEUE_ACTIVE = False
                return
            head = heapq.heappop(_POST_QUEUE)
        _post_json(head[2], head[3])
def _queue_post_json(
    path: str, payload: dict, *, priority: int = _DEFAULT_POST_PRIORITY
):
    """Enqueue a POST request and start the drain loop if it is idle.

    Args:
        path: API path the request should be sent to.
        payload: JSON-serialisable request body.
        priority: Queue priority; defaults to ``_DEFAULT_POST_PRIORITY``.
    """
    global _POST_QUEUE_ACTIVE
    _enqueue_post_json(path, payload, priority)
    should_drain = False
    with _POST_QUEUE_LOCK:
        if not _POST_QUEUE_ACTIVE:
            # Claim the drain loop under the lock so only one caller runs it.
            _POST_QUEUE_ACTIVE = True
            should_drain = True
    if should_drain:
        _drain_post_queue()
def _clear_post_queue():
    """Drop all pending POST requests and mark the drain loop idle.

    Used by the test suite to reset module state between cases.
    """
    global _POST_QUEUE_ACTIVE
    with _POST_QUEUE_LOCK:
        del _POST_QUEUE[:]
        _POST_QUEUE_ACTIVE = False
# --- Node upsert --------------------------------------------------------------
def _node_to_dict(n) -> dict:
    """Render a Meshtastic node or user structure as JSON-safe plain data.

    Args:
        n: ``dict``, dataclass instance or protobuf message describing a
            node or user.

    Returns:
        A structure built only from JSON serialisable primitives.
    """

    def _to_plain(obj):
        """Recursively reduce ``obj`` to JSON-compatible primitives."""
        if isinstance(obj, dict):
            return {key: _to_plain(item) for key, item in obj.items()}
        if isinstance(obj, (list, tuple, set)):
            return [_to_plain(item) for item in obj]
        if dataclasses.is_dataclass(obj):
            field_names = obj.__dataclass_fields__
            return {name: _to_plain(getattr(obj, name)) for name in field_names}
        if isinstance(obj, ProtoMessage):
            return MessageToDict(
                obj, preserving_proto_field_name=True, use_integers_for_enums=False
            )
        if isinstance(obj, bytes):
            # Prefer readable text; fall back to hex for non-UTF-8 blobs.
            try:
                return obj.decode()
            except Exception:
                return obj.hex()
        if obj is None or isinstance(obj, (str, int, float, bool)):
            return obj
        # Last resort: round-trip through JSON, stringifying unknown types.
        try:
            return json.loads(json.dumps(obj, default=str))
        except Exception:
            return str(obj)

    return _to_plain(n)
def upsert_node(node_id, n):
    """Queue a node snapshot for delivery to the web API.

    Args:
        node_id: Unique identifier of the node in the mesh.
        n: Node object obtained from the Meshtastic serial interface.
    """
    snapshot = _node_to_dict(n)
    payload = {node_id: snapshot}
    _queue_post_json("/api/nodes", payload, priority=_NODE_POST_PRIORITY)
    if DEBUG:
        short = _get(_get(snapshot, "user") or {}, "shortName")
        print(f"[debug] upserted node {node_id} shortName={short!r}")
# --- Message logging via PubSub -----------------------------------------------
def _iso(ts: int | float) -> str:
"""Return an ISO-8601 timestamp string for ``ts``.
Args:
ts: POSIX timestamp as ``int`` or ``float``.
Returns:
Timestamp formatted with a trailing ``Z`` to denote UTC.
"""
import datetime
return (
@@ -108,25 +215,59 @@ def _iso(ts: int | float) -> str:
)
def _first(d: dict, *names, default=None):
"""Return first present key from names (supports nested 'a.b' lookups)."""
def _first(d, *names, default=None):
"""Return the first non-empty key from ``names`` (supports nested lookups).
Keys that resolve to ``None`` or an empty string are skipped so callers can
provide multiple potential field names without accidentally capturing an
explicit ``null`` value.
Args:
d: Mapping or object to query.
*names: Candidate field names using dotted paths for nesting.
default: Value returned when all candidates are missing.
Returns:
The first matching value or ``default`` if none resolve to content.
"""
def _mapping_get(obj, key):
if isinstance(obj, Mapping) and key in obj:
return True, obj[key]
if hasattr(obj, "__getitem__"):
try:
return True, obj[key]
except Exception:
pass
if hasattr(obj, key):
return True, getattr(obj, key)
return False, None
for name in names:
cur = d
parts = name.split(".")
ok = True
for p in parts:
if isinstance(cur, dict) and p in cur:
cur = cur[p]
else:
ok = False
for part in name.split("."):
ok, cur = _mapping_get(cur, part)
if not ok:
break
if ok:
if cur is None:
continue
if isinstance(cur, str) and cur == "":
continue
return cur
return default
def _pkt_to_dict(packet) -> dict:
"""Convert protobuf MeshPacket or already-dict into a JSON-friendly dict."""
"""Normalise a received packet into a JSON-friendly dictionary.
Args:
packet: Protobuf ``MeshPacket`` or dictionary received from the daemon.
Returns:
Packet data ready for JSON serialisation.
"""
if isinstance(packet, dict):
return packet
if isinstance(packet, ProtoMessage):
@@ -141,9 +282,14 @@ def _pkt_to_dict(packet) -> dict:
def store_packet_dict(p: dict):
"""
Store only TEXT messages (decoded.payload.text) to the DB.
Safe against snake/camel case differences.
"""Persist text messages extracted from a decoded packet.
Only packets from the ``TEXT_MESSAGE_APP`` port are forwarded to the
web API. Field lookups tolerate camelCase and snake_case variants for
compatibility across Meshtastic releases.
Args:
p: Packet dictionary produced by ``_pkt_to_dict``.
"""
dec = p.get("decoded") or {}
text = _first(dec, "payload.text", "text", default=None)
@@ -166,35 +312,39 @@ def store_packet_dict(p: dict):
ch = 0
# timestamps & ids
pkt_id = _first(p, "id", "packet_id", "packetId", default=None)
if pkt_id is None:
return # ignore packets without an id
rx_time = int(_first(p, "rxTime", "rx_time", default=time.time()))
from_id = _first(p, "fromId", "from_id", "from", default=None)
to_id = _first(p, "toId", "to_id", "to", default=None)
if (from_id is None or str(from_id) == "") and DEBUG:
try:
raw = json.dumps(p, default=str)
except Exception:
raw = str(p)
print(f"[debug] packet missing from_id: {raw}")
# link metrics
snr = _first(p, "snr", "rx_snr", "rxSnr", default=None)
rssi = _first(p, "rssi", "rx_rssi", "rxRssi", default=None)
hop = _first(p, "hopLimit", "hop_limit", default=None)
row = (
rx_time,
_iso(rx_time),
from_id,
to_id,
ch,
str(portnum) if portnum is not None else None,
text,
float(snr) if snr is not None else None,
int(rssi) if rssi is not None else None,
int(hop) if hop is not None else None,
)
with DB_LOCK:
conn.execute(
"""INSERT INTO messages
(rx_time, rx_iso, from_id, to_id, channel, portnum, text, snr, rssi, hop_limit)
VALUES (?,?,?,?,?,?,?,?,?,?)""",
row,
)
conn.commit()
msg = {
"id": int(pkt_id),
"rx_time": rx_time,
"rx_iso": _iso(rx_time),
"from_id": from_id,
"to_id": to_id,
"channel": ch,
"portnum": str(portnum) if portnum is not None else None,
"text": text,
"snr": float(snr) if snr is not None else None,
"rssi": int(rssi) if rssi is not None else None,
"hop_limit": int(hop) if hop is not None else None,
}
_queue_post_json("/api/messages", msg, priority=_MESSAGE_POST_PRIORITY)
if DEBUG:
print(
@@ -204,6 +354,13 @@ def store_packet_dict(p: dict):
# PubSub receive handler
def on_receive(packet, interface):
"""PubSub callback that stores inbound text messages.
Args:
packet: Packet received from the Meshtastic interface.
interface: Serial interface instance (unused).
"""
p = None
try:
p = _pkt_to_dict(packet)
@@ -214,7 +371,54 @@ def on_receive(packet, interface):
# --- Main ---------------------------------------------------------------------
def _node_items_snapshot(nodes_obj, retries: int = 3):
"""Return a snapshot list of ``(node_id, node)`` pairs.
The Meshtastic ``SerialInterface`` updates ``iface.nodes`` from another
thread. When that happens during iteration Python raises ``RuntimeError``.
To keep the daemon quiet we retry a few times and, if it keeps changing,
bail out for this loop.
Args:
nodes_obj: Container mapping node IDs to node objects.
retries: Number of attempts performed before giving up.
Returns:
Snapshot of node entries or ``None`` when retries were exhausted because
the container kept mutating.
"""
if not nodes_obj:
return []
items_callable = getattr(nodes_obj, "items", None)
if callable(items_callable):
for _ in range(max(1, retries)):
try:
return list(items_callable())
except RuntimeError as err:
if "dictionary changed size during iteration" not in str(err):
raise
time.sleep(0)
return None
if hasattr(nodes_obj, "__iter__") and hasattr(nodes_obj, "__getitem__"):
for _ in range(max(1, retries)):
try:
keys = list(nodes_obj)
return [(k, nodes_obj[k]) for k in keys]
except RuntimeError as err:
if "dictionary changed size during iteration" not in str(err):
raise
time.sleep(0)
return None
return []
def main():
"""Run the mesh synchronisation daemon."""
# Subscribe to PubSub topics (reliable in current meshtastic)
pub.subscribe(on_receive, "meshtastic.receive")
@@ -223,19 +427,36 @@ def main():
stop = threading.Event()
def handle_sig(*_):
"""Stop the daemon when a termination signal is received."""
stop.set()
signal.signal(signal.SIGINT, handle_sig)
signal.signal(signal.SIGTERM, handle_sig)
print(f"Mesh daemon: nodes+messages → {DB} | port={PORT} | channel={CHANNEL_INDEX}")
target = INSTANCE or "(no POTATOMESH_INSTANCE)"
print(
f"Mesh daemon: nodes+messages → {target} | port={PORT} | channel={CHANNEL_INDEX}"
)
while not stop.is_set():
try:
nodes = getattr(iface, "nodes", {}) or {}
for node_id, n in nodes.items():
upsert_node(node_id, n)
with DB_LOCK:
conn.commit()
node_items = _node_items_snapshot(nodes)
if node_items is None:
if DEBUG:
print(
"[debug] skipping node snapshot; nodes changed during iteration"
)
else:
for node_id, n in node_items:
try:
upsert_node(node_id, n)
except Exception as e:
print(
f"[warn] failed to update node snapshot for {node_id}: {e}"
)
if DEBUG:
print(f"[debug] node object: {n!r}")
except Exception as e:
print(f"[warn] failed to update node snapshot: {e}")
stop.wait(SNAPSHOT_SECS)
@@ -244,9 +465,6 @@ def main():
iface.close()
except Exception:
pass
with DB_LOCK:
conn.commit()
conn.close()
if __name__ == "__main__":
+16 -1
View File
@@ -1,7 +1,22 @@
#!/usr/bin/env bash
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail
python -m venv .venv
source .venv/bin/activate
pip install -U meshtastic
pip install -U meshtastic black pytest
exec python mesh.py
+26 -12
View File
@@ -1,16 +1,30 @@
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
rx_time INTEGER NOT NULL, -- unix seconds when received
rx_iso TEXT NOT NULL, -- ISO8601 UTC timestamp
from_id TEXT, -- sender node id (string form)
to_id TEXT, -- recipient node id
channel INTEGER, -- channel index
portnum TEXT, -- application portnum (e.g. TEXT_MESSAGE_APP)
text TEXT, -- decoded text payload if present
snr REAL, -- signal-to-noise ratio
rssi INTEGER, -- received signal strength
hop_limit INTEGER, -- hops left when received
raw_json TEXT -- entire packet JSON dump
id INTEGER PRIMARY KEY,
rx_time INTEGER NOT NULL,
rx_iso TEXT NOT NULL,
from_id TEXT,
to_id TEXT,
channel INTEGER,
portnum TEXT,
text TEXT,
snr REAL,
rssi INTEGER,
hop_limit INTEGER,
raw_json TEXT
);
CREATE INDEX IF NOT EXISTS idx_messages_rx_time ON messages(rx_time);
+14 -1
View File
@@ -1,4 +1,17 @@
-- nodes.sql
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
PRAGMA journal_mode=WAL;
CREATE TABLE IF NOT EXISTS nodes (
BIN
View File
Binary file not shown.

After

Width:  |  Height:  |  Size: 748 KiB

BIN
View File
Binary file not shown.
-1442
View File
File diff suppressed because it is too large Load Diff
-2096
View File
File diff suppressed because it is too large Load Diff
+15
View File
@@ -1,4 +1,19 @@
#!/usr/bin/env python3
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time, json, base64, threading
from pubsub import pub # comes with meshtastic
from meshtastic.serial_interface import SerialInterface
BIN
View File
Binary file not shown.
+4002
View File
File diff suppressed because it is too large Load Diff
+3653
View File
File diff suppressed because it is too large Load Diff
+482
View File
@@ -0,0 +1,482 @@
import importlib
import sys
import types
from dataclasses import dataclass
from pathlib import Path
from types import SimpleNamespace
import pytest
@pytest.fixture
def mesh_module(monkeypatch):
    """Import data.mesh with stubbed dependencies.

    Installs fake ``meshtastic``, ``pubsub`` and ``google.protobuf`` modules
    into ``sys.modules`` before importing the daemon, clears its POST queue
    around the test, and removes the module afterwards so each test gets a
    fresh import.
    """
    repo_root = Path(__file__).resolve().parents[1]
    monkeypatch.syspath_prepend(str(repo_root))
    # Stub meshtastic.serial_interface.SerialInterface
    serial_interface_mod = types.ModuleType("meshtastic.serial_interface")

    class DummySerialInterface:
        # Minimal stand-in: records only whether close() was called.
        def __init__(self, *_, **__):
            self.closed = False

        def close(self):
            self.closed = True

    serial_interface_mod.SerialInterface = DummySerialInterface
    meshtastic_mod = types.ModuleType("meshtastic")
    meshtastic_mod.serial_interface = serial_interface_mod
    monkeypatch.setitem(sys.modules, "meshtastic", meshtastic_mod)
    monkeypatch.setitem(
        sys.modules, "meshtastic.serial_interface", serial_interface_mod
    )
    # Stub pubsub.pub
    pubsub_mod = types.ModuleType("pubsub")

    class DummyPub:
        # Collects subscribe() calls so tests can inspect topic wiring.
        def __init__(self):
            self.subscriptions = []

        def subscribe(self, *args, **kwargs):
            self.subscriptions.append((args, kwargs))

    pubsub_mod.pub = DummyPub()
    monkeypatch.setitem(sys.modules, "pubsub", pubsub_mod)
    # Stub google.protobuf modules used by mesh.py
    json_format_mod = types.ModuleType("google.protobuf.json_format")

    def message_to_dict(obj, *_, **__):
        # Approximates MessageToDict for the dummy proto objects used in tests.
        if hasattr(obj, "to_dict"):
            return obj.to_dict()
        if hasattr(obj, "__dict__"):
            return dict(obj.__dict__)
        return {}

    json_format_mod.MessageToDict = message_to_dict
    message_mod = types.ModuleType("google.protobuf.message")

    class DummyProtoMessage:
        # Placeholder base class for isinstance() checks in mesh.py.
        pass

    message_mod.Message = DummyProtoMessage
    protobuf_mod = types.ModuleType("google.protobuf")
    protobuf_mod.json_format = json_format_mod
    protobuf_mod.message = message_mod
    google_mod = types.ModuleType("google")
    google_mod.protobuf = protobuf_mod
    monkeypatch.setitem(sys.modules, "google", google_mod)
    monkeypatch.setitem(sys.modules, "google.protobuf", protobuf_mod)
    monkeypatch.setitem(sys.modules, "google.protobuf.json_format", json_format_mod)
    monkeypatch.setitem(sys.modules, "google.protobuf.message", message_mod)
    module_name = "data.mesh"
    if module_name in sys.modules:
        module = importlib.reload(sys.modules[module_name])
    else:
        module = importlib.import_module(module_name)
    if hasattr(module, "_clear_post_queue"):
        module._clear_post_queue()
    yield module
    # Ensure a clean import for the next test
    if hasattr(module, "_clear_post_queue"):
        module._clear_post_queue()
    sys.modules.pop(module_name, None)
def test_snapshot_interval_defaults_to_60_seconds(mesh_module):
    """The node snapshot interval defaults to 60 seconds."""
    mesh = mesh_module
    assert mesh.SNAPSHOT_SECS == 60
def test_node_to_dict_handles_nested_structures(mesh_module):
    """_node_to_dict flattens dataclasses, protos, bytes and sequences."""
    mesh = mesh_module

    @dataclass
    class Child:
        number: int

    class DummyProto(mesh.ProtoMessage):
        def __init__(self, **payload):
            self._payload = payload

        def to_dict(self):
            return self._payload

    @dataclass
    class Node:
        info: Child
        proto: DummyProto
        payload: bytes
        seq: list

    node = Node(Child(5), DummyProto(value=7), b"hi", [Child(1), DummyProto(value=9)])
    result = mesh._node_to_dict(node)
    assert result["info"] == {"number": 5}
    assert result["proto"] == {"value": 7}
    assert result["payload"] == "hi"
    assert result["seq"] == [{"number": 1}, {"value": 9}]
def test_store_packet_dict_posts_text_message(mesh_module, monkeypatch):
    """A TEXT_MESSAGE_APP packet is queued to /api/messages with all fields coerced."""
    mesh = mesh_module
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    # String-typed numeric fields exercise the int/float coercion paths.
    packet = {
        "id": 123,
        "rxTime": 1_700_000_000,
        "fromId": "!abc",
        "toId": "!def",
        "channel": "2",
        "hopLimit": "3",
        "snr": "1.25",
        "rxRssi": "-70",
        "decoded": {
            "payload": {"text": "hello"},
            "portnum": "TEXT_MESSAGE_APP",
            "channel": 4,
        },
    }
    mesh.store_packet_dict(packet)
    assert captured, "Expected POST to be triggered for text message"
    path, payload, priority = captured[0]
    assert path == "/api/messages"
    assert payload["id"] == 123
    # decoded.channel (4) wins over the top-level string channel ("2").
    assert payload["channel"] == 4
    assert payload["from_id"] == "!abc"
    assert payload["to_id"] == "!def"
    assert payload["text"] == "hello"
    assert payload["portnum"] == "TEXT_MESSAGE_APP"
    assert payload["rx_time"] == 1_700_000_000
    assert payload["rx_iso"] == mesh._iso(1_700_000_000)
    assert payload["hop_limit"] == 3
    assert payload["snr"] == pytest.approx(1.25)
    assert payload["rssi"] == -70
    assert priority == mesh._MESSAGE_POST_PRIORITY
def test_store_packet_dict_ignores_non_text(mesh_module, monkeypatch):
    """Packets on non-text ports (e.g. POSITION_APP) are never queued."""
    mesh = mesh_module
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda *args, **kwargs: captured.append((args, kwargs)),
    )
    packet = {
        "id": 456,
        "rxTime": 1_700_000_100,
        "fromId": "!abc",
        "toId": "!def",
        "decoded": {
            "payload": {"text": "ignored"},
            "portnum": "POSITION_APP",
        },
    }
    mesh.store_packet_dict(packet)
    assert not captured, "Non-text messages should not be queued"
def test_node_items_snapshot_handles_transient_runtime_error(mesh_module):
    """A single concurrent-mutation RuntimeError is retried and succeeds."""
    mesh = mesh_module

    class FlakyDict(dict):
        # Raises once, then behaves like a normal dict.
        def __init__(self):
            super().__init__({"node": {"foo": "bar"}})
            self.calls = 0

        def items(self):
            self.calls += 1
            if self.calls == 1:
                raise RuntimeError("dictionary changed size during iteration")
            return super().items()

    nodes = FlakyDict()
    snapshot = mesh._node_items_snapshot(nodes, retries=3)
    assert snapshot == [("node", {"foo": "bar"})]
    assert nodes.calls == 2
def test_node_items_snapshot_returns_none_when_still_mutating(mesh_module):
    """Exhausting all retries on a constantly mutating dict yields None."""
    mesh = mesh_module

    class AlwaysChanging(dict):
        def __init__(self):
            super().__init__({"node": {"foo": "bar"}})

        def items(self):
            raise RuntimeError("dictionary changed size during iteration")

    nodes = AlwaysChanging()
    snapshot = mesh._node_items_snapshot(nodes, retries=2)
    assert snapshot is None
def test_get_handles_dicts_and_objects(mesh_module):
    """_get reads both mapping keys and object attributes, with defaults."""
    mesh = mesh_module

    class Dummy:
        value = "obj"

    assert mesh._get({"key": 1}, "key") == 1
    assert mesh._get({"key": 1}, "missing", "fallback") == "fallback"
    dummy = Dummy()
    assert mesh._get(dummy, "value") == "obj"
    assert mesh._get(dummy, "missing", "default") == "default"
def test_post_json_skips_without_instance(mesh_module, monkeypatch):
    """_post_json is a no-op (no Request built) when INSTANCE is unset."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "INSTANCE", "")

    def fail_request(*_, **__):
        raise AssertionError("Request should not be created when INSTANCE is empty")

    monkeypatch.setattr(mesh.urllib.request, "Request", fail_request)
    mesh._post_json("/ignored", {"foo": "bar"})
def test_post_json_sends_payload_with_token(mesh_module, monkeypatch):
    """_post_json builds the URL, JSON body, content type and bearer token."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "INSTANCE", "https://example.test")
    monkeypatch.setattr(mesh, "API_TOKEN", "secret")
    captured = {}

    def fake_urlopen(req, timeout=0):
        # Capture the request instead of performing network I/O.
        captured["req"] = req

        class DummyResponse:
            def __enter__(self):
                return self

            def __exit__(self, *exc):
                return False

            def read(self):
                return b"ok"

        return DummyResponse()

    monkeypatch.setattr(mesh.urllib.request, "urlopen", fake_urlopen)
    mesh._post_json("/api/test", {"hello": "world"})
    req = captured["req"]
    assert req.full_url == "https://example.test/api/test"
    assert req.headers["Content-type"] == "application/json"
    assert req.get_header("Authorization") == "Bearer secret"
    assert mesh.json.loads(req.data.decode("utf-8")) == {"hello": "world"}
def test_node_to_dict_handles_non_utf8_bytes(mesh_module):
    """Non-UTF-8 bytes become hex; unknown objects fall back to str()."""
    mesh = mesh_module

    @dataclass
    class Node:
        payload: bytes
        other: object

    class Custom:
        def __str__(self):
            return "custom!"

    node = Node(b"\xff", Custom())
    result = mesh._node_to_dict(node)
    assert result["payload"] == "ff"
    assert result["other"] == "custom!"
def test_first_prefers_first_non_empty_value(mesh_module):
    """_first skips empty-string values and falls back to the default."""
    mesh = mesh_module
    data = {"primary": {"value": ""}, "secondary": {"value": "found"}}
    assert mesh._first(data, "primary.value", "secondary.value") == "found"
    assert mesh._first(data, "missing.path", default="fallback") == "fallback"
def test_first_handles_attribute_sources(mesh_module):
    """_first also resolves attributes on non-mapping objects, skipping None."""
    mesh = mesh_module
    ns = SimpleNamespace(empty=None, value="attr")
    assert mesh._first(ns, "empty", "value") == "attr"
def test_pkt_to_dict_handles_dict_and_proto(mesh_module, monkeypatch):
    """_pkt_to_dict passes dicts through, converts protos, and falls back to _unparsed."""
    mesh = mesh_module
    assert mesh._pkt_to_dict({"a": 1}) == {"a": 1}

    class DummyProto(mesh.ProtoMessage):
        def to_dict(self):
            return {"value": 5}

    assert mesh._pkt_to_dict(DummyProto()) == {"value": 5}

    class Unknown:
        pass

    def broken_dumps(*_, **__):
        raise TypeError("boom")

    # Force the JSON round-trip to fail so the _unparsed fallback is used.
    monkeypatch.setattr(mesh.json, "dumps", broken_dumps)
    fallback = mesh._pkt_to_dict(Unknown())
    assert set(fallback) == {"_unparsed"}
    assert isinstance(fallback["_unparsed"], str)
def test_store_packet_dict_uses_top_level_channel(mesh_module, monkeypatch):
    """Without decoded.channel, the top-level channel string is coerced to int."""
    mesh = mesh_module
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    packet = {
        "id": "789",
        "rxTime": 123456,
        "from": "!abc",
        "to": "!def",
        "channel": "5",
        "decoded": {"text": "hi", "portnum": 1},
    }
    mesh.store_packet_dict(packet)
    assert captured, "Expected message to be stored"
    path, payload, priority = captured[0]
    assert path == "/api/messages"
    assert payload["channel"] == 5
    assert payload["portnum"] == "1"
    assert payload["text"] == "hi"
    assert payload["snr"] is None and payload["rssi"] is None
    assert priority == mesh._MESSAGE_POST_PRIORITY
def test_store_packet_dict_handles_invalid_channel(mesh_module, monkeypatch):
    """A non-numeric channel value falls back to channel 0."""
    mesh = mesh_module
    captured = []
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    packet = {
        "id": 321,
        "rxTime": 999,
        "fromId": "!abc",
        "decoded": {
            "payload": {"text": "hello"},
            "portnum": "TEXT_MESSAGE_APP",
            "channel": "not-a-number",
        },
    }
    mesh.store_packet_dict(packet)
    assert captured
    path, payload, priority = captured[0]
    assert path == "/api/messages"
    assert payload["channel"] == 0
    assert priority == mesh._MESSAGE_POST_PRIORITY
def test_post_queue_prioritises_nodes(mesh_module, monkeypatch):
    """Node posts drain before message posts even when enqueued later."""
    mesh = mesh_module
    mesh._clear_post_queue()
    calls = []

    def record(path, payload):
        calls.append((path, payload))

    monkeypatch.setattr(mesh, "_post_json", record)
    mesh._enqueue_post_json("/api/messages", {"id": 1}, mesh._MESSAGE_POST_PRIORITY)
    mesh._enqueue_post_json(
        "/api/nodes", {"!node": {"foo": "bar"}}, mesh._NODE_POST_PRIORITY
    )
    mesh._drain_post_queue()
    assert [path for path, _ in calls] == ["/api/nodes", "/api/messages"]
def test_store_packet_dict_requires_id(mesh_module, monkeypatch):
    """Packets without an id are dropped before any POST is queued."""
    mesh = mesh_module

    def fail_post(*_, **__):
        raise AssertionError("Should not post without an id")

    monkeypatch.setattr(mesh, "_queue_post_json", fail_post)
    packet = {"decoded": {"payload": {"text": "hello"}, "portnum": "TEXT_MESSAGE_APP"}}
    mesh.store_packet_dict(packet)
def test_on_receive_logs_when_store_fails(mesh_module, monkeypatch, capsys):
    """on_receive swallows store errors and prints a warning instead."""
    mesh = mesh_module
    monkeypatch.setattr(mesh, "_pkt_to_dict", lambda pkt: {"id": 1})

    def boom(*_, **__):
        raise ValueError("boom")

    monkeypatch.setattr(mesh, "store_packet_dict", boom)
    mesh.on_receive(object(), interface=None)
    captured = capsys.readouterr()
    assert "failed to store packet" in captured.out
def test_node_items_snapshot_iterable_without_items(mesh_module):
    """Containers with __iter__/__getitem__ but no items() are snapshotted too."""
    mesh = mesh_module

    class Iterable:
        def __init__(self):
            self._data = {"node": {"foo": "bar"}}

        def __iter__(self):
            return iter(self._data)

        def __getitem__(self, key):
            return self._data[key]

    snapshot = mesh._node_items_snapshot(Iterable(), retries=1)
    assert snapshot == [("node", {"foo": "bar"})]
def test_node_items_snapshot_handles_empty_input(mesh_module):
    """None and empty containers produce an empty snapshot list."""
    mesh = mesh_module
    assert mesh._node_items_snapshot(None) == []
    assert mesh._node_items_snapshot({}) == []
+21
View File
@@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail
sqlite3 ../data/mesh.db ".backup './mesh.db'"
curl http://127.0.0.1:41447/api/nodes |jq > ./nodes.json
curl http://127.0.0.1:41447/api/messages |jq > ./messages.json
+23
View File
@@ -1,6 +1,29 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
source "https://rubygems.org"
gem "sinatra", "~> 4.0"
gem "sqlite3", "~> 1.7"
gem "rackup", "~> 2.2"
gem "puma", "~> 7.0"
group :test do
gem "rspec", "~> 3.12"
gem "rack-test", "~> 2.1"
gem "rufo", "~> 0.18.1"
gem "simplecov", "~> 0.22", require: false
gem "simplecov_json_formatter", "~> 0.1", require: false
gem "rspec_junit_formatter", "~> 0.6", require: false
end
+422 -29
View File
@@ -1,16 +1,139 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# frozen_string_literal: true
# Main Sinatra application exposing the Meshtastic node and message archive.
# The daemon in +data/mesh.py+ pushes updates into the SQLite database that
# this web process reads from, providing JSON APIs and a rendered HTML index
# page for human visitors.
require "sinatra"
require "json"
require "sqlite3"
require "fileutils"
require "logger"
require "rack/utils"
# run ../data/mesh.sh to populate nodes and messages database
DB_PATH = ENV.fetch("MESH_DB", File.join(__dir__, "../data/mesh.db"))
DB_BUSY_TIMEOUT_MS = ENV.fetch("DB_BUSY_TIMEOUT_MS", "5000").to_i
DB_BUSY_MAX_RETRIES = ENV.fetch("DB_BUSY_MAX_RETRIES", "5").to_i
DB_BUSY_RETRY_DELAY = ENV.fetch("DB_BUSY_RETRY_DELAY", "0.05").to_f
WEEK_SECONDS = 7 * 24 * 60 * 60
DEFAULT_MAX_JSON_BODY_BYTES = 1_048_576
MAX_JSON_BODY_BYTES = begin
raw = ENV.fetch("MAX_JSON_BODY_BYTES", DEFAULT_MAX_JSON_BODY_BYTES.to_s)
value = Integer(raw, 10)
value.positive? ? value : DEFAULT_MAX_JSON_BODY_BYTES
rescue ArgumentError
DEFAULT_MAX_JSON_BODY_BYTES
end
set :public_folder, File.join(__dir__, "public")
set :views, File.join(__dir__, "views")
SITE_NAME = ENV.fetch("SITE_NAME", "Meshtastic Berlin")
DEFAULT_CHANNEL = ENV.fetch("DEFAULT_CHANNEL", "#MediumFast")
DEFAULT_FREQUENCY = ENV.fetch("DEFAULT_FREQUENCY", "868MHz")
MAP_CENTER_LAT = ENV.fetch("MAP_CENTER_LAT", "52.502889").to_f
MAP_CENTER_LON = ENV.fetch("MAP_CENTER_LON", "13.404194").to_f
MAX_NODE_DISTANCE_KM = ENV.fetch("MAX_NODE_DISTANCE_KM", "137").to_f
MATRIX_ROOM = ENV.fetch("MATRIX_ROOM", "#meshtastic-berlin:matrix.org")
DEBUG = ENV["DEBUG"] == "1"
class << Sinatra::Application
  # Align the application logger's level with the DEBUG flag.
  # No-op when no logger has been configured yet.
  def apply_logger_level!
    logger = settings.logger
    return unless logger
    logger.level = DEBUG ? Logger::DEBUG : Logger::WARN
  end
end
Sinatra::Application.configure do
  # Log to stdout and reuse the same logger for Rack's request log so
  # application and request output interleave in one stream.
  app_logger = Logger.new($stdout)
  set :logger, app_logger
  use Rack::CommonLogger, app_logger
  Sinatra::Application.apply_logger_level!
end
# Open the SQLite database with a configured busy timeout.
#
# @param readonly [Boolean] whether to open the database in read-only mode.
# @return [SQLite3::Database]
# Open the SQLite database, applying the configured busy timeout so
# concurrent access from the ingestor does not fail immediately.
#
# @param readonly [Boolean] whether to open the database in read-only mode.
# @return [SQLite3::Database]
def open_database(readonly: false)
  db = SQLite3::Database.new(DB_PATH, readonly: readonly)
  db.busy_timeout = DB_BUSY_TIMEOUT_MS
  db
end
# Execute the provided block, retrying when SQLite reports the database is
# temporarily locked.
#
# @param max_retries [Integer] maximum number of retries after the initial
# attempt.
# @param base_delay [Float] base delay in seconds for linear backoff between
# retries.
# @yieldreturn [Object] result of the block once it succeeds.
def with_busy_retry(max_retries: DB_BUSY_MAX_RETRIES, base_delay: DB_BUSY_RETRY_DELAY)
  attempts = 0
  begin
    yield
  rescue SQLite3::BusyException
    attempts += 1
    # Re-raise once the retry budget is spent; otherwise back off linearly
    # (delay grows with each attempt) before retrying the whole block.
    raise if attempts > max_retries
    sleep(base_delay * attempts)
    retry
  end
end
# Checks whether the SQLite database already contains the required tables.
#
# @return [Boolean] true when both +nodes+ and +messages+ tables exist.
def db_schema_present?
  return false unless File.exist?(DB_PATH)
  db = open_database(readonly: true)
  # Check sqlite_master directly so a partially created database (file
  # exists, tables missing) still triggers re-initialisation.
  tables = db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages')").flatten
  tables.include?("nodes") && tables.include?("messages")
rescue SQLite3::Exception
  # Treat an unreadable/corrupt database as missing schema.
  false
ensure
  db&.close
end
# Create the SQLite database and seed it with the node and message schemas.
#
# @return [void]
def init_db
  FileUtils.mkdir_p(File.dirname(DB_PATH))
  db = open_database
  # Seed both schemas from the SQL files shipped alongside the ingestor.
  %w[nodes messages].each do |schema|
    sql_file = File.expand_path("../data/#{schema}.sql", __dir__)
    db.execute_batch(File.read(sql_file))
  end
ensure
  db&.close
end
init_db unless db_schema_present?
# Retrieve recently heard nodes ordered by their last contact time.
#
# @param limit [Integer] maximum number of rows returned.
# @return [Array<Hash>] collection of node records formatted for the API.
def query_nodes(limit)
db = SQLite3::Database.new(DB_PATH, readonly: true, results_as_hash: true)
db = open_database(readonly: true)
db.results_as_hash = true
now = Time.now.to_i
min_last_heard = now - WEEK_SECONDS
rows = db.execute <<~SQL, [min_last_heard, limit]
@@ -39,43 +162,136 @@ ensure
db&.close
end
# GET /api/nodes
#
# Returns a JSON array of the most recently heard nodes.
get "/api/nodes" do
  content_type :json
  # Cap the client-supplied limit at 1000; a missing param falls back to 200.
  # NOTE(review): non-numeric input becomes 0 via to_i and negative values
  # pass through unclamped — confirm whether a lower bound of 1 is intended.
  limit = [params["limit"]&.to_i || 200, 1000].min
  query_nodes(limit).to_json
end
# Retrieve recent text messages joined with related node information.
#
# @param limit [Integer] maximum number of rows returned.
# @return [Array<Hash>] collection of message rows suitable for serialisation.
def query_messages(limit)
  # NOTE(review): the previous version carried merge residue — a second
  # `db =` assignment that leaked the first handle, a duplicate
  # `LEFT JOIN nodes n` (invalid SQL: duplicate table alias) and a stale
  # msg_fields list including raw_json. Only the post-merge lines are kept.
  db = open_database(readonly: true)
  db.results_as_hash = true
  # Join messages to nodes via the canonical node ID or, when the sender was
  # stored as a bare decimal reference, via the numeric alias column.
  rows = db.execute <<~SQL, [limit]
    SELECT m.*, n.*, m.snr AS msg_snr
    FROM messages m
    LEFT JOIN nodes n ON (
      m.from_id = n.node_id OR (
        CAST(m.from_id AS TEXT) <> '' AND
        CAST(m.from_id AS TEXT) GLOB '[0-9]*' AND
        CAST(m.from_id AS INTEGER) = n.num
      )
    )
    ORDER BY m.rx_time DESC
    LIMIT ?
  SQL
  msg_fields = %w[id rx_time rx_iso from_id to_id channel portnum text msg_snr rssi hop_limit]
  rows.each do |r|
    if DEBUG && (r["from_id"].nil? || r["from_id"].to_s.empty?)
      raw = db.execute("SELECT * FROM messages WHERE id = ?", [r["id"]]).first
      Kernel.warn "[debug] messages row before join: #{raw.inspect}"
      Kernel.warn "[debug] row after join: #{r.inspect}"
    end
    # Move the joined node columns out of the flat row into a nested hash.
    node = {}
    r.keys.each do |k|
      next if msg_fields.include?(k)
      node[k] = r.delete(k)
    end
    r["snr"] = r.delete("msg_snr")
    r["node"] = node unless node.empty?
    if r["from_id"] && (node["node_id"].nil? || node["node_id"].to_s.empty?)
      # The SQL join missed: try again with the canonical ID, the raw sender
      # reference and, for purely numeric senders, the integer alias.
      lookup_keys = []
      canonical = normalize_node_id(db, r["from_id"])
      lookup_keys << canonical if canonical
      raw_ref = r["from_id"].to_s.strip
      lookup_keys << raw_ref unless raw_ref.empty?
      lookup_keys << raw_ref.to_i if raw_ref.match?(/\A[0-9]+\z/)
      fallback = nil
      lookup_keys.uniq.each do |ref|
        sql = ref.is_a?(Integer) ? "SELECT * FROM nodes WHERE num = ?" : "SELECT * FROM nodes WHERE node_id = ?"
        fallback = db.get_first_row(sql, [ref])
        break if fallback
      end
      if fallback
        fallback.each do |key, value|
          next unless key.is_a?(String)
          next if msg_fields.include?(key)
          node[key] = value if node[key].nil?
        end
      end
    end
    node["role"] = "CLIENT" if node.key?("role") && (node["role"].nil? || node["role"].to_s.empty?)
    r["node"] = node
    if DEBUG && (r["from_id"].nil? || r["from_id"].to_s.empty?)
      Kernel.warn "[debug] row after processing: #{r.inspect}"
    end
  end
  rows
ensure
  db&.close
end
# GET /api/messages
#
# Returns a JSON array of stored text messages including node metadata.
get "/api/messages" do
  content_type :json
  # Clamp the page size to 1..1000; `[x, 1000].min` let negative values
  # through, and a negative LIMIT removes the row cap in SQLite.
  limit = (params["limit"]&.to_i || 200).clamp(1, 1000)
  query_messages(limit).to_json
end
# Determine the numeric node reference for a canonical node identifier.
#
# The Meshtastic protobuf encodes the node ID as a hexadecimal string prefixed
# with an exclamation mark (for example ``!4ed36bd0``). Many payloads also
# include a decimal ``num`` alias, but some integrations omit it. When the
# alias is missing we can reconstruct it from the canonical identifier so that
# later joins using ``nodes.num`` continue to work.
#
# @param node_id [String, nil] canonical node identifier (e.g. ``!4ed36bd0``).
# @param payload [Hash] raw node payload provided by the data daemon.
# @return [Integer, nil] numeric node reference if it can be determined.
def resolve_node_num(node_id, payload)
raw = payload["num"]
case raw
when Integer
return raw
when Numeric
return raw.to_i
when String
trimmed = raw.strip
return nil if trimmed.empty?
return Integer(trimmed, 10) if trimmed.match?(/\A[0-9]+\z/)
return Integer(trimmed.delete_prefix("0x").delete_prefix("0X"), 16) if trimmed.match?(/\A0[xX][0-9A-Fa-f]+\z/)
if trimmed.match?(/\A[0-9A-Fa-f]+\z/)
canonical = node_id.is_a?(String) ? node_id.strip : ""
return Integer(trimmed, 16) if canonical.match?(/\A!?[0-9A-Fa-f]+\z/)
end
end
return nil unless node_id.is_a?(String)
hex = node_id.strip
return nil if hex.empty?
hex = hex.delete_prefix("!")
return nil unless hex.match?(/\A[0-9A-Fa-f]+\z/)
Integer(hex, 16)
rescue ArgumentError
nil
end
# Insert or update a node row with the most recent metrics.
#
# @param db [SQLite3::Database] open database handle.
# @param node_id [String] primary identifier for the node.
# @param n [Hash] node payload provided by the data daemon.
def upsert_node(db, node_id, n)
user = n["user"] || {}
met = n["deviceMetrics"] || {}
@@ -87,17 +303,26 @@ def upsert_node(db, node_id, n)
pt = nil if pt && pt > now
lh = now if lh && lh > now
lh = pt if pt && (!lh || lh < pt)
bool = ->(v) {
case v
when true then 1
when false then 0
else v
end
}
node_num = resolve_node_num(node_id, n)
row = [
node_id,
n["num"],
node_num,
user["shortName"],
user["longName"],
user["macaddr"],
user["hwModel"] || n["hwModel"],
role,
user["publicKey"],
user["isUnmessagable"],
n["isFavorite"],
bool.call(user["isUnmessagable"]),
bool.call(n["isFavorite"]),
n["hopsAway"],
n["snr"],
lh,
@@ -113,39 +338,174 @@ def upsert_node(db, node_id, n)
pos["longitude"],
pos["altitude"],
]
db.execute <<~SQL, row
INSERT INTO nodes(node_id,num,short_name,long_name,macaddr,hw_model,role,public_key,is_unmessagable,is_favorite,
hops_away,snr,last_heard,first_heard,battery_level,voltage,channel_utilization,air_util_tx,uptime_seconds,
position_time,location_source,latitude,longitude,altitude)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(node_id) DO UPDATE SET
num=excluded.num, short_name=excluded.short_name, long_name=excluded.long_name, macaddr=excluded.macaddr,
hw_model=excluded.hw_model, role=excluded.role, public_key=excluded.public_key, is_unmessagable=excluded.is_unmessagable,
is_favorite=excluded.is_favorite, hops_away=excluded.hops_away, snr=excluded.snr, last_heard=excluded.last_heard,
battery_level=excluded.battery_level, voltage=excluded.voltage, channel_utilization=excluded.channel_utilization,
air_util_tx=excluded.air_util_tx, uptime_seconds=excluded.uptime_seconds, position_time=excluded.position_time,
location_source=excluded.location_source, latitude=excluded.latitude, longitude=excluded.longitude,
altitude=excluded.altitude
WHERE COALESCE(excluded.last_heard,0) >= COALESCE(nodes.last_heard,0)
SQL
with_busy_retry do
db.execute <<~SQL, row
INSERT INTO nodes(node_id,num,short_name,long_name,macaddr,hw_model,role,public_key,is_unmessagable,is_favorite,
hops_away,snr,last_heard,first_heard,battery_level,voltage,channel_utilization,air_util_tx,uptime_seconds,
position_time,location_source,latitude,longitude,altitude)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(node_id) DO UPDATE SET
num=excluded.num, short_name=excluded.short_name, long_name=excluded.long_name, macaddr=excluded.macaddr,
hw_model=excluded.hw_model, role=excluded.role, public_key=excluded.public_key, is_unmessagable=excluded.is_unmessagable,
is_favorite=excluded.is_favorite, hops_away=excluded.hops_away, snr=excluded.snr, last_heard=excluded.last_heard,
battery_level=excluded.battery_level, voltage=excluded.voltage, channel_utilization=excluded.channel_utilization,
air_util_tx=excluded.air_util_tx, uptime_seconds=excluded.uptime_seconds, position_time=excluded.position_time,
location_source=excluded.location_source, latitude=excluded.latitude, longitude=excluded.longitude,
altitude=excluded.altitude
WHERE COALESCE(excluded.last_heard,0) >= COALESCE(nodes.last_heard,0)
SQL
end
end
# Ensure the request includes the expected bearer token.
#
# @return [void]
# @raise [Sinatra::Halt] when authentication fails.
# Ensure the request carries the configured bearer token.
#
# The previous body kept a leftover pre-hardening line that compared the
# token with plain `==` before the constant-time check ran, reintroducing
# the timing side channel (and tolerating an empty configured token on that
# first comparison). Only the hardened guard is kept.
#
# @return [void]
# @raise [Sinatra::Halt] 403 when the token is missing, empty or mismatched.
def require_token!
  token = ENV["API_TOKEN"]
  provided = request.env["HTTP_AUTHORIZATION"].to_s.sub(/^Bearer\s+/i, "")
  halt 403, { error: "Forbidden" }.to_json unless token && !token.empty? && secure_token_match?(token, provided)
end
# Perform a constant-time comparison between two strings, returning false on
# length mismatches or invalid input.
#
# @param expected [String]
# @param provided [String]
# @return [Boolean]
def secure_token_match?(expected, provided)
  return false unless expected.is_a?(String) && provided.is_a?(String)
  # Compare raw bytes; reject length mismatches before the constant-time
  # comparison so secure_compare never raises on unequal sizes.
  lhs = expected.b
  rhs = provided.b
  lhs.bytesize == rhs.bytesize && Rack::Utils.secure_compare(lhs, rhs)
rescue Rack::Utils::SecurityError
  false
end
# Read the request body enforcing a maximum allowed size.
#
# @param limit [Integer, nil] optional override for the number of bytes.
# @return [String]
def read_json_body(limit: nil)
  # Resolve the byte budget; nil, zero and negative overrides fall back to
  # the configured default.
  cap = limit.to_i
  cap = MAX_JSON_BODY_BYTES if cap <= 0
  # Read one extra byte so an oversized body is detectable without
  # buffering the whole stream.
  payload = request.body.read(cap + 1)
  payload = "" if payload.nil?
  halt 413, { error: "payload too large" }.to_json if payload.bytesize > cap
  payload
ensure
  request.body.rewind if request.body.respond_to?(:rewind)
end
# Determine whether the canonical node identifier should replace the provided
# sender reference for a message payload.
#
# @param message [Object] raw request payload element.
# @return [Boolean]
def prefer_canonical_sender?(message)
  # Payloads keyed only by packet_id (no explicit "id") come from the
  # ingestor path where the canonical sender is more trustworthy.
  return false unless message.is_a?(Hash)
  message.key?("packet_id") && !message.key?("id")
end
# Insert a text message if it does not already exist.
#
# @param db [SQLite3::Database] open database handle.
# @param m [Hash] message payload provided by the data daemon.
def insert_message(db, m)
# Accept either the daemon-assigned "id" or the radio packet id as key.
msg_id = m["id"] || m["packet_id"]
return unless msg_id
# Missing receive metadata defaults to "now".
rx_time = m["rx_time"]&.to_i || Time.now.to_i
rx_iso = m["rx_iso"] || Time.at(rx_time).utc.iso8601
# Fall back to the bare "from" field when "from_id" is absent or blank.
raw_from_id = m["from_id"]
if raw_from_id.nil? || raw_from_id.to_s.strip.empty?
alt_from = m["from"]
raw_from_id = alt_from unless alt_from.nil? || alt_from.to_s.strip.empty?
end
trimmed_from_id = raw_from_id.nil? ? nil : raw_from_id.to_s.strip
trimmed_from_id = nil if trimmed_from_id&.empty?
canonical_from_id = normalize_node_id(db, raw_from_id)
# Prefer the canonical node ID when no usable sender was supplied, or when
# the payload shape marks it as an ingestor packet (see
# prefer_canonical_sender?).
use_canonical = canonical_from_id && (trimmed_from_id.nil? || prefer_canonical_sender?(m))
from_id = if use_canonical
canonical_from_id.to_s.strip
else
trimmed_from_id
end
from_id = nil if from_id&.empty?
# Column order matches the INSERT statement below.
row = [
msg_id,
rx_time,
rx_iso,
from_id,
m["to_id"],
m["channel"],
m["portnum"],
m["text"],
m["snr"],
m["rssi"],
m["hop_limit"],
]
with_busy_retry do
existing = db.get_first_row("SELECT from_id FROM messages WHERE id = ?", [msg_id])
if existing
# Row already stored: only the sender may be refreshed, and only when we
# actually have one.
if from_id
existing_from = existing.is_a?(Hash) ? existing["from_id"] : existing[0]
existing_from_str = existing_from&.to_s
should_update = existing_from_str.nil? || existing_from_str.strip.empty?
# NOTE(review): this also updates whenever the stored sender merely
# differs, which makes the blank-check above redundant — presumably
# intentional (later payloads win); confirm against the ingestor.
should_update ||= existing_from != from_id
db.execute("UPDATE messages SET from_id = ? WHERE id = ?", [from_id, msg_id]) if should_update
end
else
begin
db.execute <<~SQL, row
INSERT INTO messages(id,rx_time,rx_iso,from_id,to_id,channel,portnum,text,snr,rssi,hop_limit)
VALUES (?,?,?,?,?,?,?,?,?,?,?)
SQL
rescue SQLite3::ConstraintException
# Lost a race with a concurrent insert of the same id; fall back to
# refreshing the sender on the winner's row.
db.execute("UPDATE messages SET from_id = ? WHERE id = ?", [from_id, msg_id]) if from_id
end
end
end
end
# Resolve a node reference to the canonical node ID when possible.
#
# @param db [SQLite3::Database] open database handle.
# @param node_ref [Object] raw node identifier or numeric reference.
# @return [String, nil] canonical node ID or nil if it cannot be resolved.
def normalize_node_id(db, node_ref)
  return nil if node_ref.nil?
  ref = node_ref.to_s.strip
  return nil if ref.empty?
  # Fast path: the reference already is a canonical node ID.
  canonical = db.get_first_value("SELECT node_id FROM nodes WHERE node_id = ?", [ref])
  return canonical if canonical
  # Otherwise treat it as a decimal numeric alias and look that up.
  numeric =
    begin
      Integer(ref, 10)
    rescue ArgumentError
      return nil
    end
  db.get_first_value("SELECT node_id FROM nodes WHERE num = ?", [numeric])
end
# POST /api/nodes
#
# Upserts one or more nodes provided as a JSON object keyed by node ID.
post "/api/nodes" do
require_token!
content_type :json
begin
data = JSON.parse(request.body.read)
data = JSON.parse(read_json_body)
rescue JSON::ParserError
halt 400, { error: "invalid JSON" }.to_json
end
halt 400, { error: "too many nodes" }.to_json if data.is_a?(Hash) && data.size > 1000
db = SQLite3::Database.new(DB_PATH)
db = open_database
data.each do |node_id, node|
upsert_node(db, node_id, node)
end
@@ -154,6 +514,39 @@ ensure
db&.close
end
get "/" do
send_file File.join(settings.public_folder, "index.html")
# POST /api/messages
#
# Accepts an array or object describing text messages and stores each entry.
post "/api/messages" do
  require_token!
  content_type :json
  begin
    data = JSON.parse(read_json_body)
  rescue JSON::ParserError
    halt 400, { error: "invalid JSON" }.to_json
  end
  messages = data.is_a?(Array) ? data : [data]
  halt 400, { error: "too many messages" }.to_json if messages.size > 1000
  # Reject non-object entries up front: insert_message indexes into each
  # element, so a JSON number/boolean in the array would raise NoMethodError
  # and surface as a 500 instead of a client error.
  halt 400, { error: "invalid message" }.to_json unless messages.all? { |m| m.is_a?(Hash) }
  db = open_database
  messages.each do |msg|
    insert_message(db, msg)
  end
  { status: "ok" }.to_json
ensure
  db&.close
end
# GET /
#
# Renders the main site with configuration-driven defaults for the template.
get "/" do
  # Collect the configuration-driven template defaults before rendering.
  locals = {
    site_name: SITE_NAME,
    default_channel: DEFAULT_CHANNEL,
    default_frequency: DEFAULT_FREQUENCY,
    map_center_lat: MAP_CENTER_LAT,
    map_center_lon: MAP_CENTER_LON,
    max_node_distance_km: MAX_NODE_DISTANCE_KM,
    matrix_room: MATRIX_ROOM,
  }
  erb :index, locals: locals
end
+15
View File
@@ -1,4 +1,19 @@
#!/usr/bin/env bash
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail
bundle install
View File
-455
View File
@@ -1,455 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<title>Meshtastic Berlin</title>
<!-- Leaflet CSS/JS (CDN) -->
<link
rel="stylesheet"
href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
crossorigin=""
/>
<script
src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo="
crossorigin=""
></script>
<style>
:root { --pad: 16px; }
body { font-family: system-ui, Segoe UI, Roboto, Ubuntu, Arial, sans-serif; margin: var(--pad); padding-bottom: 32px; }
h1 { margin: 0 0 8px }
.meta { color:#555; margin-bottom:12px }
.pill{ display:inline-block; padding:2px 8px; border-radius:999px; background:#eee; font-size:12px }
#map { flex: 1; height: 60vh; border: 1px solid #ddd; border-radius: 8px; }
table { border-collapse: collapse; width: 100%; margin-top: var(--pad); }
th, td { border-bottom: 1px solid #ddd; padding: 6px; text-align: left; }
th { position: sticky; top: 0; background: #fafafa; }
.mono { font-family: ui-monospace, Menlo, Consolas, monospace; }
.row { display: flex; gap: var(--pad); align-items: center; justify-content: space-between; }
.map-row { display: flex; gap: var(--pad); align-items: stretch; }
#chat { flex: 0 0 33%; max-width: 33%; height: 60vh; border: 1px solid #ddd; border-radius: 8px; overflow-y: auto; padding: 6px; font-size: 12px; }
.chat-entry-node { font-family: ui-monospace, Menlo, Consolas, monospace; color: #555 }
.chat-entry-msg { font-family: ui-monospace, Menlo, Consolas, monospace; }
.chat-entry-date { font-family: ui-monospace, Menlo, Consolas, monospace; font-weight: bold; }
.short-name { display:inline-block; border-radius:4px; padding:0 2px; }
.controls { display: flex; gap: 8px; align-items: center; }
button { padding: 6px 10px; border: 1px solid #ccc; background: #fff; border-radius: 6px; cursor: pointer; }
button:hover { background: #f6f6f6; }
label { font-size: 14px; color: #333; }
input[type="text"] { padding: 6px 10px; border: 1px solid #ccc; border-radius: 6px; }
.legend { background: #fff; padding: 6px 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 12px; line-height: 18px; }
.legend span { display: inline-block; width: 12px; height: 12px; margin-right: 6px; vertical-align: middle; }
#map .leaflet-tile { filter: opacity(70%); }
footer { position: fixed; bottom: 0; left: var(--pad); width: calc(100% - 2 * var(--pad)); background: #fafafa; border-top: 1px solid #ddd; text-align: center; font-size: 12px; padding: 4px 0; }
@media (max-width: 768px) {
.map-row { flex-direction: column; }
#map { order: 1; flex: none; max-width: 100%; height: 50vh; }
#chat { order: 2; flex: none; max-width: 100%; height: 30vh; }
}
</style>
</head>
<body>
<h1>Meshtastic Berlin</h1>
<div class="row meta">
<div>
<span id="refreshInfo"></span>
<button id="refreshBtn" type="button">Refresh now</button>
<span id="status" class="pill">loading…</span>
</div>
<div class="controls">
<label><input type="checkbox" id="fitBounds" checked /> Auto-fit map</label>
<input type="text" id="filterInput" placeholder="Filter nodes" />
</div>
</div>
<div class="map-row">
<div id="chat" aria-label="Chat log"></div>
<div id="map" role="region" aria-label="Nodes map"></div>
</div>
<table id="nodes">
<thead>
<tr>
<th>Node ID</th>
<th>Short</th>
<th>Long Name</th>
<th>Last Seen</th>
<th>Role</th>
<th>HW Model</th>
<th>Battery</th>
<th>Voltage</th>
<th>Uptime</th>
<th>Channel Util</th>
<th>Air Util Tx</th>
<th>Latitude</th>
<th>Longitude</th>
<th>Altitude</th>
<th>Last Position</th>
</tr>
</thead>
<tbody></tbody>
</table>
<footer>
PotatoMesh GitHub: <a href="https://github.com/l5yth/potato-mesh" target="_blank">l5yth/potato-mesh</a>
Meshtastic Berlin Matrix:
<a href="https://matrix.to/#/#meshtastic-berlin:matrix.org" target="_blank">#meshtastic-berlin:matrix.org</a>
</footer>
<script>
const statusEl = document.getElementById('status');
const fitBoundsEl = document.getElementById('fitBounds');
const refreshBtn = document.getElementById('refreshBtn');
const filterInput = document.getElementById('filterInput');
const titleEl = document.querySelector('title');
const headerEl = document.querySelector('h1');
const chatEl = document.getElementById('chat');
const refreshInfo = document.getElementById('refreshInfo');
const baseTitle = document.title;
let allNodes = [];
const seenNodeIds = new Set();
const seenMessageIds = new Set();
let lastChatDate;
const NODE_LIMIT = 1000;
const CHAT_LIMIT = 1000;
const REFRESH_MS = 60000;
refreshInfo.textContent = `#MediumFast — auto-refresh every ${REFRESH_MS / 1000} seconds.`;
const MAP_CENTER = L.latLng(52.502889, 13.404194);
const MAX_NODE_DISTANCE_KM = 137;
const roleColors = Object.freeze({
CLIENT: '#A8D5BA',
CLIENT_HIDDEN: '#B8DCA9',
CLIENT_MUTE: '#D2E3A2',
TRACKER: '#E8E6A1',
SENSOR: '#F4E3A3',
LOST_AND_FOUND: '#F9D4A6',
REPEATER: '#F7B7A3',
ROUTER_LATE: '#F29AA3',
ROUTER: '#E88B94'
});
// --- Map setup ---
const map = L.map('map', { worldCopyJump: true });
const tiles = L.tileLayer('https://tiles.stadiamaps.com/tiles/stamen_toner_lite/{z}/{x}/{y}.png', {
maxZoom: 18,
attribution: '&copy; OpenStreetMap contributors &amp; WMF Labs'
}).addTo(map);
// Default view (Berlin center) until first data arrives
map.setView(MAP_CENTER, 10);
const markersLayer = L.layerGroup().addTo(map);
const legend = L.control({ position: 'bottomright' });
legend.onAdd = function () {
const div = L.DomUtil.create('div', 'legend');
for (const [role, color] of Object.entries(roleColors)) {
div.innerHTML += `<div><span style="background:${color}"></span>${role}</div>`;
}
return div;
};
legend.addTo(map);
// --- Helpers ---
// Escape the five HTML-significant characters so untrusted text can be
// embedded safely inside innerHTML templates.
function escapeHtml(str) {
  const entities = {
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    '"': '&quot;',
    "'": '&#39;',
  };
  return String(str).replace(/[&<>"']/g, (ch) => entities[ch]);
}
// Render a node's short name as a role-colored badge; nodes without a short
// name get a grey "?" placeholder of the same width.
function renderShortHtml(short, role) {
  if (!short) {
    return `<span class="short-name" style="background:#ccc">?&nbsp;&nbsp;&nbsp;</span>`;
  }
  const label = escapeHtml(String(short).padStart(4, ' ')).replace(/ /g, '&nbsp;');
  const background = roleColors[role] || roleColors.CLIENT;
  return `<span class="short-name" style="background:${background}">${label}</span>`;
}
// Append one line to the chat log, evicting the oldest entries beyond
// CHAT_LIMIT and keeping the view scrolled to the newest message.
function appendChatEntry(div) {
  chatEl.appendChild(div);
  while (chatEl.childElementCount > CHAT_LIMIT) {
    chatEl.firstChild.remove();
  }
  chatEl.scrollTop = chatEl.scrollHeight;
}
// Insert a "-- YYYY-MM-DD --" divider into the chat whenever the timestamp
// falls on a different local day than the previously rendered entry.
function maybeAddDateDivider(ts) {
  if (!ts) return;
  const when = new Date(ts * 1000);
  const dayKey = `${when.getFullYear()}-${pad(when.getMonth() + 1)}-${pad(when.getDate())}`;
  if (lastChatDate === dayKey) return;
  lastChatDate = dayKey;
  const midnight = new Date(when);
  midnight.setHours(0, 0, 0, 0);
  const divider = document.createElement('div');
  divider.className = 'chat-entry-date';
  divider.textContent = `-- ${formatDate(midnight)} --`;
  appendChatEntry(divider);
}
// Announce a newly discovered node in the chat log, dated by first_heard.
function addNewNodeChatEntry(n) {
  maybeAddDateDivider(n.first_heard);
  const entry = document.createElement('div');
  entry.className = 'chat-entry-node';
  const stamp = formatTime(new Date(n.first_heard * 1000));
  const badge = renderShortHtml(n.short_name, n.role);
  const longName = escapeHtml(n.long_name || '');
  entry.innerHTML = `[${stamp}] ${badge} <em>New node: ${longName}</em>`;
  appendChatEntry(entry);
}
// Append a received text message to the chat log with sender badge.
function addNewMessageChatEntry(m) {
  maybeAddDateDivider(m.rx_time);
  const entry = document.createElement('div');
  entry.className = 'chat-entry-msg';
  const stamp = formatTime(new Date(m.rx_time * 1000));
  const badge = renderShortHtml(m.node?.short_name, m.node?.role);
  const body = escapeHtml(m.text || '');
  entry.innerHTML = `[${stamp}] ${badge} ${body}`;
  appendChatEntry(entry);
}
// Zero-pad a number to two digits.
function pad(n) {
  return String(n).padStart(2, '0');
}
// Local wall-clock time as HH:MM:SS.
function formatTime(d) {
  return [d.getHours(), d.getMinutes(), d.getSeconds()].map(pad).join(':');
}
// Local calendar date as YYYY-MM-DD.
function formatDate(d) {
  return [d.getFullYear(), pad(d.getMonth() + 1), pad(d.getDate())].join('-');
}
// Hardware model for display; unknown/unset sentinels render as empty.
function fmtHw(v) {
  if (!v || v === "UNSET") return "";
  return String(v);
}
// Fixed-precision coordinate string; blanks and non-numbers render empty.
function fmtCoords(v, d = 5) {
  if (v == null || v === '') return "";
  const num = Number(v);
  if (!Number.isFinite(num)) return "";
  return num.toFixed(d);
}
// Value plus unit suffix, or empty when the value is missing.
function fmtAlt(v, s) {
  if (v == null || v === '') return "";
  return `${v}${s}`;
}
// Percentage with fixed decimals; blanks and non-numbers render empty.
function fmtTx(v, d = 3) {
  if (v == null || v === '') return "";
  const num = Number(v);
  if (!Number.isFinite(num)) return "";
  return `${num.toFixed(d)}%`;
}
// Render a duration in seconds as a compact human string ("5s", "1m 30s",
// "1h 1m", "1d 1h"); zero/missing input renders empty, negatives as "0s".
function timeHum(unixSec) {
  if (!unixSec) return "";
  if (unixSec < 0) return "0s";
  if (unixSec < 60) return `${unixSec}s`;
  if (unixSec < 3600) {
    return `${Math.floor(unixSec / 60)}m ${Math.floor(unixSec % 60)}s`;
  }
  if (unixSec < 86400) {
    return `${Math.floor(unixSec / 3600)}h ${Math.floor((unixSec % 3600) / 60)}m`;
  }
  return `${Math.floor(unixSec / 86400)}d ${Math.floor((unixSec % 86400) / 3600)}h`;
}
// Elapsed time since a unix timestamp, formatted like timeHum; future
// timestamps clamp to "0s" and missing input renders empty.
function timeAgo(unixSec, nowSec = Date.now() / 1000) {
  if (!unixSec) return "";
  const diff = Math.floor(nowSec - Number(unixSec));
  if (diff < 0) return "0s";
  if (diff < 60) return `${diff}s`;
  if (diff < 3600) {
    return `${Math.floor(diff / 60)}m ${Math.floor(diff % 60)}s`;
  }
  if (diff < 86400) {
    return `${Math.floor(diff / 3600)}h ${Math.floor((diff % 3600) / 60)}m`;
  }
  return `${Math.floor(diff / 86400)}d ${Math.floor((diff % 86400) / 3600)}h`;
}
// Fetch up to `limit` nodes from the API, bypassing the HTTP cache.
async function fetchNodes(limit = NODE_LIMIT) {
  const resp = await fetch(`/api/nodes?limit=${limit}`, { cache: 'no-store' });
  if (!resp.ok) throw new Error('HTTP ' + resp.status);
  return resp.json();
}
// Fetch up to `limit` recent messages from the API, bypassing the cache.
// NOTE(review): the default is NODE_LIMIT rather than CHAT_LIMIT — both are
// currently 1000, but confirm which was intended.
async function fetchMessages(limit = NODE_LIMIT) {
  const resp = await fetch(`/api/messages?limit=${limit}`, { cache: 'no-store' });
  if (!resp.ok) throw new Error('HTTP ' + resp.status);
  return resp.json();
}
// Annotate each node in place with distance_km from the configured map
// center; nodes without usable coordinates get null.
function computeDistances(nodes) {
  for (const n of nodes) {
    const latRaw = n.latitude;
    const lonRaw = n.longitude;
    // Blank/missing raw values must be checked before Number(): Number(null)
    // is 0 and would silently place the node on the equator.
    const blank = latRaw == null || latRaw === '' || lonRaw == null || lonRaw === '';
    if (blank) {
      n.distance_km = null;
      continue;
    }
    const lat = Number(latRaw);
    const lon = Number(lonRaw);
    n.distance_km = Number.isFinite(lat) && Number.isFinite(lon)
      ? L.latLng(lat, lon).distanceTo(MAP_CENTER) / 1000
      : null;
  }
}
// Rebuild the node table body. Mesh-supplied strings (node id, names, role,
// hardware model) were previously interpolated into innerHTML unescaped,
// allowing HTML injection via attacker-controlled node names; they are now
// routed through escapeHtml. Numeric fields pass through formatters that
// emit only digits/units.
function renderTable(nodes, nowSec) {
  const tb = document.querySelector('#nodes tbody');
  const frag = document.createDocumentFragment();
  for (const n of nodes) {
    const tr = document.createElement('tr');
    tr.innerHTML = `
      <td class="mono">${escapeHtml(n.node_id || "")}</td>
      <td>${renderShortHtml(n.short_name, n.role)}</td>
      <td>${escapeHtml(n.long_name || "")}</td>
      <td>${timeAgo(n.last_heard, nowSec)}</td>
      <td>${escapeHtml(n.role || "CLIENT")}</td>
      <td>${escapeHtml(fmtHw(n.hw_model))}</td>
      <td>${fmtAlt(n.battery_level, "%")}</td>
      <td>${fmtAlt(n.voltage, "V")}</td>
      <td>${timeHum(n.uptime_seconds)}</td>
      <td>${fmtTx(n.channel_utilization)}</td>
      <td>${fmtTx(n.air_util_tx)}</td>
      <td>${fmtCoords(n.latitude)}</td>
      <td>${fmtCoords(n.longitude)}</td>
      <td>${fmtAlt(n.altitude, "m")}</td>
      <td class="mono">${n.pos_time_iso ? `${timeAgo(n.position_time, nowSec)}` : ""}</td>`;
    frag.appendChild(tr);
  }
  tb.replaceChildren(frag);
}
// Redraw the node markers. Popup content is assembled as an HTML string, so
// every mesh-supplied string (long name, node id, role, hardware model) is
// escaped to prevent HTML injection — previously they were interpolated raw.
function renderMap(nodes, nowSec) {
  markersLayer.clearLayers();
  const pts = [];
  for (const n of nodes) {
    const latRaw = n.latitude, lonRaw = n.longitude;
    if (latRaw == null || latRaw === '' || lonRaw == null || lonRaw === '') continue;
    const lat = Number(latRaw), lon = Number(lonRaw);
    if (!Number.isFinite(lat) || !Number.isFinite(lon)) continue;
    // Skip nodes reporting implausible positions far from the map center.
    if (n.distance_km != null && n.distance_km > MAX_NODE_DISTANCE_KM) continue;
    const color = roleColors[n.role] || '#3388ff';
    const marker = L.circleMarker([lat, lon], {
      radius: 9,
      color: '#000',
      weight: 1,
      fillColor: color,
      fillOpacity: 0.7,
      opacity: 0.7
    });
    const lines = [
      `<b>${escapeHtml(n.long_name || '')}</b>`,
      `${renderShortHtml(n.short_name, n.role)} <span class="mono">${escapeHtml(n.node_id || '')}</span>`,
      n.hw_model ? `Model: ${escapeHtml(fmtHw(n.hw_model))}` : null,
      `Role: ${escapeHtml(n.role || 'CLIENT')}`,
      (n.battery_level != null ? `Battery: ${fmtAlt(n.battery_level, "%")}, ${fmtAlt(n.voltage, "V")}` : null),
      (n.last_heard ? `Last seen: ${timeAgo(n.last_heard, nowSec)}` : null),
      (n.pos_time_iso ? `Last Position: ${timeAgo(n.position_time, nowSec)}` : null),
      (n.uptime_seconds ? `Uptime: ${timeHum(n.uptime_seconds)}` : null),
    ].filter(Boolean);
    marker.bindPopup(lines.join('<br/>'));
    marker.addTo(markersLayer);
    pts.push([lat, lon]);
  }
  if (pts.length && fitBoundsEl.checked) {
    const b = L.latLngBounds(pts);
    map.fitBounds(b.pad(0.2), { animate: false });
  }
}
// Re-render the table, map and counters for the nodes matching the current
// filter text (matched against node id, short and long name).
function applyFilter() {
  const query = filterInput.value.trim().toLowerCase();
  const matches = (n) =>
    [n.node_id, n.short_name, n.long_name]
      .filter(Boolean)
      .some((field) => field.toLowerCase().includes(query));
  const visible = !query ? allNodes : allNodes.filter(matches);
  const nowSec = Date.now() / 1000;
  renderTable(visible, nowSec);
  renderMap(visible, nowSec);
  updateCount(visible, nowSec);
  updateRefreshInfo(visible, nowSec);
}
filterInput.addEventListener('input', applyFilter);
// Pull fresh node and message lists from the API, announce anything not yet
// seen in the chat log (in timestamp order), then re-render the whole UI.
async function refresh() {
try {
statusEl.textContent = 'refreshing…';
const nodes = await fetchNodes();
computeDistances(nodes);
// Collect nodes whose node_id has not been announced yet.
const newNodes = [];
for (const n of nodes) {
if (n.node_id && !seenNodeIds.has(n.node_id)) {
newNodes.push(n);
}
}
const messages = await fetchMessages();
// Likewise for messages, keyed by message id.
const newMessages = [];
for (const m of messages) {
if (m.id && !seenMessageIds.has(m.id)) {
newMessages.push(m);
}
}
// Merge both kinds into one chronological stream; on timestamp ties the
// node announcement is shown before the message.
const entries = [];
for (const n of newNodes) entries.push({ type: 'node', ts: n.first_heard ?? 0, item: n });
for (const m of newMessages) entries.push({ type: 'msg', ts: m.rx_time ?? 0, item: m });
entries.sort((a, b) => {
if (a.ts !== b.ts) return a.ts - b.ts;
return a.type === 'node' && b.type === 'msg' ? -1 : a.type === 'msg' && b.type === 'node' ? 1 : 0;
});
for (const e of entries) {
if (e.type === 'node') {
addNewNodeChatEntry(e.item);
if (e.item.node_id) seenNodeIds.add(e.item.node_id);
} else {
addNewMessageChatEntry(e.item);
if (e.item.id) seenMessageIds.add(e.item.id);
}
}
// Replace the global node list and redraw table/map with the active filter.
allNodes = nodes;
applyFilter();
statusEl.textContent = 'updated ' + new Date().toLocaleTimeString();
} catch (e) {
// Surface fetch/render failures in the status pill; keep the old view.
statusEl.textContent = 'error: ' + e.message;
console.error(e);
}
}
refresh();
setInterval(refresh, REFRESH_MS);
refreshBtn.addEventListener('click', refresh);
// Reflect the number of nodes heard in the last 24h in the page title and
// the page header.
function updateCount(nodes, nowSec) {
  const cutoff = nowSec - 86400;
  const active = nodes.filter((n) => n.last_heard && Number(n.last_heard) >= cutoff);
  const text = `${baseTitle} (${active.length})`;
  titleEl.textContent = text;
  headerEl.textContent = text;
}
// Summarize active-node counts per time window in the refresh-info line.
function updateRefreshInfo(nodes, nowSec) {
  const windows = [
    ['hour', 3600],
    ['day', 86400],
    ['week', 7 * 86400],
  ];
  const counts = windows
    .map(([label, secs]) => {
      const c = nodes.filter((n) => n.last_heard && nowSec - Number(n.last_heard) <= secs).length;
      return `${c}/${label}`;
    })
    .join(', ');
  refreshInfo.textContent = `#MediumFast — active nodes: ${counts} — auto-refresh every ${REFRESH_MS / 1000} seconds.`;
}
</script>
</body>
</html>
File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 65 KiB

+804
View File
@@ -0,0 +1,804 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# frozen_string_literal: true
require "spec_helper"
require "sqlite3"
require "json"
require "time"
require "base64"
RSpec.describe "Potato Mesh Sinatra app" do
let(:app) { Sinatra::Application }
# Absolute path to a shared JSON fixture in the repository's tests/ folder.
def fixture_path(name)
  File.expand_path(File.join("..", "..", "tests", name), __dir__)
end
# Open the app database, hand it to the block and always close it afterwards.
def with_db(readonly: false)
  handle = SQLite3::Database.new(DB_PATH, readonly: readonly)
  yield handle
ensure
  handle&.close
end
# Wipe both tables between examples (messages first; it references nodes).
def clear_database
  with_db do |db|
    %w[messages nodes].each { |table| db.execute("DELETE FROM #{table}") }
  end
end
# Return a copy of the hash without nil-valued entries.
def reject_nil_values(hash)
  hash.compact
end
# Translate a flat fixture row into the nested payload shape accepted by
# POST /api/nodes (user block, device metrics, position).
def build_node_payload(node)
  user = reject_nil_values(
    "shortName" => node["short_name"],
    "longName" => node["long_name"],
    "hwModel" => node["hw_model"],
    "role" => node["role"],
  )
  payload = {
    "user" => user,
    "hwModel" => node["hw_model"],
    "lastHeard" => node["last_heard"],
    "snr" => node["snr"],
  }
  metrics = reject_nil_values(
    "batteryLevel" => node["battery_level"],
    "voltage" => node["voltage"],
    "channelUtilization" => node["channel_utilization"],
    "airUtilTx" => node["air_util_tx"],
    "uptimeSeconds" => node["uptime_seconds"],
  )
  position = reject_nil_values(
    "time" => node["position_time"],
    "latitude" => node["latitude"],
    "longitude" => node["longitude"],
    "altitude" => node["altitude"],
  )
  # Optional sections are omitted entirely when the fixture has no data.
  payload["deviceMetrics"] = metrics unless metrics.empty?
  payload["position"] = position unless position.empty?
  payload
end
# The later of last_heard and position_time, mirroring the server's upsert.
def expected_last_heard(node)
  candidates = [node["last_heard"], node["position_time"]].compact
  candidates.max
end
# Build the node row we expect the API to return for a given fixture entry.
def expected_node_row(node)
  final_last = expected_last_heard(node)
  row = {}
  row["node_id"] = node["node_id"]
  row["short_name"] = node["short_name"]
  row["long_name"] = node["long_name"]
  row["hw_model"] = node["hw_model"]
  # Nodes without an explicit role default to CLIENT server-side.
  row["role"] = node["role"] || "CLIENT"
  row["snr"] = node["snr"]
  row["battery_level"] = node["battery_level"]
  row["voltage"] = node["voltage"]
  # first_heard is seeded with the same timestamp on initial insert.
  row["last_heard"] = final_last
  row["first_heard"] = final_last
  row["uptime_seconds"] = node["uptime_seconds"]
  row["channel_utilization"] = node["channel_utilization"]
  row["air_util_tx"] = node["air_util_tx"]
  row["position_time"] = node["position_time"]
  row["latitude"] = node["latitude"]
  row["longitude"] = node["longitude"]
  row["altitude"] = node["altitude"]
  row
end
# Compare values with nil-awareness and a float tolerance.
def expect_same_value(actual, expected, tolerance: 1e-6)
  case expected
  when nil
    expect(actual).to be_nil
  when Float
    expect(actual).to be_within(tolerance).of(expected)
  else
    expect(actual).to eq(expected)
  end
end
# POST every fixture node to the API and assert each upsert succeeds.
def import_nodes_fixture
  nodes_fixture.each do |node|
    body = { node["node_id"] => build_node_payload(node) }.to_json
    post "/api/nodes", body, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
  end
end
# POST every fixture message (minus the embedded node) and assert success.
def import_messages_fixture
  messages_fixture.each do |message|
    body = message.reject { |key, _| key == "node" }.to_json
    post "/api/messages", body, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
  end
end
# Bearer token the specs install into ENV["API_TOKEN"] before each example.
let(:api_token) { "spec-token" }

# Default request headers: JSON content type plus the expected bearer scheme.
let(:auth_headers) do
  {
    "CONTENT_TYPE" => "application/json",
    "HTTP_AUTHORIZATION" => "Bearer #{api_token}",
  }
end

# Fixture data reloaded lazily per example.
let(:nodes_fixture) { JSON.parse(File.read(fixture_path("nodes.json"))) }
let(:messages_fixture) { JSON.parse(File.read(fixture_path("messages.json"))) }

# Deterministic "now": slightly later than the newest fixture last_heard so
# time-derived values (rx_time defaults, ISO strings) are stable across runs.
let(:reference_time) do
  latest = nodes_fixture.map { |node| node["last_heard"] }.compact.max
  Time.at((latest || Time.now.to_i) + 1000)
end

before do
  # Install the spec token, freeze Time.now, and start from an empty DB.
  @original_token = ENV["API_TOKEN"]
  ENV["API_TOKEN"] = api_token
  allow(Time).to receive(:now).and_return(reference_time)
  clear_database
end

after do
  # Restore whatever API_TOKEN the surrounding environment had.
  ENV["API_TOKEN"] = @original_token
end
describe "logging configuration" do
  # Re-apply the logger level before and after each example so a stubbed
  # DEBUG constant cannot leak into (or out of) this group.
  before do
    Sinatra::Application.apply_logger_level!
  end

  after do
    Sinatra::Application.apply_logger_level!
  end

  it "defaults to WARN when debug logging is disabled" do
    expect(Sinatra::Application.settings.logger.level).to eq(Logger::WARN)
  end

  it "switches to DEBUG when debug logging is enabled" do
    stub_const("DEBUG", true)
    # apply_logger_level! must be re-run to pick up the stubbed constant.
    Sinatra::Application.apply_logger_level!
    expect(Sinatra::Application.settings.logger.level).to eq(Logger::DEBUG)
  end
end
describe "GET /" do
  # Smoke test: the index page renders without raising.
  it "responds successfully" do
    get "/"
    expect(last_response).to be_ok
  end
end
describe "database initialization" do
  it "creates the schema when booting" do
    # Requiring the app (done in spec_helper) should already have created the
    # SQLite file with both core tables in place.
    expect(File).to exist(DB_PATH)
    db = SQLite3::Database.new(DB_PATH, readonly: true)
    tables = db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages')").flatten
    expect(tables).to include("nodes")
    expect(tables).to include("messages")
  ensure
    # Close the read-only handle even when an expectation fails.
    db&.close
  end
end
describe "authentication" do
  # Mutating endpoints require "Authorization: Bearer <API_TOKEN>". Every
  # failure mode must return the same opaque 403 body so probing clients
  # learn nothing about why a request was rejected.
  it "rejects requests without a matching bearer token" do
    post "/api/nodes", {}.to_json, { "CONTENT_TYPE" => "application/json" }
    expect(last_response.status).to eq(403)
    expect(JSON.parse(last_response.body)).to eq("error" => "Forbidden")
  end

  it "rejects requests when the API token is not configured" do
    # With no server-side token there is nothing to compare against, so the
    # API must fail closed rather than accept everything.
    ENV["API_TOKEN"] = nil
    post "/api/messages", {}.to_json, { "CONTENT_TYPE" => "application/json" }
    expect(last_response.status).to eq(403)
    expect(JSON.parse(last_response.body)).to eq("error" => "Forbidden")
  ensure
    ENV["API_TOKEN"] = api_token
  end

  it "rejects requests with the wrong bearer token" do
    headers = auth_headers.merge("HTTP_AUTHORIZATION" => "Bearer wrong-token")
    post "/api/messages", {}.to_json, headers
    expect(last_response.status).to eq(403)
    expect(JSON.parse(last_response.body)).to eq("error" => "Forbidden")
  end

  it "does not accept alternate authorization schemes" do
    # HTTP Basic with arbitrary credentials must not bypass the bearer check.
    basic = Base64.strict_encode64("attacker:password")
    headers = auth_headers.merge("HTTP_AUTHORIZATION" => "Basic #{basic}")
    post "/api/nodes", {}.to_json, headers
    expect(last_response.status).to eq(403)
    expect(JSON.parse(last_response.body)).to eq("error" => "Forbidden")
  end

  it "rejects tokens with unexpected trailing characters" do
    # The token comparison must be exact: a trailing space is a different token.
    headers = auth_headers.merge("HTTP_AUTHORIZATION" => "Bearer #{api_token} ")
    post "/api/messages", {}.to_json, headers
    expect(last_response.status).to eq(403)
    expect(JSON.parse(last_response.body)).to eq("error" => "Forbidden")
  end
end
describe "POST /api/nodes" do
  it "imports nodes from fixture data into the database" do
    import_nodes_fixture
    # Expected persisted row per fixture node, keyed by node_id.
    expected_nodes = nodes_fixture.map do |node|
      [node["node_id"], expected_node_row(node)]
    end.to_h
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      rows = db.execute(<<~SQL)
        SELECT node_id, short_name, long_name, hw_model, role, snr,
               battery_level, voltage, last_heard, first_heard,
               uptime_seconds, channel_utilization, air_util_tx,
               position_time, latitude, longitude, altitude
        FROM nodes
        ORDER BY node_id
      SQL
      expect(rows.size).to eq(expected_nodes.size)
      rows.each do |row|
        expected = expected_nodes.fetch(row["node_id"])
        expect(row["short_name"]).to eq(expected["short_name"])
        expect(row["long_name"]).to eq(expected["long_name"])
        expect(row["hw_model"]).to eq(expected["hw_model"])
        expect(row["role"]).to eq(expected["role"])
        # Float-capable columns go through the tolerance-aware helper.
        expect_same_value(row["snr"], expected["snr"])
        expect_same_value(row["battery_level"], expected["battery_level"])
        expect_same_value(row["voltage"], expected["voltage"])
        expect(row["last_heard"]).to eq(expected["last_heard"])
        expect(row["first_heard"]).to eq(expected["first_heard"])
        expect_same_value(row["uptime_seconds"], expected["uptime_seconds"])
        expect_same_value(row["channel_utilization"], expected["channel_utilization"])
        expect_same_value(row["air_util_tx"], expected["air_util_tx"])
        expect_same_value(row["position_time"], expected["position_time"])
        expect_same_value(row["latitude"], expected["latitude"])
        expect_same_value(row["longitude"], expected["longitude"])
        expect_same_value(row["altitude"], expected["altitude"])
      end
    end
  end

  it "returns 400 when the payload is not valid JSON" do
    post "/api/nodes", "{", auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "invalid JSON")
  end

  it "returns 400 when more than 1000 nodes are provided" do
    # 0..1000 inclusive yields 1001 entries — one past the limit.
    payload = (0..1000).each_with_object({}) do |i, acc|
      acc["node-#{i}"] = {}
    end
    post "/api/nodes", payload.to_json, auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "too many nodes")
    # An over-limit batch must be rejected atomically: nothing persisted.
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM nodes")
      expect(count).to eq(0)
    end
  end

  it "returns 413 when the request body exceeds the configured byte limit" do
    limit = 64
    stub_const("MAX_JSON_BODY_BYTES", limit)
    payload = { "huge-node" => { "user" => { "shortName" => "A" * (limit + 50) } } }.to_json
    expect(payload.bytesize).to be > limit
    post "/api/nodes", payload, auth_headers
    expect(last_response.status).to eq(413)
    expect(JSON.parse(last_response.body)).to eq("error" => "payload too large")
    # Oversized payloads must not be partially applied.
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM nodes")
      expect(count).to eq(0)
    end
  end

  it "treats SQL-looking node identifiers as plain data" do
    # A classic injection string must be stored verbatim, never executed.
    malicious_id = "spec-node'); DROP TABLE nodes;--"
    payload = {
      malicious_id => {
        "user" => { "shortName" => "Spec Attack" },
        "lastHeard" => reference_time.to_i,
      },
    }
    post "/api/nodes", payload.to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      row = db.get_first_row(
        "SELECT node_id, short_name FROM nodes WHERE node_id = ?",
        [malicious_id],
      )
      expect(row["node_id"]).to eq(malicious_id)
      expect(row["short_name"]).to eq("Spec Attack")
      # The nodes table must still exist after the attempted DROP.
      tables = db.get_first_value(
        "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='nodes'",
      )
      expect(tables).to eq(1)
    end
  end

  it "retries node upserts when the database reports it is locked" do
    node = nodes_fixture.first
    payload = { node["node_id"] => build_node_payload(node) }
    call_count = 0
    # Fail the first INSERT with SQLite's "busy" error, then delegate to the
    # real execute so the retry path can complete the upsert.
    allow_any_instance_of(SQLite3::Database).to receive(:execute).and_wrap_original do |method, sql, *args|
      if sql.include?("INSERT INTO nodes")
        call_count += 1
        raise SQLite3::BusyException, "database is locked" if call_count == 1
      end
      method.call(sql, *args)
    end
    post "/api/nodes", payload.to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    # At least two INSERT attempts prove the retry actually happened.
    expect(call_count).to be >= 2
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM nodes WHERE node_id = ?", [node["node_id"]])
      expect(count).to eq(1)
      last_heard = db.get_first_value("SELECT last_heard FROM nodes WHERE node_id = ?", [node["node_id"]])
      expect(last_heard).to eq(expected_last_heard(node))
    end
  end
end
describe "POST /api/messages" do
  it "persists messages from fixture data" do
    import_nodes_fixture
    import_messages_fixture
    # The embedded "node" object is spec-only metadata, not a stored column.
    expected_messages = messages_fixture.map do |message|
      [message["id"], message.reject { |key, _| key == "node" }]
    end.to_h
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      rows = db.execute(<<~SQL)
        SELECT id, rx_time, rx_iso, from_id, to_id, channel,
               portnum, text, snr, rssi, hop_limit
        FROM messages
        ORDER BY id
      SQL
      expect(rows.size).to eq(expected_messages.size)
      rows.each do |row|
        expected = expected_messages.fetch(row["id"])
        expect(row["rx_time"]).to eq(expected["rx_time"])
        expect(row["rx_iso"]).to eq(expected["rx_iso"])
        expect(row["from_id"]).to eq(expected["from_id"])
        expect(row["to_id"]).to eq(expected["to_id"])
        expect(row["channel"]).to eq(expected["channel"])
        expect(row["portnum"]).to eq(expected["portnum"])
        expect(row["text"]).to eq(expected["text"])
        # SNR is a float; compare with tolerance.
        expect_same_value(row["snr"], expected["snr"])
        expect(row["rssi"]).to eq(expected["rssi"])
        expect(row["hop_limit"]).to eq(expected["hop_limit"])
      end
    end
  end

  it "returns 400 when the payload is not valid JSON" do
    post "/api/messages", "{", auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "invalid JSON")
  end

  it "rejects message payloads that are larger than the configured byte limit" do
    limit = 64
    stub_const("MAX_JSON_BODY_BYTES", limit)
    payload = [{ "id" => "m1", "text" => "A" * (limit + 50) }].to_json
    expect(payload.bytesize).to be > limit
    post "/api/messages", payload, auth_headers
    expect(last_response.status).to eq(413)
    expect(JSON.parse(last_response.body)).to eq("error" => "payload too large")
    # Oversized payloads must not be partially applied.
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM messages")
      expect(count).to eq(0)
    end
  end

  it "returns 400 when more than 1000 messages are provided" do
    # 1001 entries — one past the documented batch limit.
    payload = Array.new(1001) { |i| { "packet_id" => i + 1 } }
    post "/api/messages", payload.to_json, auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "too many messages")
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM messages")
      expect(count).to eq(0)
    end
  end

  it "accepts array payloads, normalizes node references, and skips messages without an id" do
    # Register a node whose numeric alias (num=123) should be rewritten to
    # the canonical node id on incoming messages.
    node_id = "!spec-normalized"
    node_payload = {
      node_id => {
        "num" => 123,
        "user" => { "shortName" => "Spec" },
        "lastHeard" => reference_time.to_i - 60,
        "position" => { "time" => reference_time.to_i - 120 },
      },
    }
    post "/api/nodes", node_payload.to_json, auth_headers
    expect(last_response).to be_ok
    messages_payload = [
      {
        "packet_id" => 101,
        "from_id" => "123",
        "text" => "normalized",
      },
      {
        "packet_id" => 102,
        "from_id" => " ",
        "text" => "blank",
      },
      {
        "text" => "missing id",
      },
    ]
    post "/api/messages", messages_payload.to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      rows = db.execute("SELECT id, from_id, rx_time, rx_iso, text FROM messages ORDER BY id")
      # The entry without a packet_id must be dropped entirely.
      expect(rows.size).to eq(2)
      first, second = rows
      expect(first["id"]).to eq(101)
      # The numeric alias "123" resolves to the canonical node id.
      expect(first["from_id"]).to eq(node_id)
      # Missing rx_time/rx_iso default to the (stubbed) current time.
      expect(first["rx_time"]).to eq(reference_time.to_i)
      expect(first["rx_iso"]).to eq(reference_time.utc.iso8601)
      expect(first["text"]).to eq("normalized")
      expect(second["id"]).to eq(102)
      # A whitespace-only sender is normalized to NULL.
      expect(second["from_id"]).to be_nil
      expect(second["rx_time"]).to eq(reference_time.to_i)
      expect(second["rx_iso"]).to eq(reference_time.utc.iso8601)
      expect(second["text"]).to eq("blank")
    end
  end

  it "stores messages containing SQL control characters without executing them" do
    payload = {
      "packet_id" => 404,
      "from_id" => "attacker",
      "text" => "'); DROP TABLE nodes;--",
    }
    post "/api/messages", payload.to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      row = db.get_first_row(
        "SELECT id, text FROM messages WHERE id = ?",
        [404],
      )
      expect(row["id"]).to eq(404)
      # The injection attempt must be stored verbatim as message text.
      expect(row["text"]).to eq("'); DROP TABLE nodes;--")
      # ...and the nodes table must still exist afterwards.
      tables = db.get_first_value(
        "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='nodes'",
      )
      expect(tables).to eq(1)
    end
  end

  it "updates existing messages only when sender information is provided" do
    message_id = 9001
    initial_time = reference_time.to_i - 120
    initial_iso = Time.at(initial_time).utc.iso8601
    base_payload = {
      "packet_id" => message_id,
      "rx_time" => initial_time,
      "rx_iso" => initial_iso,
      "to_id" => "^all",
      "channel" => 1,
      "portnum" => "TEXT_MESSAGE_APP",
      "text" => "initial payload",
      "snr" => 7.25,
      "rssi" => -58,
      "hop_limit" => 2,
    }
    # Step 1: insert with an explicitly nil sender — the row is created.
    post "/api/messages", base_payload.merge("from_id" => nil).to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      row = db.get_first_row("SELECT id, from_id, rx_time, rx_iso, text FROM messages WHERE id = ?", [message_id])
      expect(row["from_id"]).to be_nil
      expect(row["rx_time"]).to eq(initial_time)
      expect(row["rx_iso"]).to eq(initial_iso)
      expect(row["text"]).to eq("initial payload")
    end
    # Step 2: a re-post with a blank sender must NOT overwrite anything.
    updated_time = initial_time + 60
    updated_iso = Time.at(updated_time).utc.iso8601
    post "/api/messages", base_payload.merge(
      "rx_time" => updated_time,
      "rx_iso" => updated_iso,
      "text" => "overwritten without sender",
      "from_id" => " ",
    ).to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      row = db.get_first_row("SELECT id, from_id, rx_time, rx_iso, text FROM messages WHERE id = ?", [message_id])
      expect(row["from_id"]).to be_nil
      expect(row["rx_time"]).to eq(initial_time)
      expect(row["rx_iso"]).to eq(initial_iso)
      expect(row["text"]).to eq("initial payload")
    end
    # Step 3: a re-post carrying a real sender (via the "from" key) updates
    # only the sender; the original timestamps and text are preserved.
    final_time = updated_time + 30
    final_iso = Time.at(final_time).utc.iso8601
    post "/api/messages", base_payload.merge(
      "rx_time" => final_time,
      "rx_iso" => final_iso,
      "from" => "!spec-sender",
    ).to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      row = db.get_first_row("SELECT id, from_id, rx_time, rx_iso, text FROM messages WHERE id = ?", [message_id])
      expect(row["from_id"]).to eq("!spec-sender")
      expect(row["rx_time"]).to eq(initial_time)
      expect(row["rx_iso"]).to eq(initial_iso)
      expect(row["text"]).to eq("initial payload")
    end
  end
end
describe "GET /api/nodes" do
  it "returns the stored nodes with derived timestamps" do
    import_nodes_fixture
    get "/api/nodes"
    expect(last_response).to be_ok
    actual = JSON.parse(last_response.body)
    expect(actual.size).to eq(nodes_fixture.size)
    # Index the response by node_id so fixtures can be checked in any order.
    actual_by_id = actual.each_with_object({}) do |row, acc|
      acc[row["node_id"]] = row
    end
    nodes_fixture.each do |node|
      expected = expected_node_row(node)
      actual_row = actual_by_id.fetch(node["node_id"])
      expect(actual_row["short_name"]).to eq(expected["short_name"])
      expect(actual_row["long_name"]).to eq(expected["long_name"])
      expect(actual_row["hw_model"]).to eq(expected["hw_model"])
      expect(actual_row["role"]).to eq(expected["role"])
      # Float-capable columns go through the tolerance-aware helper.
      expect_same_value(actual_row["snr"], expected["snr"])
      expect_same_value(actual_row["battery_level"], expected["battery_level"])
      expect_same_value(actual_row["voltage"], expected["voltage"])
      expect(actual_row["last_heard"]).to eq(expected["last_heard"])
      expect(actual_row["first_heard"]).to eq(expected["first_heard"])
      expect_same_value(actual_row["uptime_seconds"], expected["uptime_seconds"])
      expect_same_value(actual_row["channel_utilization"], expected["channel_utilization"])
      expect_same_value(actual_row["air_util_tx"], expected["air_util_tx"])
      expect_same_value(actual_row["position_time"], expected["position_time"])
      expect_same_value(actual_row["latitude"], expected["latitude"])
      expect_same_value(actual_row["longitude"], expected["longitude"])
      expect_same_value(actual_row["altitude"], expected["altitude"])
      # last_seen_iso mirrors last_heard as an ISO-8601 UTC string.
      if expected["last_heard"]
        expected_last_seen_iso = Time.at(expected["last_heard"]).utc.iso8601
        expect(actual_row["last_seen_iso"]).to eq(expected_last_seen_iso)
      else
        expect(actual_row["last_seen_iso"]).to be_nil
      end
      # pos_time_iso is only emitted when the node reported a position time.
      if node["position_time"]
        expected_pos_iso = Time.at(node["position_time"]).utc.iso8601
        expect(actual_row["pos_time_iso"]).to eq(expected_pos_iso)
      else
        expect(actual_row).not_to have_key("pos_time_iso")
      end
    end
  end
end
describe "GET /api/messages" do
  it "returns the stored messages along with joined node data" do
    import_nodes_fixture
    import_messages_fixture
    get "/api/messages"
    expect(last_response).to be_ok
    actual = JSON.parse(last_response.body)
    expect(actual.size).to eq(messages_fixture.size)
    actual_by_id = actual.each_with_object({}) do |row, acc|
      acc[row["id"]] = row
    end
    # Build two lookup tables: canonical node rows by node_id, and an alias
    # map from each node's numeric "num" back to its canonical id.
    nodes_by_id = {}
    node_aliases = {}
    nodes_fixture.each do |node|
      node_id = node["node_id"]
      expected_row = expected_node_row(node)
      nodes_by_id[node_id] = expected_row
      if (num = node["num"])
        node_aliases[num.to_s] = node_id
      end
    end
    # Message fixtures may carry extra alias info in their embedded "node"
    # metadata; only fill gaps, never override a fixture-declared alias.
    messages_fixture.each do |message|
      node = message["node"]
      next unless node.is_a?(Hash)
      canonical = node["node_id"]
      num = node["num"]
      next unless canonical && num
      node_aliases[num.to_s] ||= canonical
    end
    messages_fixture.each do |message|
      expected = message.reject { |key, _| key == "node" }
      actual_row = actual_by_id.fetch(message["id"])
      expect(actual_row["rx_time"]).to eq(expected["rx_time"])
      expect(actual_row["rx_iso"]).to eq(expected["rx_iso"])
      expect(actual_row["from_id"]).to eq(expected["from_id"])
      expect(actual_row["to_id"]).to eq(expected["to_id"])
      expect(actual_row["channel"]).to eq(expected["channel"])
      expect(actual_row["portnum"]).to eq(expected["portnum"])
      expect(actual_row["text"]).to eq(expected["text"])
      expect_same_value(actual_row["snr"], expected["snr"])
      expect(actual_row["rssi"]).to eq(expected["rssi"])
      expect(actual_row["hop_limit"]).to eq(expected["hop_limit"])
      if expected["from_id"]
        # Resolve the sender to a fixture node, falling back to the numeric
        # alias map when from_id is not a canonical node_id.
        lookup_id = expected["from_id"]
        node_expected = nodes_by_id[lookup_id]
        unless node_expected
          canonical_id = node_aliases[lookup_id.to_s]
          expect(canonical_id).not_to be_nil,
            "node fixture missing for from_id #{lookup_id.inspect}"
          node_expected = nodes_by_id.fetch(canonical_id)
        end
        node_actual = actual_row.fetch("node")
        expect(node_actual["node_id"]).to eq(node_expected["node_id"])
        expect(node_actual["short_name"]).to eq(node_expected["short_name"])
        expect(node_actual["long_name"]).to eq(node_expected["long_name"])
        expect(node_actual["role"]).to eq(node_expected["role"])
        expect_same_value(node_actual["snr"], node_expected["snr"])
        expect_same_value(node_actual["battery_level"], node_expected["battery_level"])
        expect_same_value(node_actual["voltage"], node_expected["voltage"])
        expect(node_actual["last_heard"]).to eq(node_expected["last_heard"])
        expect(node_actual["first_heard"]).to eq(node_expected["first_heard"])
        expect_same_value(node_actual["latitude"], node_expected["latitude"])
        expect_same_value(node_actual["longitude"], node_expected["longitude"])
        expect_same_value(node_actual["altitude"], node_expected["altitude"])
      else
        # Senderless messages still get a node object, just an empty one.
        expect(actual_row["node"]).to be_a(Hash)
        expect(actual_row["node"]["node_id"]).to be_nil
      end
    end
  end

  context "when DEBUG logging is enabled" do
    it "logs diagnostics for messages missing a sender" do
      stub_const("DEBUG", true)
      # Capture Kernel.warn so the debug output can be asserted on.
      allow(Kernel).to receive(:warn)
      message_id = 987_654
      payload = {
        "packet_id" => message_id,
        "from_id" => " ",
        "text" => "debug logging",
      }
      post "/api/messages", payload.to_json, auth_headers
      expect(last_response).to be_ok
      expect(JSON.parse(last_response.body)).to eq("status" => "ok")
      get "/api/messages"
      expect(last_response).to be_ok
      # One diagnostic line per pipeline stage: raw row, joined row,
      # fully-processed row.
      expect(Kernel).to have_received(:warn).with(
        a_string_matching(/\[debug\] messages row before join: .*"id"\s*=>\s*#{message_id}/),
      )
      expect(Kernel).to have_received(:warn).with(
        a_string_matching(/\[debug\] row after join: .*"id"\s*=>\s*#{message_id}/),
      )
      expect(Kernel).to have_received(:warn).with(
        a_string_matching(/\[debug\] row after processing: .*"id"\s*=>\s*#{message_id}/),
      )
      messages = JSON.parse(last_response.body)
      expect(messages.size).to eq(1)
      # The blank sender remains NULL in the API response.
      expect(messages.first["from_id"]).to be_nil
    end
  end
end
end
+62
View File
@@ -0,0 +1,62 @@
# Copyright (C) 2025 l5yth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# frozen_string_literal: true
require "simplecov"
require "simplecov_json_formatter"

# Emit coverage three ways: a console summary, an HTML report, and JSON.
SimpleCov.formatters = SimpleCov::Formatter::MultiFormatter.new(
  [
    SimpleCov::Formatter::SimpleFormatter,
    SimpleCov::Formatter::HTMLFormatter,
    SimpleCov::Formatter::JSONFormatter,
  ],
)

# Coverage must start before the application code is required further below,
# or the app's lines would be loaded untracked.
SimpleCov.start do
  enable_coverage :branch
  add_filter "/spec/"
end
require "tmpdir"
require "fileutils"

# Run the app in test mode against a throwaway SQLite database so specs never
# touch real data. MESH_DB must be set BEFORE the app is required, since the
# app opens its database at load time.
ENV["RACK_ENV"] = "test"
SPEC_TMPDIR = Dir.mktmpdir("potato-mesh-spec-")
ENV["MESH_DB"] = File.join(SPEC_TMPDIR, "mesh.db")
require_relative "../app"
require "rack/test"
require "rspec"
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    # Include chained clauses in custom matcher failure descriptions.
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end

  config.mock_with :rspec do |mocks|
    # Fail fast when a stub targets a method the real object does not have.
    mocks.verify_partial_doubles = true
  end

  config.shared_context_metadata_behavior = :apply_to_host_groups

  # Provide Rack::Test's get/post helpers in every example group.
  config.include Rack::Test::Methods

  config.after(:suite) do
    # Remove the temporary database directory created above.
    FileUtils.remove_entry(SPEC_TMPDIR) if File.directory?(SPEC_TMPDIR)
  end
end
+984
View File
@@ -0,0 +1,984 @@
<!doctype html>
<!--
Copyright (C) 2025 l5yth
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<title><%= site_name %></title>
<link rel="icon" type="image/svg+xml" href="/potatomesh-logo.svg" />
<% refresh_interval_seconds = 60 %>
<!-- Leaflet CSS/JS (CDN) -->
<link
rel="stylesheet"
href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
crossorigin=""
/>
<script
src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo="
crossorigin=""
></script>
<style>
:root { --pad: 16px; }
body { font-family: system-ui, Segoe UI, Roboto, Ubuntu, Arial, sans-serif; margin: var(--pad); padding-bottom: 32px; }
h1 { margin: 0 0 8px }
.site-title { display: inline-flex; align-items: center; gap: 12px; }
.site-title img { width: 52px; height: 52px; display: block; border-radius: 12px; }
.meta { color:#555; margin-bottom:12px }
.pill{ display:inline-block; padding:2px 8px; border-radius:999px; background:#eee; font-size:12px }
#map { flex: 1; height: 60vh; border: 1px solid #ddd; border-radius: 8px; }
table { border-collapse: collapse; width: 100%; margin: 0; }
th, td { padding: 4px 6px; text-align: left; }
th { position: sticky; top: 0; background: #fafafa; }
.mono { font-family: ui-monospace, Menlo, Consolas, monospace; }
.row { display: flex; gap: var(--pad); align-items: center; justify-content: space-between; }
.map-row { display: flex; gap: var(--pad); align-items: stretch; }
#chat { flex: 0 0 33%; max-width: 33%; height: 60vh; border: 1px solid #ddd; border-radius: 8px; overflow-y: auto; padding: 6px; font-size: 12px; }
.chat-entry-node { font-family: ui-monospace, Menlo, Consolas, monospace; color: #555 }
.chat-entry-msg { font-family: ui-monospace, Menlo, Consolas, monospace; }
.chat-entry-date { font-family: ui-monospace, Menlo, Consolas, monospace; font-weight: bold; }
.short-name { display:inline-block; border-radius:4px; padding:0 2px; }
.short-name[data-node-info] { cursor: pointer; }
.short-info-overlay { position: absolute; background: #fff; color: #111; border: 1px solid #ccc; border-radius: 8px; box-shadow: 0 8px 24px rgba(0, 0, 0, 0.18); padding: 8px 10px 10px; font-size: 11px; line-height: 1.4; min-width: 200px; max-width: 240px; z-index: 2000; }
.short-info-overlay[hidden] { display: none; }
.short-info-overlay .short-info-close { position: absolute; top: 4px; right: 4px; border: none; background: transparent; font-size: 14px; line-height: 1; padding: 2px; border-radius: 4px; cursor: pointer; color: inherit; }
.short-info-overlay .short-info-close:hover { background: rgba(0, 0, 0, 0.08); }
.short-info-content { margin: 0; }
.meta-info { display: flex; flex-direction: column; gap: 6px; align-items: flex-start; }
.refresh-row { display: grid; grid-template-columns: minmax(0, 1fr) auto; gap: 12px; align-items: start; width: 100%; }
.refresh-info { margin: 0; color: #555; }
.refresh-actions { display: flex; gap: 8px; align-items: center; flex-wrap: wrap; justify-self: end; }
.auto-refresh-toggle { display: inline-flex; align-items: center; gap: 6px; }
.controls { display: flex; gap: 8px; align-items: center; }
.controls label { display: inline-flex; align-items: center; gap: 6px; }
button { padding: 6px 10px; border: 1px solid #ccc; background: #fff; border-radius: 6px; cursor: pointer; }
button:hover { background: #f6f6f6; }
.sort-button { padding: 0; border: none; background: none; color: inherit; font: inherit; cursor: pointer; display: inline-flex; align-items: center; gap: 4px; }
.sort-button:hover { background: none; }
.sort-button:focus-visible { outline: 2px solid #4a90e2; outline-offset: 2px; }
.sort-indicator { font-size: 0.75em; opacity: 0.6; }
th[aria-sort] .sort-indicator { opacity: 1; }
label { font-size: 14px; color: #333; }
input[type="text"] { padding: 6px 10px; border: 1px solid #ccc; border-radius: 6px; }
.legend { background: #fff; padding: 6px 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 12px; line-height: 18px; }
.legend span { display: inline-block; width: 12px; height: 12px; margin-right: 6px; vertical-align: middle; }
#map .leaflet-tile { filter: opacity(70%); }
.leaflet-popup-content-wrapper,
.leaflet-popup-tip {
background: #fff;
color: #333;
box-shadow: 0 3px 14px rgba(0, 0, 0, 0.4);
}
#nodes { font-size: 12px; }
footer { position: fixed; bottom: 0; left: var(--pad); width: calc(100% - 2 * var(--pad)); background: #fafafa; border-top: 1px solid #ddd; text-align: center; font-size: 12px; padding: 4px 0; }
.info-overlay { position: fixed; inset: 0; background: rgba(0, 0, 0, 0.45); display: flex; align-items: center; justify-content: center; padding: var(--pad); z-index: 1000; }
.info-overlay[hidden] { display: none; }
.info-dialog { background: #fff; color: #111; max-width: 420px; width: min(100%, 420px); border-radius: 12px; box-shadow: 0 16px 40px rgba(0, 0, 0, 0.2); position: relative; padding: 20px 24px; outline: none; }
.info-dialog:focus { outline: 2px solid #4a90e2; outline-offset: 4px; }
.info-close { position: absolute; top: 10px; right: 10px; padding: 4px; border: none; background: transparent; font-size: 20px; line-height: 1; border-radius: 999px; }
.info-close:hover { background: rgba(0, 0, 0, 0.06); }
.info-title { margin: 0 0 8px; font-size: 20px; }
.info-intro { margin: 0 0 12px; font-size: 14px; color: #444; }
.info-details { margin: 0; font-size: 14px; line-height: 1.6; }
.info-details dt { font-weight: 600; margin-top: 12px; color: #222; }
.info-details dd { margin: 4px 0 0; }
.info-details dd a { color: inherit; word-break: break-word; }
@media (max-width: 1280px) {
#nodes th:nth-child(12),
#nodes td:nth-child(12),
#nodes th:nth-child(13),
#nodes td:nth-child(13),
#nodes th:nth-child(14),
#nodes td:nth-child(14),
#nodes th:nth-child(15),
#nodes td:nth-child(15) {
display: none;
}
}
@media (max-width: 768px) {
.row { flex-direction: column; align-items: stretch; gap: var(--pad); }
.site-title img { width: 44px; height: 44px; }
.map-row { flex-direction: column; }
.controls { order: 2; display: grid; grid-template-columns: auto minmax(0, 1fr) auto auto; align-items: center; width: 100%; gap: 12px; }
.controls input[type="text"] { width: 100%; }
.controls button { justify-self: end; }
.meta-info { order: 1; width: 100%; }
.refresh-row { grid-template-columns: 1fr; row-gap: 8px; }
.refresh-actions { flex-direction: row; align-items: center; gap: 8px; justify-self: start; flex-wrap: nowrap; }
#map { order: 1; flex: none; max-width: 100%; height: 50vh; }
#chat { order: 2; flex: none; max-width: 100%; height: 30vh; }
#nodes th:nth-child(1),
#nodes td:nth-child(1),
#nodes th:nth-child(5),
#nodes td:nth-child(5),
#nodes th:nth-child(6),
#nodes td:nth-child(6),
#nodes th:nth-child(9),
#nodes td:nth-child(9),
#nodes th:nth-child(12),
#nodes td:nth-child(12),
#nodes th:nth-child(13),
#nodes td:nth-child(13),
#nodes th:nth-child(14),
#nodes td:nth-child(14),
#nodes th:nth-child(15),
#nodes td:nth-child(15) {
display: none;
}
}
/* Dark mode overrides */
body.dark { background: #111; color: #eee; }
body.dark .meta { color: #bbb; }
body.dark .refresh-info { color: #bbb; }
body.dark .pill { background: #444; }
body.dark #map { border-color: #444; }
body.dark #chat { border-color: #444; background: #222; color: #eee; }
body.dark th { background: #222; }
body.dark button { background: #333; border-color: #444; color: #eee; }
body.dark button:hover { background: #444; }
body.dark .sort-button { background: none; border: none; color: inherit; }
body.dark .sort-button:hover { background: none; }
body.dark label { color: #ddd; }
body.dark input[type="text"] { background: #222; color: #eee; border-color: #444; }
body.dark .legend { background: #333; border-color: #444; color: #eee; }
body.dark .leaflet-popup-content-wrapper,
body.dark .leaflet-popup-tip {
background: #333;
color: #eee;
box-shadow: 0 3px 14px rgba(0, 0, 0, 0.8);
}
body.dark footer { background: #222; border-top-color: #444; color: #eee; }
body.dark a { color: #9bd; }
body.dark .chat-entry-node { color: #777 }
body.dark .chat-entry-msg { color: #bbb }
body.dark .short-name { color: #555 }
body.dark .chat-entry-date { color: #bbb }
body.dark .info-overlay { background: rgba(0, 0, 0, 0.7); }
body.dark .info-dialog { background: #1c1c1c; color: #eee; border: 1px solid #444; }
body.dark .info-intro { color: #bbb; }
body.dark .info-details dt { color: #ddd; }
body.dark .info-close:hover { background: rgba(255, 255, 255, 0.1); }
body.dark .short-info-overlay { background: #1c1c1c; border-color: #444; color: #eee; box-shadow: 0 8px 24px rgba(0, 0, 0, 0.55); }
body.dark .short-info-overlay .short-info-close:hover { background: rgba(255, 255, 255, 0.1); }
</style>
</head>
<body>
<h1 class="site-title">
<img src="/potatomesh-logo.svg" alt="" aria-hidden="true" />
<span class="site-title-text"><%= site_name %></span>
</h1>
<div class="row meta">
<div class="meta-info">
<div class="refresh-row">
<p id="refreshInfo" class="refresh-info" aria-live="polite"><%= default_channel %> (<%= default_frequency %>) — active nodes: …</p>
<div class="refresh-actions">
<label class="auto-refresh-toggle"><input type="checkbox" id="autoRefresh" checked /> Auto-refresh every <%= refresh_interval_seconds %> seconds</label>
<button id="refreshBtn" type="button">Refresh now</button>
<span id="status" class="pill">loading…</span>
</div>
</div>
</div>
<div class="controls">
<label><input type="checkbox" id="fitBounds" checked /> Auto-fit map</label>
<input type="text" id="filterInput" placeholder="Filter nodes" />
<button id="themeToggle" type="button" aria-label="Toggle dark mode">🌙</button>
<button id="infoBtn" type="button" aria-haspopup="dialog" aria-controls="infoOverlay" aria-label="Show site information">️ Info</button>
</div>
</div>
<div id="infoOverlay" class="info-overlay" role="dialog" aria-modal="true" aria-labelledby="infoTitle" hidden>
<div class="info-dialog" tabindex="-1">
<button type="button" class="info-close" id="infoClose" aria-label="Close site information">×</button>
<h2 id="infoTitle" class="info-title">About <%= site_name %></h2>
<p class="info-intro">Quick facts about this PotatoMesh instance.</p>
<dl class="info-details">
<dt>Default channel</dt>
<dd><%= default_channel %></dd>
<dt>Frequency</dt>
<dd><%= default_frequency %></dd>
<dt>Map center</dt>
<dd><%= format("%.5f, %.5f", map_center_lat, map_center_lon) %></dd>
<dt>Visible range</dt>
<dd>Nodes within roughly <%= max_node_distance_km %> km of the center are shown.</dd>
<dt>Auto-refresh</dt>
<dd>Updates every <%= refresh_interval_seconds %> seconds.</dd>
<% if matrix_room && !matrix_room.empty? %>
<dt>Matrix room</dt>
<dd><a href="https://matrix.to/#/<%= matrix_room %>" target="_blank" rel="noreferrer noopener"><%= matrix_room %></a></dd>
<% end %>
</dl>
</div>
</div>
<div class="map-row">
<div id="chat" aria-label="Chat log"></div>
<div id="map" role="region" aria-label="Nodes map"></div>
</div>
<table id="nodes">
<thead>
<tr>
<th><button type="button" class="sort-button" data-sort-key="node_id" data-sort-label="Node ID">Node ID <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="short_name" data-sort-label="Short Name">Short <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="long_name" data-sort-label="Long Name">Long Name <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="last_heard" data-sort-label="Last Seen">Last Seen <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="role" data-sort-label="Role">Role <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="hw_model" data-sort-label="Hardware Model">HW Model <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="battery_level" data-sort-label="Battery Level">Battery <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="voltage" data-sort-label="Voltage">Voltage <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="uptime_seconds" data-sort-label="Uptime">Uptime <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="channel_utilization" data-sort-label="Channel Utilization">Channel Util <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="air_util_tx" data-sort-label="Air Utilization (Tx)">Air Util Tx <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="latitude" data-sort-label="Latitude">Latitude <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="longitude" data-sort-label="Longitude">Longitude <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="altitude" data-sort-label="Altitude">Altitude <span class="sort-indicator" aria-hidden="true"></span></button></th>
<th><button type="button" class="sort-button" data-sort-key="position_time" data-sort-label="Last Position">Last Position <span class="sort-indicator" aria-hidden="true"></span></button></th>
</tr>
</thead>
<tbody></tbody>
</table>
<div id="shortInfoOverlay" class="short-info-overlay" role="dialog" hidden>
<button type="button" class="short-info-close" aria-label="Close node details">×</button>
<div class="short-info-content"></div>
</div>
<footer>
PotatoMesh GitHub: <a href="https://github.com/l5yth/potato-mesh" target="_blank">l5yth/potato-mesh</a>
<% if matrix_room && !matrix_room.empty? %>
— <%= site_name %> Matrix:
<a href="https://matrix.to/#/<%= matrix_room %>" target="_blank"><%= matrix_room %></a>
<% end %>
</footer>
<script>
// --- Cached DOM references (optional elements are guarded with null checks) ---
const statusEl = document.getElementById('status');
const fitBoundsEl = document.getElementById('fitBounds');
const autoRefreshEl = document.getElementById('autoRefresh');
const refreshBtn = document.getElementById('refreshBtn');
const filterInput = document.getElementById('filterInput');
const themeToggle = document.getElementById('themeToggle');
const infoBtn = document.getElementById('infoBtn');
const infoOverlay = document.getElementById('infoOverlay');
const infoClose = document.getElementById('infoClose');
const infoDialog = infoOverlay ? infoOverlay.querySelector('.info-dialog') : null;
const shortInfoOverlay = document.getElementById('shortInfoOverlay');
const shortInfoClose = shortInfoOverlay ? shortInfoOverlay.querySelector('.short-info-close') : null;
const shortInfoContent = shortInfoOverlay ? shortInfoOverlay.querySelector('.short-info-content') : null;
const titleEl = document.querySelector('title');
const headerEl = document.querySelector('h1');
const headerTitleTextEl = headerEl ? headerEl.querySelector('.site-title-text') : null;
const chatEl = document.getElementById('chat');
const refreshInfo = document.getElementById('refreshInfo');
const baseTitle = document.title;
const nodesTable = document.getElementById('nodes');
const sortButtons = nodesTable ? Array.from(nodesTable.querySelectorAll('thead .sort-button[data-sort-key]')) : [];
// Per-column sort configuration: how to extract a value from a node row, how
// to compare two values, how to detect presence, and which direction a column
// starts in when first clicked.
const tableSorters = {
node_id: { getValue: n => n.node_id, compare: compareString, hasValue: hasStringValue, defaultDirection: 'asc' },
short_name: { getValue: n => n.short_name, compare: compareString, hasValue: hasStringValue, defaultDirection: 'asc' },
long_name: { getValue: n => n.long_name, compare: compareString, hasValue: hasStringValue, defaultDirection: 'asc' },
last_heard: { getValue: n => n.last_heard, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
role: { getValue: n => n.role, compare: compareString, hasValue: hasStringValue, defaultDirection: 'asc' },
hw_model: { getValue: n => n.hw_model, compare: compareString, hasValue: hasStringValue, defaultDirection: 'asc' },
battery_level: { getValue: n => n.battery_level, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
voltage: { getValue: n => n.voltage, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
uptime_seconds: { getValue: n => n.uptime_seconds, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
channel_utilization: { getValue: n => n.channel_utilization, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
air_util_tx: { getValue: n => n.air_util_tx, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
latitude: { getValue: n => n.latitude, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'asc' },
longitude: { getValue: n => n.longitude, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'asc' },
altitude: { getValue: n => n.altitude, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' },
position_time: { getValue: n => n.position_time, compare: compareNumber, hasValue: hasNumberValue, defaultDirection: 'desc' }
};
// Active sort column/direction; defaults to "most recently heard first".
let sortState = {
key: 'last_heard',
direction: tableSorters.last_heard ? tableSorters.last_heard.defaultDirection : 'desc'
};
// Client-side state: latest unfiltered node list, overlay anchor element, and
// sets of ids already announced in the chat stream.
let allNodes = [];
let shortInfoAnchor = null;
const seenNodeIds = new Set();
const seenMessageIds = new Set();
// Day key of the last chat entry, used to insert date dividers.
let lastChatDate;
// Fetch limits and the refresh cadence (server-configured via ERB).
const NODE_LIMIT = 1000;
const CHAT_LIMIT = 1000;
const REFRESH_MS = <%= refresh_interval_seconds * 1000 %>;
refreshInfo.textContent = `<%= default_channel %> (<%= default_frequency %>) — active nodes: …`;
let refreshTimer = null;
// True when the value stringifies to something other than pure whitespace.
function hasStringValue(value) {
  if (value == null) return false;
  const text = String(value).trim();
  return text !== '';
}
// True when the value is (or coerces to) a finite number; null, undefined and
// the empty string are rejected outright so they never coerce to 0.
function hasNumberValue(value) {
  if (value == null || value === '') return false;
  return Number.isFinite(typeof value === 'number' ? value : Number(value));
}
// Locale-aware, case-insensitive, numeric-aware string comparator.
// Blank/absent values always sort after non-blank ones.
function compareString(a, b) {
  const left = a == null ? '' : String(a).trim();
  const right = b == null ? '' : String(b).trim();
  if (left === '' && right === '') return 0;
  if (left === '') return 1;
  if (right === '') return -1;
  return left.localeCompare(right, undefined, { numeric: true, sensitivity: 'base' });
}
// Numeric comparator with coercion. Non-finite values sort after finite ones;
// two non-finite values compare equal.
function compareNumber(a, b) {
  const left = typeof a === 'number' ? a : Number(a);
  const right = typeof b === 'number' ? b : Number(b);
  const leftOk = Number.isFinite(left);
  const rightOk = Number.isFinite(right);
  if (leftOk && rightOk) {
    if (left === right) return 0;
    return left < right ? -1 : 1;
  }
  if (leftOk) return -1;
  if (rightOk) return 1;
  return 0;
}
// Return a sorted copy of `nodes` according to the current sortState. Rows
// missing a value for the active column always sink to the bottom regardless
// of direction; the input array is never mutated.
function sortNodes(nodes) {
  if (!Array.isArray(nodes)) return [];
  const config = tableSorters[sortState.key];
  if (!config) return nodes.slice();
  const direction = sortState.direction === 'asc' ? 1 : -1;
  const { getValue, hasValue, compare } = config;
  const present = value => (hasValue ? hasValue(value) : value != null && value !== '');
  return nodes.slice().sort((left, right) => {
    const a = getValue(left);
    const b = getValue(right);
    const okA = present(a);
    const okB = present(b);
    if (!okA && !okB) return 0;
    if (!okA) return 1;
    if (!okB) return -1;
    return compare(a, b) * direction;
  });
}
// Sync the table header UI with sortState: reset every column to the inactive
// state, then mark the active one with an arrow glyph, aria-sort on its <th>,
// a pressed state, and a label announcing current and next direction.
function updateSortIndicators() {
if (!nodesTable || !sortButtons.length) return;
nodesTable.querySelectorAll('thead th').forEach(th => th.removeAttribute('aria-sort'));
sortButtons.forEach(button => {
const indicator = button.querySelector('.sort-indicator');
if (indicator) indicator.textContent = '';
button.removeAttribute('data-sort-active');
button.setAttribute('aria-pressed', 'false');
const label = button.dataset.sortLabel || button.textContent.trim();
button.setAttribute('aria-label', `Sort by ${label}`);
});
const activeButton = sortButtons.find(button => button.dataset.sortKey === sortState.key);
if (!activeButton) return;
const indicator = activeButton.querySelector('.sort-indicator');
if (indicator) indicator.textContent = sortState.direction === 'asc' ? '▲' : '▼';
const th = activeButton.closest('th');
if (th) {
th.setAttribute('aria-sort', sortState.direction === 'asc' ? 'ascending' : 'descending');
}
activeButton.setAttribute('data-sort-active', 'true');
activeButton.setAttribute('aria-pressed', 'true');
const label = activeButton.dataset.sortLabel || activeButton.textContent.trim();
const directionLabel = sortState.direction === 'asc' ? 'ascending' : 'descending';
const nextDirection = sortState.direction === 'asc' ? 'descending' : 'ascending';
activeButton.setAttribute('aria-label', `${label}, sorted ${directionLabel}. Activate to sort ${nextDirection}.`);
}
// Column-sort wiring: clicking the active column flips its direction; a new
// column starts from its configured default direction. applyFilter() repaints.
if (sortButtons.length) {
sortButtons.forEach(button => {
button.addEventListener('click', () => {
const key = button.dataset.sortKey;
if (!key) return;
if (sortState.key === key) {
sortState = { key, direction: sortState.direction === 'asc' ? 'desc' : 'asc' };
} else {
const config = tableSorters[key];
const dir = config && config.defaultDirection ? config.defaultDirection : 'asc';
sortState = { key, direction: dir };
}
applyFilter();
});
});
}
// Paint the initial indicator state on load.
updateSortIndicators();
// Cancel any pending refresh timer, then re-arm it only while the
// auto-refresh checkbox is present and ticked.
function restartAutoRefresh() {
  if (refreshTimer) {
    clearInterval(refreshTimer);
    refreshTimer = null;
  }
  const enabled = autoRefreshEl && autoRefreshEl.checked;
  if (enabled) {
    refreshTimer = setInterval(refresh, REFRESH_MS);
  }
}
// Map centre and the maximum plotted node distance are server-configured (ERB).
const MAP_CENTER = L.latLng(<%= map_center_lat %>, <%= map_center_lon %>);
const MAX_NODE_DISTANCE_KM = <%= max_node_distance_km %>;
// Badge/marker colour per Meshtastic node role (frozen lookup table).
const roleColors = Object.freeze({
CLIENT: '#A8D5BA',
CLIENT_HIDDEN: '#B8DCA9',
CLIENT_MUTE: '#D2E3A2',
TRACKER: '#E8E6A1',
SENSOR: '#F4E3A3',
LOST_AND_FOUND: '#F9D4A6',
REPEATER: '#F7B7A3',
ROUTER_LATE: '#F29AA3',
ROUTER: '#E88B94'
});
// --- Map setup ---
const map = L.map('map', { worldCopyJump: true });
// Two tile layers; the theme toggle below swaps between light and dark.
const lightTiles = L.tileLayer('https://tiles.stadiamaps.com/tiles/alidade_smooth/{z}/{x}/{y}.png', {
maxZoom: 18,
attribution: '&copy; OpenStreetMap contributors &amp; Stadia Maps'
});
const darkTiles = L.tileLayer('https://tiles.stadiamaps.com/tiles/alidade_smooth_dark/{z}/{x}/{y}.png', {
maxZoom: 18,
attribution: '&copy; OpenStreetMap contributors &amp; Stadia Maps'
});
let tiles = lightTiles.addTo(map);
// Default view until first data arrives
map.setView(MAP_CENTER, 10);
const markersLayer = L.layerGroup().addTo(map);
// Bottom-right legend listing each role with its colour swatch.
const legend = L.control({ position: 'bottomright' });
legend.onAdd = function () {
const div = L.DomUtil.create('div', 'legend');
for (const [role, color] of Object.entries(roleColors)) {
div.innerHTML += `<div><span style="background:${color}"></span>${role}</div>`;
}
return div;
};
legend.addTo(map);
// Dark-mode toggle: flips the body class, the button glyph, and the tile layer.
themeToggle.addEventListener('click', () => {
const dark = document.body.classList.toggle('dark');
themeToggle.textContent = dark ? '☀️' : '🌙';
map.removeLayer(tiles);
tiles = dark ? darkTiles : lightTiles;
tiles.addTo(map);
});
// Element that held focus before the info dialog opened; restored on close.
let lastFocusBeforeInfo = null;
// Open the site info dialog: remember focus, show the overlay, lock page
// scrolling, and move focus into the dialog for keyboard users.
function openInfoOverlay() {
if (!infoOverlay || !infoDialog) return;
lastFocusBeforeInfo = document.activeElement;
infoOverlay.hidden = false;
document.body.style.setProperty('overflow', 'hidden');
infoDialog.focus();
}
// Close the site info dialog: hide it, restore scrolling, and return focus to
// the previously focused element (falling back to the info button).
function closeInfoOverlay() {
if (!infoOverlay || !infoDialog) return;
infoOverlay.hidden = true;
document.body.style.removeProperty('overflow');
const target = lastFocusBeforeInfo && typeof lastFocusBeforeInfo.focus === 'function' ? lastFocusBeforeInfo : infoBtn;
if (target && typeof target.focus === 'function') {
target.focus();
}
lastFocusBeforeInfo = null;
}
// Site-info dialog wiring: open button, close button, click-outside, Escape.
if (infoBtn && infoOverlay && infoClose) {
infoBtn.addEventListener('click', openInfoOverlay);
infoClose.addEventListener('click', closeInfoOverlay);
infoOverlay.addEventListener('click', event => {
if (event.target === infoOverlay) {
closeInfoOverlay();
}
});
document.addEventListener('keydown', event => {
if (event.key === 'Escape' && !infoOverlay.hidden) {
closeInfoOverlay();
}
});
}
// Close button inside the node-details overlay.
if (shortInfoClose) {
shortInfoClose.addEventListener('click', event => {
event.preventDefault();
event.stopPropagation();
closeShortInfoOverlay();
});
}
// Delegated click handler: a click on a short-name badge toggles the
// node-details overlay for that node; any other click closes an open overlay.
document.addEventListener('click', event => {
const shortTarget = event.target.closest('.short-name');
if (shortTarget && shortTarget.dataset && shortTarget.dataset.nodeInfo) {
event.preventDefault();
event.stopPropagation();
let info = null;
try {
info = JSON.parse(shortTarget.dataset.nodeInfo);
} catch (err) {
console.warn('Failed to parse node info payload', err);
}
if (!info) return;
// Fall back to the badge text (stripping &nbsp; padding) and a default
// role when the embedded payload lacks them.
if (!info.shortName && shortTarget.textContent) {
info.shortName = shortTarget.textContent.replace(/\u00a0/g, ' ').trim();
}
if (!info.role) {
info.role = 'CLIENT';
}
// A second click on the same badge closes the overlay; otherwise (re)open.
if (shortInfoOverlay && !shortInfoOverlay.hidden && shortInfoAnchor === shortTarget) {
closeShortInfoOverlay();
} else {
openShortInfoOverlay(shortTarget, info);
}
return;
}
if (shortInfoOverlay && !shortInfoOverlay.hidden && !shortInfoOverlay.contains(event.target)) {
closeShortInfoOverlay();
}
});
// Escape also closes the node-details overlay.
document.addEventListener('keydown', event => {
if (event.key === 'Escape' && shortInfoOverlay && !shortInfoOverlay.hidden) {
closeShortInfoOverlay();
}
});
// Keep the overlay anchored to its badge when the viewport resizes.
window.addEventListener('resize', () => {
if (shortInfoOverlay && !shortInfoOverlay.hidden) {
requestAnimationFrame(positionShortInfoOverlay);
}
});
// --- Helpers ---
// Escape the five HTML-significant characters so arbitrary text can be
// interpolated into innerHTML safely. '&' is replaced first so already
// produced entities are not double-escaped in a later step.
function escapeHtml(str) {
  return String(str)
    .replaceAll('&', '&amp;')
    .replaceAll('<', '&lt;')
    .replaceAll('>', '&gt;')
    .replaceAll('"', '&quot;')
    .replaceAll("'", '&#39;');
}
// Render the coloured short-name badge <span> for a node.
// `short`    – badge text; falsy yields a grey "?" placeholder badge.
// `role`     – picks the badge colour (falls back to nodeData.role, then CLIENT).
// `longName` – shown as a hover tooltip when present.
// `nodeData` – optional node record; when given, a JSON summary is embedded in
//              a data-node-info attribute consumed by the click handler that
//              opens the node-details overlay.
function renderShortHtml(short, role, longName, nodeData = null){
const safeTitle = longName ? escapeHtml(String(longName)) : '';
const titleAttr = safeTitle ? ` title="${safeTitle}"` : '';
const resolvedRole = role || (nodeData && nodeData.role) || 'CLIENT';
let infoAttr = '';
if (nodeData && typeof nodeData === 'object') {
// Accept both snake_case keys (API rows) and camelCase keys (overlay payload).
const info = {
nodeId: nodeData.node_id ?? nodeData.nodeId ?? '',
shortName: short != null ? String(short) : (nodeData.short_name ?? ''),
longName: nodeData.long_name ?? longName ?? '',
role: resolvedRole,
hwModel: nodeData.hw_model ?? nodeData.hwModel ?? '',
battery: nodeData.battery_level ?? nodeData.battery ?? null,
voltage: nodeData.voltage ?? null,
uptime: nodeData.uptime_seconds ?? nodeData.uptime ?? null,
channel: nodeData.channel_utilization ?? nodeData.channel ?? null,
airUtil: nodeData.air_util_tx ?? nodeData.airUtil ?? null,
};
// The JSON is HTML-escaped so it can sit safely inside an attribute value.
infoAttr = ` data-node-info="${escapeHtml(JSON.stringify(info))}"`;
}
if (!short) {
return `<span class="short-name" style="background:#ccc"${titleAttr}${infoAttr}>?&nbsp;&nbsp;&nbsp;</span>`;
}
// Pad to four characters with &nbsp; so badges line up in monospace columns.
const padded = escapeHtml(String(short).padStart(4, ' ')).replace(/ /g, '&nbsp;');
const color = roleColors[resolvedRole] || roleColors.CLIENT;
return `<span class="short-name" style="background:${color}"${titleAttr}${infoAttr}>${padded}</span>`;
}
// Uptime formatter for the node-details overlay: '' for absent/non-numeric
// input, '0s' for exactly zero (timeHum would return '' for 0), otherwise the
// humanized duration from timeHum().
function formatShortInfoUptime(value) {
  if (value == null || value === '') return '';
  const seconds = Number(value);
  if (!Number.isFinite(seconds)) return '';
  if (seconds === 0) return '0s';
  return timeHum(seconds);
}
// Stringify a value for display, substituting an em dash for null/undefined
// or the empty string (note: 0 and false are real values and are kept).
function shortInfoValueOrDash(value) {
  if (value == null || value === '') return '—';
  return String(value);
}
// Hide the node-details overlay and detach it from its anchor. Visibility is
// reset to 'visible' because the positioning routine hides the overlay while
// it is being measured.
function closeShortInfoOverlay() {
  if (!shortInfoOverlay) return;
  shortInfoOverlay.hidden = true;
  shortInfoOverlay.style.visibility = 'visible';
  shortInfoAnchor = null;
}
// Place the node-details overlay at its anchor badge, clamped to stay at
// least 8px inside the viewport, then reveal it (it is measured while
// visibility:hidden to avoid a flash at the wrong position).
function positionShortInfoOverlay() {
if (!shortInfoOverlay || shortInfoOverlay.hidden || !shortInfoAnchor) return;
// The anchor can disappear on a table re-render; close rather than float.
if (!document.body.contains(shortInfoAnchor)) {
closeShortInfoOverlay();
return;
}
const rect = shortInfoAnchor.getBoundingClientRect();
const overlayRect = shortInfoOverlay.getBoundingClientRect();
const viewportWidth = document.documentElement.clientWidth;
const viewportHeight = document.documentElement.clientHeight;
// Convert the anchor's viewport coordinates to document coordinates.
let left = rect.left + window.scrollX;
let top = rect.top + window.scrollY;
// Clamp so the overlay keeps an 8px margin from every viewport edge.
const maxLeft = window.scrollX + viewportWidth - overlayRect.width - 8;
const maxTop = window.scrollY + viewportHeight - overlayRect.height - 8;
left = Math.max(window.scrollX + 8, Math.min(left, maxLeft));
top = Math.max(window.scrollY + 8, Math.min(top, maxTop));
shortInfoOverlay.style.left = `${left}px`;
shortInfoOverlay.style.top = `${top}px`;
shortInfoOverlay.style.visibility = 'visible';
}
// Populate and open the node-details overlay next to the clicked badge.
// `info` is the payload parsed from data-node-info (see renderShortHtml).
function openShortInfoOverlay(target, info) {
if (!shortInfoOverlay || !shortInfoContent || !info) return;
const lines = [];
const longNameValue = shortInfoValueOrDash(info.longName ?? '');
lines.push(`<strong>${escapeHtml(longNameValue)}</strong>`);
// Second line: the coloured badge plus the node id (omitted when absent).
const shortParts = [];
shortParts.push(renderShortHtml(info.shortName, info.role, info.longName));
const nodeIdValue = shortInfoValueOrDash(info.nodeId ?? '');
if (nodeIdValue !== '—') {
shortParts.push(`<span class="mono">${escapeHtml(nodeIdValue)}</span>`);
}
if (shortParts.length) {
lines.push(shortParts.join(' '));
}
// One "Label: value" row per metric; missing values render as an em dash.
lines.push(`Role: ${escapeHtml(shortInfoValueOrDash(info.role || 'CLIENT'))}`);
lines.push(`Model: ${escapeHtml(shortInfoValueOrDash(fmtHw(info.hwModel)))}`);
lines.push(`Battery: ${escapeHtml(shortInfoValueOrDash(fmtAlt(info.battery, '%')))}`);
lines.push(`Voltage: ${escapeHtml(shortInfoValueOrDash(fmtAlt(info.voltage, 'V')))}`);
lines.push(`Uptime: ${escapeHtml(shortInfoValueOrDash(formatShortInfoUptime(info.uptime)))}`);
lines.push(`Channel Util: ${escapeHtml(shortInfoValueOrDash(fmtTx(info.channel)))}`);
lines.push(`Air Util Tx: ${escapeHtml(shortInfoValueOrDash(fmtTx(info.airUtil)))}`);
shortInfoContent.innerHTML = lines.join('<br/>');
shortInfoAnchor = target;
// Show while invisible so positionShortInfoOverlay can measure, then reveal.
shortInfoOverlay.hidden = false;
shortInfoOverlay.style.visibility = 'hidden';
requestAnimationFrame(positionShortInfoOverlay);
}
// Append a chat line, trim the backlog to CHAT_LIMIT entries, and keep the
// view scrolled to the newest entry.
function appendChatEntry(div) {
  chatEl.appendChild(div);
  while (chatEl.childElementCount > CHAT_LIMIT) {
    chatEl.firstChild.remove();
  }
  chatEl.scrollTop = chatEl.scrollHeight;
}
// Insert a "-- YYYY-MM-DD --" divider whenever the chat crosses into a new
// local calendar day. `ts` is a unix timestamp in seconds; falsy is ignored.
function maybeAddDateDivider(ts) {
  if (!ts) return;
  const when = new Date(ts * 1000);
  const dayKey = `${when.getFullYear()}-${pad(when.getMonth() + 1)}-${pad(when.getDate())}`;
  if (lastChatDate === dayKey) return;
  lastChatDate = dayKey;
  const midnight = new Date(when);
  midnight.setHours(0, 0, 0, 0);
  const divider = document.createElement('div');
  divider.className = 'chat-entry-date';
  divider.textContent = `-- ${formatDate(midnight)} --`;
  appendChatEntry(divider);
}
// Announce a newly discovered node in the chat stream, prefixed with its
// first-heard timestamp and coloured badge.
function addNewNodeChatEntry(n) {
  maybeAddDateDivider(n.first_heard);
  const stamp = formatTime(new Date(n.first_heard * 1000));
  const badge = renderShortHtml(n.short_name, n.role, n.long_name, n);
  const safeLong = escapeHtml(n.long_name || '');
  const entry = document.createElement('div');
  entry.className = 'chat-entry-node';
  entry.innerHTML = `[${stamp}] ${badge} <em>New node: ${safeLong}</em>`;
  appendChatEntry(entry);
}
// Render an incoming text message in the chat stream, attributed via the
// sender's node badge (sender info may be absent).
function addNewMessageChatEntry(m) {
  maybeAddDateDivider(m.rx_time);
  const stamp = formatTime(new Date(m.rx_time * 1000));
  const badge = renderShortHtml(m.node?.short_name, m.node?.role, m.node?.long_name, m.node);
  const body = escapeHtml(m.text || '');
  const entry = document.createElement('div');
  entry.className = 'chat-entry-msg';
  entry.innerHTML = `[${stamp}] ${badge} ${body}`;
  appendChatEntry(entry);
}
// Zero-pad a number to two digits for timestamp rendering.
function pad(n) {
  return String(n).padStart(2, "0");
}
// Render a Date as local HH:MM:SS.
function formatTime(d) {
  return [d.getHours(), d.getMinutes(), d.getSeconds()].map(pad).join(':');
}
// Render a Date as local YYYY-MM-DD (year unpadded, as four digits suffice).
function formatDate(d) {
  return [d.getFullYear(), pad(d.getMonth() + 1), pad(d.getDate())].join('-');
}
// Hardware model for display: hide the firmware's "UNSET" sentinel as well as
// empty/absent values.
function fmtHw(v) {
  if (!v || v === "UNSET") return "";
  return String(v);
}
// Coordinate formatter: fixed precision (default 5 decimal places), or ''
// when the value is missing or non-numeric.
function fmtCoords(v, d = 5) {
  if (v == null || v === '') return "";
  const num = Number(v);
  if (!Number.isFinite(num)) return "";
  return num.toFixed(d);
}
// Append a unit suffix to a value; '' when the value is absent (0 is kept).
function fmtAlt(v, s) {
  if (v == null || v === '') return "";
  return `${v}${s}`;
}
// Utilization percentage at fixed precision (default 3 decimals); '' when the
// value is missing or non-numeric.
function fmtTx(v, d = 3) {
  if (v == null || v === '') return "";
  const num = Number(v);
  if (!Number.isFinite(num)) return "";
  return `${num.toFixed(d)}%`;
}
// Humanize a duration in seconds, e.g. 3900 -> "1h 5m". Falsy input (including
// 0) yields '' so callers can render their own placeholder; negatives clamp
// to "0s".
function timeHum(unixSec) {
  if (!unixSec) return "";
  if (unixSec < 0) return "0s";
  if (unixSec < 60) return `${unixSec}s`;
  // Pick the coarsest pair of units that fits, then emit "Nmajor Mminor".
  const scales = [
    { below: 3600, major: 60, majorUnit: 'm', minor: 1, minorUnit: 's' },
    { below: 86400, major: 3600, majorUnit: 'h', minor: 60, minorUnit: 'm' },
    { below: Infinity, major: 86400, majorUnit: 'd', minor: 3600, minorUnit: 'h' },
  ];
  const scale = scales.find(s => unixSec < s.below);
  const major = Math.floor(unixSec / scale.major);
  const minor = Math.floor((unixSec % scale.major) / scale.minor);
  return `${major}${scale.majorUnit} ${minor}${scale.minorUnit}`;
}
// Humanized elapsed time since a unix timestamp (seconds). `nowSec` may be
// pinned by the caller so a whole repaint uses one instant. Falsy timestamps
// yield ''; timestamps in the future clamp to "0s".
function timeAgo(unixSec, nowSec = Date.now()/1000) {
  if (!unixSec) return "";
  const elapsed = Math.floor(nowSec - Number(unixSec));
  if (elapsed < 0) return "0s";
  if (elapsed < 60) return `${elapsed}s`;
  const scales = [
    { below: 3600, major: 60, majorUnit: 'm', minor: 1, minorUnit: 's' },
    { below: 86400, major: 3600, majorUnit: 'h', minor: 60, minorUnit: 'm' },
    { below: Infinity, major: 86400, majorUnit: 'd', minor: 3600, minorUnit: 'h' },
  ];
  const scale = scales.find(s => elapsed < s.below);
  const major = Math.floor(elapsed / scale.major);
  const minor = Math.floor((elapsed % scale.major) / scale.minor);
  return `${major}${scale.majorUnit} ${minor}${scale.minorUnit}`;
}
// Fetch up to `limit` nodes from the API, bypassing the HTTP cache.
// Throws on a non-2xx response.
async function fetchNodes(limit = NODE_LIMIT) {
  const response = await fetch(`/api/nodes?limit=${limit}`, { cache: 'no-store' });
  if (!response.ok) throw new Error('HTTP ' + response.status);
  return response.json();
}
// Fetch up to `limit` recent chat messages from the API, bypassing the HTTP
// cache. Throws on a non-2xx response.
// Fix: default to CHAT_LIMIT rather than NODE_LIMIT — both are 1000 today, so
// behavior is unchanged, but the message feed should follow the chat limit.
async function fetchMessages(limit = CHAT_LIMIT) {
  const r = await fetch(`/api/messages?limit=${limit}`, { cache: 'no-store' });
  if (!r.ok) throw new Error('HTTP ' + r.status);
  return r.json();
}
// Annotate each node (in place) with `distance_km` from MAP_CENTER, or null
// when either coordinate is missing or non-numeric.
function computeDistances(nodes) {
  for (const node of nodes) {
    const missing =
      node.latitude == null || node.latitude === '' ||
      node.longitude == null || node.longitude === '';
    const lat = Number(node.latitude);
    const lon = Number(node.longitude);
    if (missing || !Number.isFinite(lat) || !Number.isFinite(lon)) {
      node.distance_km = null;
      continue;
    }
    node.distance_km = L.latLng(lat, lon).distanceTo(MAP_CENTER) / 1000;
  }
}
// Rebuild the node table body from the given (already filtered/sorted) rows.
// `nowSec` pins "time ago" rendering to one instant for the whole repaint.
// Security fix: `node_id` and `long_name` arrive from the mesh (untrusted)
// and were interpolated into innerHTML unescaped; they are now passed through
// escapeHtml() like the other free-text fields in this file.
function renderTable(nodes, nowSec) {
const tb = document.querySelector('#nodes tbody');
const frag = document.createDocumentFragment();
for (const n of nodes) {
const tr = document.createElement('tr');
// Cell order mirrors the <thead> columns; formatters return '' for gaps.
tr.innerHTML = `
<td class="mono">${escapeHtml(n.node_id || "")}</td>
<td>${renderShortHtml(n.short_name, n.role, n.long_name, n)}</td>
<td>${escapeHtml(n.long_name || "")}</td>
<td>${timeAgo(n.last_heard, nowSec)}</td>
<td>${n.role || "CLIENT"}</td>
<td>${fmtHw(n.hw_model)}</td>
<td>${fmtAlt(n.battery_level, "%")}</td>
<td>${fmtAlt(n.voltage, "V")}</td>
<td>${timeHum(n.uptime_seconds)}</td>
<td>${fmtTx(n.channel_utilization)}</td>
<td>${fmtTx(n.air_util_tx)}</td>
<td>${fmtCoords(n.latitude)}</td>
<td>${fmtCoords(n.longitude)}</td>
<td>${fmtAlt(n.altitude, "m")}</td>
<td class="mono">${n.pos_time_iso ? `${timeAgo(n.position_time, nowSec)}` : ""}</td>`;
frag.appendChild(tr);
}
tb.replaceChildren(frag);
// The re-render may have removed the badge the details overlay was anchored
// to; close the overlay rather than leave it orphaned.
if (shortInfoOverlay && shortInfoAnchor && !document.body.contains(shortInfoAnchor)) {
closeShortInfoOverlay();
}
}
// Repaint the Leaflet marker layer from the given rows, one circle marker per
// node with valid coordinates, and optionally refit the map bounds.
// Security fix: `long_name` and `node_id` come from the mesh (untrusted) and
// were interpolated into popup HTML unescaped; they now go through escapeHtml()
// like the other free-text fields in this file.
function renderMap(nodes, nowSec) {
markersLayer.clearLayers();
const pts = [];
for (const n of nodes) {
const latRaw = n.latitude, lonRaw = n.longitude;
if (latRaw == null || latRaw === '' || lonRaw == null || lonRaw === '') continue;
const lat = Number(latRaw), lon = Number(lonRaw);
if (!Number.isFinite(lat) || !Number.isFinite(lon)) continue;
// Skip implausibly distant nodes (distance computed by computeDistances).
if (n.distance_km != null && n.distance_km > MAX_NODE_DISTANCE_KM) continue;
const color = roleColors[n.role] || '#3388ff';
const marker = L.circleMarker([lat, lon], {
radius: 9,
color: '#000',
weight: 1,
fillColor: color,
fillOpacity: 0.7,
opacity: 0.7
});
// Popup rows; null entries (missing data) are filtered out below.
const lines = [
`<b>${escapeHtml(n.long_name || '')}</b>`,
`${renderShortHtml(n.short_name, n.role, n.long_name, n)} <span class="mono">${escapeHtml(n.node_id || '')}</span>`,
n.hw_model ? `Model: ${fmtHw(n.hw_model)}` : null,
`Role: ${n.role || 'CLIENT'}`,
(n.battery_level != null ? `Battery: ${fmtAlt(n.battery_level, "%")}, ${fmtAlt(n.voltage, "V")}` : null),
(n.last_heard ? `Last seen: ${timeAgo(n.last_heard, nowSec)}` : null),
(n.pos_time_iso ? `Last Position: ${timeAgo(n.position_time, nowSec)}` : null),
(n.uptime_seconds ? `Uptime: ${timeHum(n.uptime_seconds)}` : null),
].filter(Boolean);
marker.bindPopup(lines.join('<br/>'));
marker.addTo(markersLayer);
pts.push([lat, lon]);
}
// Optionally zoom to fit all plotted nodes (with 20% padding).
if (pts.length && fitBoundsEl.checked) {
const b = L.latLngBounds(pts);
map.fitBounds(b.pad(0.2), { animate: false });
}
}
// Re-derive the visible node set from the free-text filter (matching node id,
// short name, or long name, case-insensitively), sort it per sortState, and
// repaint the table, map, counters, and sort indicators.
function applyFilter() {
  const query = (filterInput ? filterInput.value : '').trim().toLowerCase();
  const matches = node =>
    [node.node_id, node.short_name, node.long_name]
      .filter(value => value != null && value !== '')
      .some(value => String(value).toLowerCase().includes(query));
  const visible = query ? allNodes.filter(matches) : allNodes.slice();
  const sorted = sortNodes(visible);
  const nowSec = Date.now() / 1000;
  renderTable(sorted, nowSec);
  renderMap(sorted, nowSec);
  updateCount(sorted, nowSec);
  updateRefreshInfo(sorted, nowSec);
  updateSortIndicators();
}
// Re-filter live as the user types.
if (filterInput) {
filterInput.addEventListener('input', applyFilter);
}
// Fetch the latest nodes and messages, announce anything not yet seen in the
// chat stream (interleaved chronologically), then repaint the whole UI.
async function refresh() {
try {
statusEl.textContent = 'refreshing…';
const nodes = await fetchNodes();
computeDistances(nodes);
// Nodes whose ids have not been announced yet.
const newNodes = [];
for (const n of nodes) {
if (n.node_id && !seenNodeIds.has(n.node_id)) {
newNodes.push(n);
}
}
const messages = await fetchMessages();
// Messages whose ids have not been announced yet.
const newMessages = [];
for (const m of messages) {
if (m.id && !seenMessageIds.has(m.id)) {
newMessages.push(m);
}
}
// Merge both kinds of announcements, oldest first; on a timestamp tie the
// node announcement sorts before the message.
const entries = [];
for (const n of newNodes) entries.push({ type: 'node', ts: n.first_heard ?? 0, item: n });
for (const m of newMessages) entries.push({ type: 'msg', ts: m.rx_time ?? 0, item: m });
entries.sort((a, b) => {
if (a.ts !== b.ts) return a.ts - b.ts;
return a.type === 'node' && b.type === 'msg' ? -1 : a.type === 'msg' && b.type === 'node' ? 1 : 0;
});
// Ids are marked seen only after their entry is rendered.
for (const e of entries) {
if (e.type === 'node') {
addNewNodeChatEntry(e.item);
if (e.item.node_id) seenNodeIds.add(e.item.node_id);
} else {
addNewMessageChatEntry(e.item);
if (e.item.id) seenMessageIds.add(e.item.id);
}
}
allNodes = nodes;
applyFilter();
statusEl.textContent = 'updated ' + new Date().toLocaleTimeString();
} catch (e) {
// Surface fetch/render failures in the status line; keep the previous view.
statusEl.textContent = 'error: ' + e.message;
console.error(e);
}
}
// Initial load, then arm the auto-refresh timer (when the toggle is on).
refresh();
restartAutoRefresh();
refreshBtn.addEventListener('click', refresh);
// Toggling auto-refresh re-arms the timer; enabling also refreshes at once.
if (autoRefreshEl) {
autoRefreshEl.addEventListener('change', () => {
restartAutoRefresh();
if (autoRefreshEl.checked) {
refresh();
}
});
}
// Show the number of nodes heard within the last 24 hours in the document
// title and in the page header (preferring the dedicated title-text span).
function updateCount(nodes, nowSec) {
  const cutoff = nowSec - 86400;
  const active = nodes.filter(n => n.last_heard && Number(n.last_heard) >= cutoff).length;
  const text = `${baseTitle} (${active})`;
  titleEl.textContent = text;
  if (headerTitleTextEl) {
    headerTitleTextEl.textContent = text;
  } else if (headerEl) {
    headerEl.textContent = text;
  }
}
// Update the status strip with active-node counts over the last hour, day,
// and week. Channel name and frequency are ERB-interpolated server-side.
function updateRefreshInfo(nodes, nowSec) {
const windows = [
{ label: 'hour', secs: 3600 },
{ label: 'day', secs: 86400 },
{ label: 'week', secs: 7 * 86400 },
];
// e.g. "12/hour, 40/day, 77/week"
const counts = windows.map(w => {
const c = nodes.filter(n => n.last_heard && nowSec - Number(n.last_heard) <= w.secs).length;
return `${c}/${w.label}`;
}).join(', ');
refreshInfo.textContent = `<%= default_channel %> (<%= default_frequency %>) — active nodes: ${counts}.`;
}
</script>
</body>
</html>