89 Commits

Author SHA1 Message Date
pablorevilla-meshtastic
20c2a3dc62 Update to stats page 2026-02-13 15:48:40 -08:00
pablorevilla-meshtastic
4a7fa1df08 fix error for sqlite 2026-02-13 14:20:12 -08:00
pablorevilla-meshtastic
685dbc9505 Added the ability to track gateways and present them in various pages 2026-02-13 14:14:46 -08:00
pablorevilla-meshtastic
9aacceda28 fix location of edges now that the node is in a different spot 2026-02-12 13:29:51 -08:00
pablorevilla-meshtastic
a7051e7d26 More fixes for maps overlay 2026-02-12 12:01:51 -08:00
pablorevilla-meshtastic
7926e81562 documents update 2026-02-12 11:34:19 -08:00
pablorevilla-meshtastic
2002e093af Added visibility on maps of nodes that overlap 2026-02-12 11:33:24 -08:00
pablorevilla-meshtastic
fc44f49f2d Work on updating protobufs and .gitignore 2026-02-11 17:30:53 -08:00
pablorevilla-meshtastic
89fbc6aeca changes 2026-02-11 07:00:21 -08:00
pablorevilla-meshtastic
20e3f9c104 Added Observed coverage to the node.html page 2026-02-10 15:33:59 -08:00
pablorevilla-meshtastic
17fa92d4cf modified the colors 2026-02-10 12:23:17 -08:00
pablorevilla-meshtastic
a48a3a4141 Updating and fixing errors 2026-02-10 11:07:22 -08:00
pablorevilla-meshtastic
7d5b638eac changes to coverage page 2026-02-10 10:27:07 -08:00
pablorevilla-meshtastic
5f5fe0da90 Merge branch 'pr-128' into 3.0.5 2026-02-10 10:04:40 -08:00
Amy Nagle
dd98814b2c Normalize port number lists 2026-02-10 10:33:54 -05:00
pablorevilla-meshtastic
4dd999178c more details 2026-02-09 21:31:34 -08:00
pablorevilla-meshtastic
01dce2a5e0 Update to check for new requirements 2026-02-09 21:29:53 -08:00
pablorevilla-meshtastic
9622092c17 Add basic coverage support. 2026-02-09 21:17:22 -08:00
pablorevilla-meshtastic
29da1487d4 Merge branch 'pr-127' into 3.0.5 2026-02-09 07:48:33 -08:00
Mike Weaver
357fb530e2 Add database health check to include last import time
Retrieve the last import time from the database during health check.

This is useful to setup monitoring for meshview instances that are no longer receiving messages.
2026-02-08 03:07:46 -07:00
pablorevilla-meshtastic
b43683a259 Added First Seen time to node.html and /api/nodes 2026-02-06 11:41:15 -08:00
pablorevilla-meshtastic
59379649e2 Add version number 2026-02-06 11:23:07 -08:00
pablorevilla-meshtastic
a62bc350c0 fix: update store.py 2026-02-05 18:25:23 -08:00
pablorevilla-meshtastic
82ff4bb0df Merge PR 124: QR/import, impersonation detection, node public keys 2026-02-05 18:02:49 -08:00
pablorevilla-meshtastic
c454f2ef3a started work on new traceroute template 2026-02-05 06:53:49 -08:00
pablorevilla-meshtastic
b93f640233 load language dictionaries into memory to reduce call to file system 2026-01-31 10:42:47 -08:00
pablorevilla-meshtastic
018e16e9fa updates to language dictionaries 2026-01-26 10:40:20 -08:00
pablorevilla-meshtastic
41397072af Added unmapped packets details to map.html 2026-01-26 09:15:01 -08:00
pablorevilla-meshtastic
43be448100 fix typo in container workflow file 2026-01-25 21:06:46 -08:00
pablorevilla-meshtastic
8c7f181002 Merge branch 'master' of github.com:pablorevilla-meshtastic/meshview 2026-01-25 20:47:03 -08:00
pablorevilla-meshtastic
5195868719 Made latest to follow master 2026-01-25 20:43:43 -08:00
Pablo Revilla
a473e32c59 Add initial server and configuration settings 2026-01-25 13:39:14 -08:00
pablorevilla-meshtastic
be51dc9c55 working on container 2026-01-25 13:34:26 -08:00
pablorevilla-meshtastic
bea6c8cd8e Fix how we show the actual channel on the packet list. 2026-01-24 23:31:57 -08:00
pablorevilla-meshtastic
351c35ef42 Fix issues with decryption of secondary keys 2026-01-24 21:38:14 -08:00
pablorevilla-meshtastic
7f722b6f12 Updated Readme.md 2026-01-24 11:20:34 -08:00
pablorevilla-meshtastic
52f1a1e788 updated the version number and date 2026-01-24 11:14:06 -08:00
pablorevilla-meshtastic
f44a78730a Added the ability to skip packets with specific from_id and have secondary encryption key for mqtt_reader. 2026-01-23 21:49:03 -08:00
pablorevilla-meshtastic
a9a5e046ea more container test 2026-01-23 13:02:34 -08:00
pablorevilla-meshtastic
37386f9e28 change to container.yml 2026-01-23 11:58:48 -08:00
pablorevilla-meshtastic
b66bfb1ee9 Fix error on container build and update README 2026-01-23 11:42:03 -08:00
pablorevilla-meshtastic
caf9cd1596 Updated list of sites running meshview 2026-01-22 07:42:24 -08:00
pablorevilla-meshtastic
a4ebd2b23c work on net.html to limit packets to last 12 hours instead of 48 hours. 2026-01-21 20:11:17 -08:00
pablorevilla-meshtastic
5676ade6b7 fix api query so that weekly mesh works. 2026-01-21 17:19:19 -08:00
pablorevilla-meshtastic
319f8eac06 optimization 2026-01-20 14:48:33 -08:00
pablorevilla-meshtastic
d85132133a fix bug 2026-01-20 11:27:42 -08:00
pablorevilla-meshtastic
b6d8af409c fix bug on backwards compatibility 2026-01-20 10:10:39 -08:00
pablorevilla-meshtastic
896a0980d5 Update Scripts for PostgreSQL 2026-01-15 16:24:42 -08:00
pablorevilla-meshtastic
7d395e5e27 Correct documentation error 2026-01-15 14:42:18 -08:00
pablorevilla-meshtastic
c3cc01d7e7 Document Update 2026-01-15 14:30:04 -08:00
pablorevilla-meshtastic
ecbadc6087 configure "Wal" for sqlite 2026-01-15 14:10:49 -08:00
pablorevilla-meshtastic
ff30623bdf Documentation update 2026-01-15 11:55:07 -08:00
pablorevilla-meshtastic
a43433ccb4 Update documentation 2026-01-15 11:51:03 -08:00
pablorevilla-meshtastic
4d9db2a52c Update instructions 2026-01-15 11:49:25 -08:00
pablorevilla-meshtastic
e30b59851f Update to 2026-01-15 11:39:24 -08:00
pablorevilla-meshtastic
36dd91be63 Merge branch 'db_updates' 2026-01-15 09:04:09 -08:00
pablorevilla-meshtastic
c9639d851b Fix Time function on store.py 2026-01-15 08:48:22 -08:00
Pablo Revilla
4516c84128 Modify cleanup.sh to use import_time_us for queries
Updated cleanup script to use import_time_us for deletions.
2026-01-14 22:11:52 -08:00
pablorevilla-meshtastic
fa98f56318 Made a couple of changes to the time handling and database config. 2026-01-12 20:10:19 -08:00
pablorevilla-meshtastic
f85e783e8c Adding code to work with multiple databases types. 2026-01-12 14:18:51 -08:00
Pablo Revilla
a882bc22dd Update README with version 3.0.2 details
Added notes about database changes for version 3.0.2.
2026-01-12 10:38:55 -08:00
pablorevilla-meshtastic
e12e3a2a41 Database change to remove import time columns 2026-01-09 13:30:14 -08:00
pablorevilla-meshtastic
da31794d8d Bump version to 3.0.2 and update release date to 2026-1-9 2026-01-09 11:49:58 -08:00
pablorevilla-meshtastic
9912f6b181 testing commit message functionality 2026-01-08 18:39:01 -08:00
pablorevilla-meshtastic
cb4cc281c6 fix speed of node list rendering 2026-01-08 17:38:56 -08:00
pablorevilla-meshtastic
571559114d Add node status indicator and improve favorites handling in nodelist 2026-01-08 17:38:12 -08:00
pablorevilla-meshtastic
df26df07f1 Changes to node.html. fix some of the data 2026-01-08 14:59:45 -08:00
pablorevilla-meshtastic
ffc7340bc9 Changes to nodelist.html. fix some of the data 2026-01-07 17:19:32 -08:00
pablorevilla-meshtastic
1d58aaba83 Changes to nodelist.html. fix some of the data 2026-01-07 13:35:58 -08:00
pablorevilla-meshtastic
b2bb9345fe Changes to nodelist.html. fix some of the data 2026-01-07 13:29:56 -08:00
pablorevilla-meshtastic
9686622b56 Changes to node.html. fix some of the data 2026-01-07 10:01:02 -08:00
pablorevilla-meshtastic
f7644a9573 Changes to node.html. fix some of the data 2026-01-07 09:48:26 -08:00
Pablo Revilla
e48e9464d7 Modify packet.html to add distance 2026-01-03 21:48:19 -08:00
Pablo Revilla
b72bc5d52b Modify packet.html to add distance 2026-01-03 21:44:26 -08:00
Pablo Revilla
1220f0bcbd Modify node.html to add statistics 2026-01-03 21:28:33 -08:00
Pablo Revilla
539410d5bb Modify node.html to add statistics 2026-01-03 21:26:39 -08:00
Pablo Revilla
383b576d18 Modify node.html to add statistics 2026-01-03 21:12:24 -08:00
Pablo Revilla
64a55a3ef3 Modify node.html to add statistics 2026-01-03 20:51:17 -08:00
Pablo Revilla
9408201e57 Modify node.html to add statistics 2026-01-03 19:27:00 -08:00
Pablo Revilla
f75d6bf749 Modify node.html to add statistics 2026-01-03 19:00:39 -08:00
Pablo Revilla
924d223866 Modify node.html to add statistics 2026-01-03 18:13:57 -08:00
Pablo Revilla
e9dcca1f19 Modify node.html to add statistics 2025-12-31 11:58:45 -08:00
Pablo Revilla
00cc2abd23 Modify node.html to add statistics 2025-12-31 11:56:18 -08:00
Pablo Revilla
b76477167d Modify top.html to add paging 2025-12-31 11:13:52 -08:00
Pablo Revilla
b41b249a6d Modify top.html to add paging 2025-12-31 10:38:13 -08:00
Pablo Revilla
71fcda2dd6 Modify top.html to add paging 2025-12-30 09:27:51 -08:00
Pablo Revilla
c4453fbb31 Modify packet.html to sort by hop count. 2025-12-24 10:54:09 -08:00
Pablo Revilla
79fa3f66a8 Fix chart on node.html. 2025-12-24 10:06:17 -08:00
Pablo Revilla
0ce64ac975 Fix chart on node.html. 2025-12-10 09:56:30 -08:00
46 changed files with 3960 additions and 1064 deletions

View File

@@ -2,6 +2,7 @@ name: Build container
on:
push:
workflow_dispatch:
jobs:
docker:
@@ -23,7 +24,8 @@ jobs:
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=match,pattern=v\d.\d.\d,value=latest
# publish :latest from the default branch
type=raw,value=latest,enable={{is_default_branch}}
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
@@ -49,4 +51,4 @@ jobs:
platforms: linux/amd64,linux/arm64
# optional cache (speeds up rebuilds)
cache-from: type=gha
cache-to: type=gha,mode=max
cache-to: type=gha,mode=max

2
.gitignore vendored
View File

@@ -2,7 +2,6 @@ env/*
__pycache__/*
meshview/__pycache__/*
alembic/__pycache__/*
meshtastic/protobuf/*
# Database files
packets.db
@@ -45,3 +44,4 @@ __pycache__/
# OS
.DS_Store
Thumbs.db
packets.db-journal

View File

@@ -35,7 +35,7 @@ RUN uv pip install --no-cache-dir --upgrade pip \
COPY --chown=${APP_USER}:${APP_USER} . .
# Patch config
RUN patch sample.config.ini < container/config.patch
COPY --chown=${APP_USER}:${APP_USER} container/config.ini /app/sample.config.ini
# Clean
RUN rm -rf /app/.git* && \
@@ -77,4 +77,3 @@ CMD ["--pid_dir", "/tmp", "--py_exec", "/opt/venv/bin/python", "--config", "/etc
EXPOSE 8081
VOLUME [ "/etc/meshview", "/var/lib/meshview", "/var/log/meshview" ]

View File

@@ -128,7 +128,11 @@ username =
password =
[database]
connection_string = sqlite+aiosqlite:///var/lib/meshview/packets.db
# SQLAlchemy async connection string.
# Examples:
# sqlite+aiosqlite:///var/lib/meshview/packets.db
# postgresql+asyncpg://user:pass@host:5432/meshview
connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
```
### Database Backups

162
README.md
View File

@@ -4,6 +4,16 @@
The project serves as a real-time monitoring and diagnostic tool for the Meshtastic mesh network. It provides detailed insights into network activity, including message traffic, node positions, and telemetry data.
### Version 3.0.4 — Late January 2026
- Database: multi-DB support, PostgreSQL scripts, WAL config for SQLite, cleanup query timing fixes, removal of import time columns, and various time-handling fixes.
- UI/UX: extensive updates to node.html, nodelist.html, top.html, and packet.html (paging, stats, distance, status/favorites), plus net view changes to a 12-hour window.
- API/logic: weekly mesh query fix, node list performance improvement, backwards-compatibility and other bug fixes.
- MQTT reader: configurable skip-node list and secondary decryption keys.
- Docs/ops: multiple documentation updates, updated site list, container workflow fixes/tests, README updates.
### Version 3.0.2 — January 2026
- This version changes the database schema in a way that requires extra free disk space during the upgrade. SQLite has to rebuild the database when dropping a column (we are dropping some of the old columns), so make sure you have free space of about 1.2x the database size in your environment. Depending on how big your database is, this can take a long time.
### Version 3.0.1 — December 2025
#### 🌐 Multi-Language Support (i18n)
@@ -80,24 +90,32 @@ See [README-Docker.md](README-Docker.md) for container deployment and [docs/](do
Samples of currently running instances:
- https://meshview.bayme.sh (SF Bay Area)
- https://www.svme.sh (Sacramento Valley)
- https://meshview.nyme.sh (New York)
- https://meshview.socalmesh.org (LA Area)
- https://map.wpamesh.net (Western Pennsylvania)
- https://meshview.chicagolandmesh.org (Chicago)
- https://meshview.mt.gt (Canadaverse)
- https://meshview.bayme.sh (SF Bay Area - USA)
- https://www.svme.sh (Sacramento Valley - USA)
- https://meshview.nyme.sh (New York - USA)
- https://meshview.socalmesh.org (Los Angeles - USA)
- https://map.wpamesh.net (Western Pennsylvania - USA)
- https://meshview.chicagolandmesh.org (Chicago - USA)
- https://meshview.freq51.net/ (Salt Lake City - USA)
- https://meshview.mt.gt (Canada)
- https://canadaverse.org (Canada)
- https://meshview.meshtastic.es (Spain)
- https://view.mtnme.sh (North Georgia / East Tennessee)
- https://view.mtnme.sh (North Georgia / East Tennessee - USA)
- https://meshview.lsinfra.de (Hessen - Germany)
- https://map.nswmesh.au (Sydney - Australia)
- https://meshview.pvmesh.org (Pioneer Valley, Massachusetts)
- https://meshview.louisianamesh.org (Louisiana)
- https://meshview.meshcolombia.co/ (Colombia)
- https://meshview-salzburg.jmt.gr/ (Salzburg / Austria)
- https://meshview.pvmesh.org (Pioneer Valley, Massachusetts - USA)
- https://meshview.louisianamesh.org (Louisiana - USA)
- https://www.swlamesh.com (Southwest Louisiana - USA)
- https://meshview.meshcolombia.co (Colombia)
- https://meshview-salzburg.jmt.gr (Salzburg / Austria)
- https://map.cromesh.eu (Croatia)
- https://view.meshdresden.eu (Dresden / Germany)
- https://meshview.meshoregon.com (Oregon - USA)
- https://meshview.gamesh.net (Georgia - USA)
---
### Updating from 2.x to 3.x
We are adding the use of Alembic. If using GitHub
Update your codebase by running the pull command
@@ -271,7 +289,10 @@ password = large4cats
# Database Configuration
# -------------------------
[database]
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
# SQLAlchemy async connection string.
# Examples:
# sqlite+aiosqlite:///packets.db
# postgresql+asyncpg://user:pass@host:5432/meshview
connection_string = sqlite+aiosqlite:///packets.db
@@ -305,6 +326,20 @@ db_cleanup_logfile = dbcleanup.log
---
## NOTE (PostgreSQL setup)**
If you want to use PostgreSQL instead of SQLite:
Install PostgreSQL for your OS.
Create a user and database:
```
`CREATE USER meshview WITH PASSWORD 'change_me';`
`CREATE DATABASE meshview OWNER meshview;`
```
Update `config.ini` example:
```
`connection_string = postgresql+asyncpg://meshview:change_me@localhost:5432/meshview`
```
## Running Meshview
Start the database manager:
@@ -474,16 +509,15 @@ db_cleanup_logfile = dbcleanup.log
```
Once changes are done you need to restart the script for changes to load.
### Alternatively we can do it via your OS
### Alternatively we can do it via your OS (This example is Ubuntu like OS)
- Create and save bash script below. (Modify /path/to/file/ to the correct path)
- Name it cleanup.sh
- Make it executable.
```bash
#!/bin/bash
#!/bin/bash
DB_FILE="/path/to/file/packets.db"
# Stop DB service
sudo systemctl stop meshview-db.service
sudo systemctl stop meshview-web.service
@@ -492,10 +526,22 @@ sleep 5
echo "Run cleanup..."
# Run cleanup queries
sqlite3 "$DB_FILE" <<EOF
DELETE FROM packet WHERE import_time < datetime('now', '-14 day');
DELETE FROM packet_seen WHERE import_time < datetime('now', '-14 day');
DELETE FROM traceroute WHERE import_time < datetime('now', '-14 day');
DELETE FROM node WHERE last_update < datetime('now', '-14 day') OR last_update IS NULL OR last_update = '';
DELETE FROM packet
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'packet deleted: ' || changes();
DELETE FROM packet_seen
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'packet_seen deleted: ' || changes();
DELETE FROM traceroute
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'traceroute deleted: ' || changes();
DELETE FROM node
WHERE last_seen_us IS NULL
OR last_seen_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'node deleted: ' || changes();
VACUUM;
EOF
@@ -505,6 +551,80 @@ sudo systemctl start meshview-web.service
echo "Database cleanup completed on $(date)"
```
- If you are using PostgreSQL, use this version instead (adjust credentials/DB name):
```bash
#!/bin/bash
set -euo pipefail
DB="postgresql://meshview@localhost:5432/meshview"
RETENTION_DAYS=14
BATCH_SIZE=100
PSQL="/usr/bin/psql"
echo "[$(date)] Starting batched cleanup..."
while true; do
DELETED=$(
$PSQL "$DB" -At -v ON_ERROR_STOP=1 <<EOF
WITH cutoff AS (
SELECT (EXTRACT(EPOCH FROM (NOW() - INTERVAL '${RETENTION_DAYS} days')) * 1000000)::bigint AS ts
),
old_packets AS (
SELECT id
FROM packet, cutoff
WHERE import_time_us IS NOT NULL
AND import_time_us < cutoff.ts
ORDER BY id
LIMIT ${BATCH_SIZE}
),
ps_del AS (
DELETE FROM packet_seen
WHERE packet_id IN (SELECT id FROM old_packets)
RETURNING 1
),
tr_del AS (
DELETE FROM traceroute
WHERE packet_id IN (SELECT id FROM old_packets)
RETURNING 1
),
p_del AS (
DELETE FROM packet
WHERE id IN (SELECT id FROM old_packets)
RETURNING 1
)
SELECT COUNT(*) FROM p_del;
EOF
)
if [[ "$DELETED" -eq 0 ]]; then
break
fi
sleep 0.1
done
echo "[$(date)] Packet cleanup complete"
echo "[$(date)] Cleaning old nodes..."
$PSQL "$DB" -v ON_ERROR_STOP=1 <<EOF
DELETE FROM node
WHERE last_seen_us IS NOT NULL
AND last_seen_us < (
EXTRACT(EPOCH FROM (NOW() - INTERVAL '${RETENTION_DAYS} days')) * 1000000
);
EOF
echo "[$(date)] Node cleanup complete"
$PSQL "$DB" -c "VACUUM (ANALYZE) packet_seen;"
$PSQL "$DB" -c "VACUUM (ANALYZE) traceroute;"
$PSQL "$DB" -c "VACUUM (ANALYZE) packet;"
$PSQL "$DB" -c "VACUUM (ANALYZE) node;"
echo "[$(date)] Cleanup finished"
```
- Schedule running the script on a regular basis.
- In this example it runs every night at 2:00am.

View File

@@ -0,0 +1,27 @@
"""Add is_mqtt_gateway to node
Revision ID: 23dad03d2e42
Revises: a0c9c13e118f
Create Date: 2026-02-13 00:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "23dad03d2e42"
down_revision: str | None = "a0c9c13e118f"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
op.add_column("node", sa.Column("is_mqtt_gateway", sa.Boolean(), nullable=True))
def downgrade() -> None:
op.drop_column("node", "is_mqtt_gateway")

View File

@@ -0,0 +1,65 @@
"""Drop import_time columns.
Revision ID: 9f3b1a8d2c4f
Revises: 2b5a61bb2b75
Create Date: 2026-01-09 09:55:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "9f3b1a8d2c4f"
down_revision: str | None = "2b5a61bb2b75"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
conn = op.get_bind()
inspector = sa.inspect(conn)
packet_indexes = {idx["name"] for idx in inspector.get_indexes("packet")}
packet_columns = {col["name"] for col in inspector.get_columns("packet")}
with op.batch_alter_table("packet", schema=None) as batch_op:
if "idx_packet_import_time" in packet_indexes:
batch_op.drop_index("idx_packet_import_time")
if "idx_packet_from_node_time" in packet_indexes:
batch_op.drop_index("idx_packet_from_node_time")
if "import_time" in packet_columns:
batch_op.drop_column("import_time")
packet_seen_columns = {col["name"] for col in inspector.get_columns("packet_seen")}
with op.batch_alter_table("packet_seen", schema=None) as batch_op:
if "import_time" in packet_seen_columns:
batch_op.drop_column("import_time")
traceroute_indexes = {idx["name"] for idx in inspector.get_indexes("traceroute")}
traceroute_columns = {col["name"] for col in inspector.get_columns("traceroute")}
with op.batch_alter_table("traceroute", schema=None) as batch_op:
if "idx_traceroute_import_time" in traceroute_indexes:
batch_op.drop_index("idx_traceroute_import_time")
if "import_time" in traceroute_columns:
batch_op.drop_column("import_time")
def downgrade() -> None:
with op.batch_alter_table("traceroute", schema=None) as batch_op:
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
batch_op.create_index("idx_traceroute_import_time", ["import_time"], unique=False)
with op.batch_alter_table("packet_seen", schema=None) as batch_op:
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
with op.batch_alter_table("packet", schema=None) as batch_op:
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
batch_op.create_index("idx_packet_import_time", [sa.text("import_time DESC")], unique=False)
batch_op.create_index(
"idx_packet_from_node_time",
["from_node_id", sa.text("import_time DESC")],
unique=False,
)

View File

@@ -0,0 +1,43 @@
"""Add node_public_key table
Revision ID: a0c9c13e118f
Revises: d4d7b0c2e1a4
Create Date: 2026-02-06 00:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "a0c9c13e118f"
down_revision: str | None = "d4d7b0c2e1a4"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
op.create_table(
"node_public_key",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("node_id", sa.BigInteger(), nullable=False),
sa.Column("public_key", sa.String(), nullable=False),
sa.Column("first_seen_us", sa.BigInteger(), nullable=True),
sa.Column("last_seen_us", sa.BigInteger(), nullable=True),
)
op.create_index("idx_node_public_key_node_id", "node_public_key", ["node_id"], unique=False)
op.create_index(
"idx_node_public_key_public_key",
"node_public_key",
["public_key"],
unique=False,
)
def downgrade() -> None:
op.drop_index("idx_node_public_key_public_key", table_name="node_public_key")
op.drop_index("idx_node_public_key_node_id", table_name="node_public_key")
op.drop_table("node_public_key")

View File

@@ -0,0 +1,94 @@
"""Add last_update_us to node and migrate data.
Revision ID: b7c3c2e3a1f0
Revises: 9f3b1a8d2c4f
Create Date: 2026-01-12 10:12:00.000000
"""
from collections.abc import Sequence
from datetime import UTC, datetime
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b7c3c2e3a1f0"
down_revision: str | None = "9f3b1a8d2c4f"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def _parse_datetime(value):
if value is None:
return None
if isinstance(value, datetime):
dt = value
elif isinstance(value, str):
text = value.replace("Z", "+00:00")
try:
dt = datetime.fromisoformat(text)
except ValueError:
return None
else:
return None
if dt.tzinfo is None:
return dt.replace(tzinfo=UTC)
return dt.astimezone(UTC)
def upgrade() -> None:
conn = op.get_bind()
op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)
node = sa.table(
"node",
sa.column("id", sa.String()),
sa.column("last_update", sa.DateTime()),
sa.column("last_update_us", sa.BigInteger()),
)
rows = conn.execute(sa.select(node.c.id, node.c.last_update)).all()
for node_id, last_update in rows:
dt = _parse_datetime(last_update)
if dt is None:
continue
last_update_us = int(dt.timestamp() * 1_000_000)
conn.execute(
sa.update(node).where(node.c.id == node_id).values(last_update_us=last_update_us)
)
if conn.dialect.name == "sqlite":
with op.batch_alter_table("node", schema=None) as batch_op:
batch_op.drop_column("last_update")
else:
op.drop_column("node", "last_update")
def downgrade() -> None:
conn = op.get_bind()
op.add_column("node", sa.Column("last_update", sa.DateTime(), nullable=True))
node = sa.table(
"node",
sa.column("id", sa.String()),
sa.column("last_update", sa.DateTime()),
sa.column("last_update_us", sa.BigInteger()),
)
rows = conn.execute(sa.select(node.c.id, node.c.last_update_us)).all()
for node_id, last_update_us in rows:
if last_update_us is None:
continue
dt = datetime.fromtimestamp(last_update_us / 1_000_000, tz=UTC).replace(tzinfo=None)
conn.execute(sa.update(node).where(node.c.id == node_id).values(last_update=dt))
if conn.dialect.name == "sqlite":
with op.batch_alter_table("node", schema=None) as batch_op:
batch_op.drop_index("idx_node_last_update_us")
batch_op.drop_column("last_update_us")
else:
op.drop_index("idx_node_last_update_us", table_name="node")
op.drop_column("node", "last_update_us")

View File

@@ -0,0 +1,34 @@
"""Drop last_update_us from node.
Revision ID: d4d7b0c2e1a4
Revises: b7c3c2e3a1f0
Create Date: 2026-01-12 10:20:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "d4d7b0c2e1a4"
down_revision: str | None = "b7c3c2e3a1f0"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
conn = op.get_bind()
if conn.dialect.name == "sqlite":
with op.batch_alter_table("node", schema=None) as batch_op:
batch_op.drop_index("idx_node_last_update_us")
batch_op.drop_column("last_update_us")
else:
op.drop_index("idx_node_last_update_us", table_name="node")
op.drop_column("node", "last_update_us")
def downgrade() -> None:
op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)

90
container/config.ini Normal file
View File

@@ -0,0 +1,90 @@
# -------------------------
# Server Configuration
# -------------------------
[server]
# The address to bind the server to. Use * to listen on all interfaces.
bind = 0.0.0.0
# Port to run the web server on.
port = 8081
# Path to TLS certificate (leave blank to disable HTTPS).
tls_cert =
# Path for the ACME challenge if using Let's Encrypt.
acme_challenge =
# -------------------------
# Site Appearance & Behavior
# -------------------------
[site]
domain =
language = en
title = Bay Area Mesh
message = Real time data from around the bay area and beyond.
starting = /chat
nodes = True
conversations = True
everything = True
graphs = True
stats = True
net = True
map = True
top = True
map_top_left_lat = 39
map_top_left_lon = -123
map_bottom_right_lat = 36
map_bottom_right_lon = -121
map_interval = 3
firehose_interal = 3
weekly_net_message = Weekly Mesh check-in. We will keep it open on every Wednesday from 5:00pm for checkins. The message format should be (LONG NAME) - (CITY YOU ARE IN) #BayMeshNet.
net_tag = #BayMeshNet
# -------------------------
# MQTT Broker Configuration
# -------------------------
[mqtt]
server = mqtt.meshtastic.org
topics = ["msh/US/bayarea/#", "msh/US/CA/mrymesh/#", "msh/US/CA/sacvalley"]
port = 1883
username = meshdev
password = large4cats
skip_node_ids =
secondary_keys =
# -------------------------
# Database Configuration
# -------------------------
[database]
connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
# -------------------------
# Database Cleanup Configuration
# -------------------------
[cleanup]
enabled = False
days_to_keep = 14
hour = 2
minute = 00
vacuum = False
backup_enabled = False
backup_dir = ./backups
backup_hour = 2
backup_minute = 00
# -------------------------
# Logging Configuration
# -------------------------
[logging]
access_log = False
db_cleanup_logfile = /var/log/meshview/dbcleanup.log

View File

@@ -1,36 +0,0 @@
# MeshView Docker Container
> **Note:** This directory contains legacy Docker build files.
>
> **For current Docker usage instructions, please see [README-Docker.md](../README-Docker.md) in the project root.**
## Current Approach
Pre-built container images are automatically built and published to GitHub Container Registry:
```bash
docker pull ghcr.io/pablorevilla-meshtastic/meshview:latest
```
See **[README-Docker.md](../README-Docker.md)** for:
- Quick start instructions
- Volume mount configuration
- Docker Compose examples
- Backup configuration
- Troubleshooting
## Legacy Build (Not Recommended)
If you need to build your own image for development:
```bash
# From project root
docker build -f Containerfile -t meshview:local .
```
The current Containerfile uses:
- **Base Image**: `python:3.13-slim` (Debian-based)
- **Build tool**: `uv` for fast dependency installation
- **User**: Non-root user `app` (UID 10001)
- **Exposed Port**: `8081`
- **Volumes**: `/etc/meshview`, `/var/lib/meshview`, `/var/log/meshview`

View File

@@ -1,82 +1,38 @@
# API Documentation
## 1. Chat API
Base URL: `http(s)://<host>`
### GET `/api/chat`
Returns the most recent chat messages.
All endpoints return JSON. Timestamps are either ISO 8601 strings or `*_us` values in
microseconds since epoch.
**Query Parameters**
- `limit` (optional, int): Maximum number of messages to return. Default: `100`.
**Response Example**
```json
{
"packets": [
{
"id": 123,
"import_time": "2025-07-22T12:45:00",
"from_node_id": 987654,
"from_node": "Alice",
"channel": "main",
"payload": "Hello, world!"
}
]
}
```
---
### GET `/api/chat/updates`
Returns chat messages imported after a given timestamp.
**Query Parameters**
- `last_time` (optional, ISO timestamp): Only messages imported after this time are returned.
**Response Example**
```json
{
"packets": [
{
"id": 124,
"import_time": "2025-07-22T12:50:00",
"from_node_id": 987654,
"from_node": "Alice",
"channel": "main",
"payload": "New message!"
}
],
"latest_import_time": "2025-07-22T12:50:00"
}
```
---
## 2. Nodes API
## 1. Nodes API
### GET `/api/nodes`
Returns a list of all nodes, with optional filtering by last seen.
Returns a list of nodes, with optional filtering.
**Query Parameters**
- `hours` (optional, int): Return nodes seen in the last N hours.
- `days` (optional, int): Return nodes seen in the last N days.
- `last_seen_after` (optional, ISO timestamp): Return nodes seen after this time.
Query Parameters
- `node_id` (optional, int): Exact node ID.
- `role` (optional, string): Node role.
- `channel` (optional, string): Channel name.
- `hw_model` (optional, string): Hardware model.
- `days_active` (optional, int): Nodes seen within the last N days.
**Response Example**
Response Example
```json
{
"nodes": [
{
"id": 42,
"node_id": 1234,
"long_name": "Alice",
"short_name": "A",
"channel": "main",
"last_seen": "2025-07-22T12:40:00",
"hardware": "T-Beam",
"hw_model": "T-Beam",
"firmware": "1.2.3",
"role": "client",
"last_lat": 37.7749,
"last_long": -122.4194
"last_lat": 377749000,
"last_long": -1224194000,
"channel": "main",
"last_seen_us": 1736370123456789
}
]
}
```
---
## 2. Packets API
### GET `/api/packets`
Returns packets with optional filters.
Query Parameters
- `packet_id` (optional, int): Return exactly one packet (overrides other filters).
- `limit` (optional, int): Max packets to return, clamped 1-1000. Default: `50`.
- `since` (optional, int): Only packets imported after this microsecond timestamp.
- `portnum` (optional, int): Filter by port number.
- `contains` (optional, string): Payload substring filter.
- `from_node_id` (optional, int): Filter by sender node ID.
- `to_node_id` (optional, int): Filter by recipient node ID.
- `node_id` (optional, int): Legacy filter matching either from or to node ID.
Response Example
```json
{
"packets": [
{
"id": 123,
"import_time_us": 1736370123456789,
"channel": "main",
"from_node_id": 5678,
"to_node_id": 91011,
"portnum": 1,
"import_time": "2025-07-22T12:45:00",
"payload": "Hello, Bob!"
"long_name": "Alice",
"payload": "Hello, Bob!",
"to_long_name": "Bob",
"reply_id": 122
}
]
],
"latest_import_time": 1736370123456789
}
```
---
Notes
- For `portnum=1` (text messages), packets are filtered to remove sequence-only payloads.
- `latest_import_time` is returned when available for incremental polling (microseconds).
---
## 3. Channels API
### GET `/api/channels`
Returns channels seen in a time period.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
- `length` (optional, int): Number of periods to look back. Default: `24`.
Response Example
```json
{
"channels": ["LongFast", "MediumFast", "ShortFast"]
}
```
---
## 4. Stats API
### GET `/api/stats`
Retrieve packet statistics aggregated by time periods, with optional filtering.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
- `length` (optional, int): Number of periods to include. Default: `24`.
- `channel` (optional, string): Filter by channel (case-insensitive).
- `portnum` (optional, int): Filter by port number.
- `to_node` (optional, int): Filter by destination node ID.
- `from_node` (optional, int): Filter by source node ID.
- `node` (optional, int): If provided, return combined `sent` and `seen` totals for that node.
Response Example (series)
Response Example (series)
```json
{
"period_type": "hour",
"length": 24,
"channel": "main",
"to_node": 12345678,
"from_node": 87654321,
"data": [
{ "period": "2025-08-08 14:00", "count": 10 },
{ "period": "2025-08-08 15:00", "count": 7 }
]
}
```
Response Example (`node` totals)
```json
{
"node_id": 12345678,
"period_type": "hour",
"length": 24,
"sent": 42,
"seen": 58
}
```
---
### GET `/api/stats/count`
Returns total packet counts, optionally filtered.
Query Parameters
- `packet_id` (optional, int): Filter packet_seen by packet ID.
- `period_type` (optional, string): `hour` or `day`.
- `length` (optional, int): Number of periods to include.
- `channel` (optional, string): Filter by channel.
- `from_node` (optional, int): Filter by source node ID.
- `to_node` (optional, int): Filter by destination node ID.
Response Example
```json
{
"total_packets": 12345,
"total_seen": 67890
}
```
---
### GET `/api/stats/top`
Returns nodes sorted by packets seen, with pagination.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `day`.
- `length` (optional, int): Number of periods to include. Default: `1`.
- `channel` (optional, string): Filter by channel.
- `limit` (optional, int): Max nodes to return. Default: `20`, max `100`.
- `offset` (optional, int): Pagination offset. Default: `0`.
Response Example
```json
{
"total": 250,
"limit": 20,
"offset": 0,
"nodes": [
{
"node_id": 1234,
"long_name": "Alice",
"short_name": "A",
"channel": "main",
"sent": 100,
"seen": 240,
"avg": 2.4
}
// more entries...
]
}
```
---
## 5. Edges API
### GET `/api/edges`
Returns network edges (connections between nodes) based on traceroutes and neighbor info.
Traceroute edges are collected over the last 12 hours. Neighbor edges are based on
port 71 packets.
Query Parameters
- `type` (optional, string): `traceroute` or `neighbor`. If omitted, returns both.
- `node_id` (optional, int): Filter edges to only those touching a node.
Response Example
```json
{
"edges": [
{ "from": 12345678, "to": 87654321, "type": "traceroute" },
{ "from": 11111111, "to": 22222222, "type": "neighbor" }
]
}
```
---
## 6. Config API
### GET `/api/config`
Returns a safe subset of server configuration.
Response Example
```json
{
"site": {
"domain": "meshview.example.com",
"domain": "example.com",
"language": "en",
"title": "Bay Area Mesh",
"message": "Real time data from around the bay area",
"title": "Meshview",
"message": "",
"starting": "/chat",
"nodes": "true",
"conversations": "true",
"chat": "true",
"everything": "true",
"graphs": "true",
"stats": "true",
@@ -236,11 +249,11 @@ Returns the current site configuration (safe subset exposed to clients).
"firehose_interval": 3,
"weekly_net_message": "Weekly Mesh check-in message.",
"net_tag": "#BayMeshNet",
"version": "2.0.8 ~ 10-22-25"
"version": "3.0.0"
},
"mqtt": {
"server": "mqtt.bayme.sh",
"topics": ["msh/US/bayarea/#"]
"server": "mqtt.example.com",
"topics": ["msh/region/#"]
},
"cleanup": {
"enabled": "false",
}
}
```
---
## 7. Language API
### GET `/api/lang`
Returns translation strings.
Query Parameters
- `lang` (optional, string): Language code (e.g., `en`, `es`). Default from config or `en`.
- `section` (optional, string): Return only one section (e.g., `nodelist`, `firehose`).
Response Example
```json
{
"chat": {
"title": "Chat",
"send": "Send"
},
"map": {
"title": "Map",
"zoom_in": "Zoom In"
}
}
```
**Response Example (section-specific)**
Request: `/api/lang?section=chat`
```json
{
"title": "Chat",
"send": "Send"
"title": "Meshview",
"search_placeholder": "Search..."
}
```
---
## 8. Packets Seen API
### GET `/api/packets_seen/{packet_id}`
Returns packet_seen entries for a packet.
Path Parameters
- `packet_id` (required, int): Packet ID.
Response Example
```json
{
"seen": [
{
"packet_id": 123,
"node_id": 456,
"rx_time": "2025-07-22T12:45:00",
"hop_limit": 7,
"hop_start": 0,
"channel": "main",
"rx_snr": 5.0,
"rx_rssi": -90,
"topic": "msh/region/#",
"import_time_us": 1736370123456789
}
]
}
```
---
## 9. Traceroute API
### GET `/api/traceroute/{packet_id}`
Returns traceroute details and derived paths for a packet.
Path Parameters
- `packet_id` (required, int): Packet ID.
Response Example
```json
{
"packet": {
"id": 123,
"from": 111,
"to": 222,
"channel": "main"
},
"traceroute_packets": [
{
"index": 0,
"gateway_node_id": 333,
"done": true,
"forward_hops": [111, 444, 222],
"reverse_hops": [222, 444, 111]
}
],
"unique_forward_paths": [
{ "path": [111, 444, 222], "count": 2 }
],
"unique_reverse_paths": [
[222, 444, 111]
],
"winning_paths": [
[111, 444, 222]
]
}
```
---
## 10. Health API
### GET `/health`
Health check endpoint for monitoring, load balancers, and orchestration systems.
Returns service health and database status.
Response Example
```json
{
"status": "healthy",
"timestamp": "2025-11-03T14:30:00.123456Z",
"version": "3.0.0",
"git_revision": "6416978",
"timestamp": "2025-07-22T12:45:00+00:00",
"version": "3.0.3",
"git_revision": "abc1234",
"database": "connected",
"database_size": "853.03 MB",
"database_size_bytes": 894468096
}
```
**Response Example (Unhealthy)**
Status Code: `503 Service Unavailable`
```json
{
"status": "unhealthy",
"timestamp": "2025-11-03T14:30:00.123456Z",
"version": "2.0.8",
"git_revision": "6416978",
"database": "disconnected"
"database_size": "12.34 MB",
"database_size_bytes": 12939444
}
```
---
## 11. Version API
### GET `/version`
Returns version metadata.
Response Example
```json
{
"version": "2.0.8",
"release_date": "2025-10-22",
"git_revision": "6416978a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q",
"git_revision_short": "6416978"
"version": "3.0.3",
"release_date": "2026-1-15",
"git_revision": "abc1234",
"git_revision_short": "abc1234"
}
```
---
## Notes
- All timestamps (`import_time`, `last_seen`) are returned in ISO 8601 format.
- `portnum` is an integer representing the packet type.
- `payload` is always a UTF-8 decoded string.
- Node IDs are integers (e.g., `12345678`).

37
docs/COVERAGE.md Normal file
View File

@@ -0,0 +1,37 @@
# Coverage
## Predicted coverage
Meshview can display a predicted coverage boundary for a node. This is a **model**
estimate, not a guarantee of real-world performance.
### How it works
The coverage boundary is computed using the Longley-Rice / ITM **area mode**
propagation model. Area mode estimates average path loss over generic terrain
and does not use a terrain profile. This means it captures general distance
effects, but **does not** account for terrain shadows, buildings, or foliage.
### What you are seeing
The UI draws a **perimeter** (not a heatmap) that represents the furthest
distance where predicted signal strength is above a threshold (default
`-120 dBm`). The model is run radially from the node in multiple directions,
and the last point above the threshold forms the outline.
### Key parameters
- **Frequency**: default `907 MHz`
- **Transmit power**: default `20 dBm`
- **Antenna heights**: default `5 m` (TX) and `1.5 m` (RX)
- **Reliability**: default `0.5` (median)
- **Terrain irregularity**: default `90 m` (average terrain)
### Limitations
- No terrain or building data is used (area mode only).
- Results are sensitive to power, height, and threshold.
- Environmental factors can cause large real-world deviations.

View File

@@ -0,0 +1 @@
e1a6b3a868d735da72cd6c94c574d655129d390a

View File

@@ -3,8 +3,8 @@
import subprocess
from pathlib import Path
__version__ = "3.0.1"
__release_date__ = "2025-12-4"
__version__ = "3.0.5"
__release_date__ = "2026-2-6"
def get_git_revision():

View File

@@ -6,7 +6,7 @@ parser = argparse.ArgumentParser(description="MeshView Configuration Loader")
parser.add_argument(
"--config", type=str, default="config.ini", help="Path to config.ini file (default: config.ini)"
)
args, _ = parser.parse_known_args()
# Initialize config parser
config_parser = configparser.ConfigParser()

View File

@@ -1,3 +1,4 @@
from sqlalchemy.engine.url import make_url
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from meshview import models
@@ -9,10 +10,19 @@ async_session = None
def init_database(database_connection_string):
global engine, async_session
kwargs = {"echo": False}
# Ensure SQLite is opened in read-only mode
database_connection_string += "?mode=ro"
kwargs["connect_args"] = {"uri": True}
engine = create_async_engine(database_connection_string, **kwargs)
url = make_url(database_connection_string)
connect_args = {}
if url.drivername.startswith("sqlite"):
query = dict(url.query)
query.setdefault("mode", "ro")
url = url.set(query=query)
connect_args["uri"] = True
if connect_args:
kwargs["connect_args"] = connect_args
engine = create_async_engine(url, **kwargs)
async_session = async_sessionmaker(
bind=engine,
class_=AsyncSession,

13
meshview/deps.py Normal file
View File

@@ -0,0 +1,13 @@
import logging
from importlib.util import find_spec

logger = logging.getLogger(__name__)


def check_optional_deps() -> None:
    """Log a warning when optional runtime dependencies are absent.

    Currently checks only for ``pyitm`` (used for coverage prediction).
    The application keeps running either way; this check is informational.
    """
    # find_spec returns None when the module cannot be located on sys.path.
    pyitm_missing = find_spec("pyitm") is None
    if pyitm_missing:
        logger.warning(
            "Optional dependency missing: pyitm. "
            "Coverage prediction is disabled. "
            "Run: ./env/bin/pip install -r requirements.txt"
        )

View File

@@ -13,13 +13,40 @@
"go to node": "Go to Node",
"all": "All",
"portnum_options": {
"0": "Unknown",
"1": "Text Message",
"2": "Remote Hardware",
"3": "Position",
"4": "Node Info",
"5": "Routing",
"6": "Admin",
"7": "Text (Compressed)",
"8": "Waypoint",
"9": "Audio",
"10": "Detection Sensor",
"11": "Alert",
"12": "Key Verification",
"32": "Reply",
"33": "IP Tunnel",
"34": "Paxcounter",
"35": "Store Forward++",
"36": "Node Status",
"64": "Serial",
"65": "Store & Forward",
"66": "Range Test",
"67": "Telemetry",
"68": "ZPS",
"69": "Simulator",
"70": "Traceroute",
"71": "Neighbor Info"
}
"71": "Neighbor Info",
"72": "ATAK",
"73": "Map Report",
"74": "Power Stress",
"76": "Reticulum Tunnel",
"77": "Cayenne",
"256": "Private App",
"257": "ATAK Forwarder"
}
},
"chat": {
"chat_title": "Chats:",
@@ -53,8 +80,11 @@
"last_lat": "Last Latitude",
"last_long": "Last Longitude",
"channel": "Channel",
"mqtt_gateway": "MQTT",
"last_seen": "Last Seen",
"favorite": "Favorite",
"yes": "Yes",
"no": "No",
"time_just_now": "just now",
"time_min_ago": "min ago",
@@ -69,15 +99,21 @@
"view_packet_details": "More details"
},
"map": {
"show_routers_only": "Show Routers Only",
"share_view": "Share This View",
"reset_filters": "Reset Filters To Defaults",
"channel_label": "Channel:",
"map": {
"show_routers_only": "Show Routers Only",
"show_mqtt_only": "Show MQTT Gateways Only",
"share_view": "Share This View",
"reset_filters": "Reset Filters To Defaults",
"unmapped_packets_title": "Unmapped Packets",
"unmapped_packets_empty": "No recent unmapped packets.",
"channel_label": "Channel:",
"model_label": "Model:",
"role_label": "Role:",
"mqtt_gateway": "MQTT Gateway:",
"last_seen": "Last seen:",
"firmware": "Firmware:",
"yes": "Yes",
"no": "No",
"link_copied": "Link Copied!",
"legend_traceroute": "Traceroute (with arrows)",
"legend_neighbor": "Neighbor"
@@ -88,6 +124,7 @@
{
"mesh_stats_summary": "Mesh Statistics - Summary (all available in Database)",
"total_nodes": "Total Nodes",
"total_gateways": "Total Gateways",
"total_packets": "Total Packets",
"total_packets_seen": "Total Packets Seen",
"packets_per_day_all": "Packets per Day - All Ports (Last 14 Days)",
@@ -98,6 +135,10 @@
"hardware_breakdown": "Hardware Breakdown",
"role_breakdown": "Role Breakdown",
"channel_breakdown": "Channel Breakdown",
"gateway_channel_breakdown": "Gateway Channel Breakdown",
"gateway_role_breakdown": "Gateway Role Breakdown",
"gateway_firmware_breakdown": "Gateway Firmware Breakdown",
"no_gateways": "No gateways found",
"expand_chart": "Expand Chart",
"export_csv": "Export CSV",
"all_channels": "All Channels",
@@ -163,9 +204,11 @@
"hw_model": "Hardware Model",
"firmware": "Firmware",
"role": "Role",
"mqtt_gateway": "MQTT Gateway",
"channel": "Channel",
"latitude": "Latitude",
"longitude": "Longitude",
"first_update": "First Update",
"last_update": "Last Update",
"battery_voltage": "Battery & Voltage",
"air_channel": "Air & Channel Utilization",
@@ -179,7 +222,23 @@
"to": "To",
"port": "Port",
"direct_to_mqtt": "Direct to MQTT",
"all_broadcast": "All"
"all_broadcast": "All",
"statistics": "Statistics",
"last_24h": "24h",
"packets_sent": "Packets sent",
"times_seen": "Times seen",
"yes": "Yes",
"no": "No",
"copy_import_url": "Copy Import URL",
"show_qr_code": "Show QR Code",
"toggle_coverage": "Predicted Coverage",
"location_required": "Location required for coverage",
"coverage_help": "Coverage Help",
"share_contact_qr": "Share Contact QR",
"copy_url": "Copy URL",
"copied": "Copied!",
"potential_impersonation": "Potential Impersonation Detected",
"scan_qr_to_add": "Scan this QR code to add this node as a contact on another device."
},
"packet": {
"loading": "Loading packet information...",
@@ -205,4 +264,4 @@
}
}
}

View File

@@ -13,12 +13,39 @@
"go_to_node": "Ir al nodo",
"all": "Todos",
"portnum_options": {
"0": "Desconocido",
"1": "Mensaje de Texto",
"2": "Hardware Remoto",
"3": "Ubicación",
"4": "Información del Nodo",
"5": "Enrutamiento",
"6": "Administración",
"7": "Texto (Comprimido)",
"8": "Punto de Referencia",
"9": "Audio",
"10": "Sensor de Detección",
"11": "Alerta",
"12": "Verificación de Clave",
"32": "Respuesta",
"33": "Túnel IP",
"34": "Paxcounter",
"35": "Store Forward++",
"36": "Estado del Nodo",
"64": "Serial",
"65": "Store & Forward",
"66": "Prueba de Alcance",
"67": "Telemetría",
"68": "ZPS",
"69": "Simulador",
"70": "Traceroute",
"71": "Información de Vecinos"
"71": "Información de Vecinos",
"72": "ATAK",
"73": "Reporte de Mapa",
"74": "Prueba de Energía",
"76": "Túnel Reticulum",
"77": "Cayenne",
"256": "App Privada",
"257": "ATAK Forwarder"
}
},
@@ -51,8 +78,11 @@
"last_lat": "Última latitud",
"last_long": "Última longitud",
"channel": "Canal",
"mqtt_gateway": "MQTT",
"last_seen": "Última vez visto",
"favorite": "Favorito",
"yes": "Sí",
"no": "No",
"time_just_now": "justo ahora",
"time_min_ago": "min atrás",
"time_hr_ago": "h atrás",
@@ -67,14 +97,21 @@
},
"map": {
"filter_routers_only": "Mostrar solo enrutadores",
"share_view": "Compartir esta vista",
"reset_filters": "Restablecer filtros",
"channel_label": "Canal:",
"filter_routers_only": "Mostrar solo enrutadores",
"show_routers_only": "Mostrar solo enrutadores",
"show_mqtt_only": "Mostrar solo gateways MQTT",
"share_view": "Compartir esta vista",
"reset_filters": "Restablecer filtros",
"unmapped_packets_title": "Paquetes sin mapa",
"unmapped_packets_empty": "No hay paquetes sin mapa recientes.",
"channel_label": "Canal:",
"model_label": "Modelo:",
"role_label": "Rol:",
"mqtt_gateway": "Gateway MQTT:",
"last_seen": "Visto por última vez:",
"firmware": "Firmware:",
"yes": "Sí",
"no": "No",
"link_copied": "¡Enlace copiado!",
"legend_traceroute": "Ruta de traceroute (flechas de dirección)",
"legend_neighbor": "Vínculo de vecinos"
@@ -83,6 +120,7 @@
"stats": {
"mesh_stats_summary": "Estadísticas de la Malla - Resumen (completas en la base de datos)",
"total_nodes": "Nodos Totales",
"total_gateways": "Gateways Totales",
"total_packets": "Paquetes Totales",
"total_packets_seen": "Paquetes Totales Vistos",
"packets_per_day_all": "Paquetes por Día - Todos los Puertos (Últimos 14 Días)",
@@ -93,6 +131,10 @@
"hardware_breakdown": "Distribución de Hardware",
"role_breakdown": "Distribución de Roles",
"channel_breakdown": "Distribución de Canales",
"gateway_channel_breakdown": "Desglose de canales de gateways",
"gateway_role_breakdown": "Desglose de roles de gateways",
"gateway_firmware_breakdown": "Desglose de firmware de gateways",
"no_gateways": "No se encontraron gateways",
"expand_chart": "Ampliar Gráfico",
"export_csv": "Exportar CSV",
"all_channels": "Todos los Canales"
@@ -148,9 +190,11 @@
"hw_model": "Modelo de Hardware",
"firmware": "Firmware",
"role": "Rol",
"mqtt_gateway": "Gateway MQTT",
"channel": "Canal",
"latitude": "Latitud",
"longitude": "Longitud",
"first_update": "Primera Actualización",
"last_update": "Última Actualización",
"battery_voltage": "Batería y voltaje",
"air_channel": "Utilización del aire y del canal",
@@ -164,7 +208,23 @@
"to": "A",
"port": "Puerto",
"direct_to_mqtt": "Directo a MQTT",
"all_broadcast": "Todos"
"all_broadcast": "Todos",
"statistics": "Estadísticas",
"last_24h": "24h",
"packets_sent": "Paquetes enviados",
"times_seen": "Veces visto",
"yes": "Sí",
"no": "No",
"copy_import_url": "Copiar URL de importación",
"show_qr_code": "Mostrar código QR",
"toggle_coverage": "Cobertura predicha",
"location_required": "Se requiere ubicación para la cobertura",
"coverage_help": "Ayuda de cobertura",
"share_contact_qr": "Compartir contacto QR",
"copy_url": "Copiar URL",
"copied": "¡Copiado!",
"potential_impersonation": "Posible suplantación detectada",
"scan_qr_to_add": "Escanea este código QR para agregar este nodo como contacto en otro dispositivo."
},
"packet": {

View File

@@ -186,19 +186,24 @@ async def create_migration_status_table(engine: AsyncEngine) -> None:
text("""
CREATE TABLE IF NOT EXISTS migration_status (
id INTEGER PRIMARY KEY CHECK (id = 1),
in_progress BOOLEAN NOT NULL DEFAULT 0,
in_progress BOOLEAN NOT NULL DEFAULT FALSE,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
)
# Insert initial row if not exists
await conn.execute(
result = await conn.execute(
text("""
INSERT OR IGNORE INTO migration_status (id, in_progress)
VALUES (1, 0)
SELECT 1 FROM migration_status WHERE id = 1
""")
)
if result.first() is None:
await conn.execute(
text("""
INSERT INTO migration_status (id, in_progress)
VALUES (1, FALSE)
""")
)
async def set_migration_in_progress(engine: AsyncEngine, in_progress: bool) -> None:

View File

@@ -1,5 +1,3 @@
from datetime import datetime
from sqlalchemy import BigInteger, ForeignKey, Index, desc
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
@@ -22,7 +20,7 @@ class Node(Base):
last_lat: Mapped[int] = mapped_column(BigInteger, nullable=True)
last_long: Mapped[int] = mapped_column(BigInteger, nullable=True)
channel: Mapped[str] = mapped_column(nullable=True)
last_update: Mapped[datetime] = mapped_column(nullable=True)
is_mqtt_gateway: Mapped[bool] = mapped_column(nullable=True)
first_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
last_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
@@ -33,11 +31,7 @@ class Node(Base):
)
def to_dict(self):
return {
column.name: getattr(self, column.name)
for column in self.__table__.columns
if column.name != "last_update"
}
return {column.name: getattr(self, column.name) for column in self.__table__.columns}
class Packet(Base):
@@ -55,17 +49,13 @@ class Packet(Base):
overlaps="from_node",
)
payload: Mapped[bytes] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
channel: Mapped[str] = mapped_column(nullable=True)
__table_args__ = (
Index("idx_packet_from_node_id", "from_node_id"),
Index("idx_packet_to_node_id", "to_node_id"),
Index("idx_packet_import_time", desc("import_time")),
Index("idx_packet_import_time_us", desc("import_time_us")),
# Composite index for /top endpoint performance - filters by from_node_id AND import_time
Index("idx_packet_from_node_time", "from_node_id", desc("import_time")),
Index("idx_packet_from_node_time_us", "from_node_id", desc("import_time_us")),
)
@@ -86,7 +76,6 @@ class PacketSeen(Base):
rx_snr: Mapped[float] = mapped_column(nullable=True)
rx_rssi: Mapped[int] = mapped_column(nullable=True)
topic: Mapped[str] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
@@ -108,11 +97,25 @@ class Traceroute(Base):
gateway_node_id: Mapped[int] = mapped_column(BigInteger, nullable=True)
done: Mapped[bool] = mapped_column(nullable=True)
route: Mapped[bytes] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
route_return: Mapped[bytes] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
Index("idx_traceroute_import_time", "import_time"),
Index("idx_traceroute_packet_id", "packet_id"),
Index("idx_traceroute_import_time_us", "import_time_us"),
)
class NodePublicKey(Base):
__tablename__ = "node_public_key"
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
node_id: Mapped[int] = mapped_column(BigInteger, nullable=False)
public_key: Mapped[str] = mapped_column(nullable=False)
first_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
last_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
Index("idx_node_public_key_node_id", "node_id"),
Index("idx_node_public_key_public_key", "public_key"),
)

View File

@@ -1,3 +1,5 @@
from sqlalchemy import event
from sqlalchemy.engine.url import make_url
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from meshview import models
@@ -5,9 +7,26 @@ from meshview import models
def init_database(database_connection_string):
global engine, async_session
engine = create_async_engine(
database_connection_string, echo=False, connect_args={"timeout": 900}
)
url = make_url(database_connection_string)
kwargs = {"echo": False}
if url.drivername.startswith("sqlite"):
kwargs["connect_args"] = {"timeout": 900} # seconds
engine = create_async_engine(url, **kwargs)
# Enforce SQLite pragmas on every new DB connection
if url.drivername.startswith("sqlite"):
@event.listens_for(engine.sync_engine, "connect")
def _set_sqlite_pragmas(dbapi_conn, _):
cursor = dbapi_conn.cursor()
cursor.execute("PRAGMA journal_mode=WAL;")
cursor.execute("PRAGMA busy_timeout=900000;") # ms
cursor.execute("PRAGMA synchronous=NORMAL;")
cursor.close()
async_session = async_sessionmaker(engine, expire_on_commit=False)

View File

@@ -8,9 +8,11 @@ import aiomqtt
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from google.protobuf.message import DecodeError
from meshtastic.protobuf.mesh_pb2 import Data
from meshtastic.protobuf.mqtt_pb2 import ServiceEnvelope
from meshview.config import CONFIG
KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
PRIMARY_KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
logging.basicConfig(
level=logging.INFO,
@@ -21,24 +23,94 @@ logging.basicConfig(
logger = logging.getLogger(__name__)
def decrypt(packet):
def _parse_skip_node_ids():
    """Read ``mqtt.skip_node_ids`` from CONFIG and return it as a set of ints.

    Accepts a comma-separated string; base-prefixed values (e.g. ``0x10``)
    are honoured via ``int(..., 0)``. Unparsable entries are logged and
    skipped. Returns an empty set when the option is absent or blank.
    """
    raw = CONFIG.get("mqtt", {}).get("skip_node_ids", "")
    if not raw:
        return set()
    if isinstance(raw, str):
        raw = raw.strip()
        if not raw:
            return set()
        candidates = [part.strip() for part in raw.split(",") if part.strip()]
    else:
        # Non-string config value (e.g. a bare int): treat as a single entry.
        candidates = [raw]
    parsed = set()
    for candidate in candidates:
        try:
            parsed.add(int(candidate, 0))
        except (TypeError, ValueError):
            logger.warning("Invalid node id in mqtt.skip_node_ids: %s", candidate)
    return parsed
def _strip_quotes(value):
if len(value) >= 2 and value[0] == value[-1] and value[0] in ("'", '"'):
return value[1:-1]
return value
def _parse_secondary_keys():
    """Read ``mqtt.secondary_keys`` from CONFIG and return decoded key bytes.

    Accepts a comma-separated string of base64 values (optionally quoted).
    Entries that fail base64 decoding are logged and skipped. Returns an
    empty list when the option is absent or blank.
    """
    raw = CONFIG.get("mqtt", {}).get("secondary_keys", "")
    if not raw:
        return []
    if isinstance(raw, str):
        raw = raw.strip()
        if not raw:
            return []
        candidates = [part.strip() for part in raw.split(",") if part.strip()]
    else:
        # Non-string config value: treat as a single entry.
        candidates = [raw]
    decoded_keys = []
    for candidate in candidates:
        try:
            # binascii.Error (raised on bad base64) is a ValueError subclass.
            text = _strip_quotes(str(candidate).strip())
            if text:
                decoded_keys.append(base64.b64decode(text))
        except (TypeError, ValueError):
            logger.warning("Invalid base64 key in mqtt.secondary_keys: %s", candidate)
    return decoded_keys
SKIP_NODE_IDS = _parse_skip_node_ids()
SECONDARY_KEYS = _parse_secondary_keys()
logger.info("Primary key: %s", PRIMARY_KEY)
if SECONDARY_KEYS:
logger.info("Secondary keys: %s", SECONDARY_KEYS)
else:
logger.info("Secondary keys: []")
# Thank you to "Robert Grizzell" for the decryption code!
# https://github.com/rgrizzell
def decrypt(packet, key):
if packet.HasField("decoded"):
return
return True
packet_id = packet.id.to_bytes(8, "little")
from_node_id = getattr(packet, "from").to_bytes(8, "little")
nonce = packet_id + from_node_id
cipher = Cipher(algorithms.AES(KEY), modes.CTR(nonce))
cipher = Cipher(algorithms.AES(key), modes.CTR(nonce))
decryptor = cipher.decryptor()
raw_proto = decryptor.update(packet.encrypted) + decryptor.finalize()
try:
packet.decoded.ParseFromString(raw_proto)
data = Data()
data.ParseFromString(raw_proto)
packet.decoded.CopyFrom(data)
except DecodeError:
pass
return False
return True
async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_passwd):
identifier = str(random.getrandbits(16))
keyring = [PRIMARY_KEY, *SECONDARY_KEYS]
msg_count = 0
start_time = None
while True:
@@ -65,14 +137,14 @@ async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_pa
except DecodeError:
continue
decrypt(envelope.packet)
# print(envelope.packet.decoded)
for key in keyring:
if decrypt(envelope.packet, key):
break
if not envelope.packet.decoded:
continue
# Skip packets from specific node
# FIXME: make this configurable as a list of node IDs to skip
if getattr(envelope.packet, "from", None) == 2144342101:
# Skip packets from configured node IDs
if getattr(envelope.packet, "from", None) in SKIP_NODE_IDS:
continue
msg_count += 1

View File

@@ -1,14 +1,21 @@
import datetime
import logging
import re
import time
from sqlalchemy import select
from sqlalchemy import select, update
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
from sqlalchemy.exc import IntegrityError
from meshtastic.protobuf.config_pb2 import Config
from meshtastic.protobuf.mesh_pb2 import HardwareModel
from meshtastic.protobuf.portnums_pb2 import PortNum
from meshview import decode_payload, mqtt_database
from meshview.models import Node, Packet, PacketSeen, Traceroute
from meshview.models import Node, NodePublicKey, Packet, PacketSeen, Traceroute
logger = logging.getLogger(__name__)
MQTT_GATEWAY_CACHE: set[int] = set()
async def process_envelope(topic, env):
@@ -37,8 +44,7 @@ async def process_envelope(topic, env):
await session.execute(select(Node).where(Node.node_id == node_id))
).scalar_one_or_none()
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
now_us = int(time.time() * 1_000_000)
if node:
node.node_id = node_id
@@ -50,7 +56,6 @@ async def process_envelope(topic, env):
node.last_lat = map_report.latitude_i
node.last_long = map_report.longitude_i
node.firmware = map_report.firmware_version
node.last_update = now
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
@@ -66,7 +71,6 @@ async def process_envelope(topic, env):
firmware=map_report.firmware_version,
last_lat=map_report.latitude_i,
last_long=map_report.longitude_i,
last_update=now,
first_seen_us=now_us,
last_seen_us=now_us,
)
@@ -82,29 +86,43 @@ async def process_envelope(topic, env):
async with mqtt_database.async_session() as session:
# --- Packet insert with ON CONFLICT DO NOTHING
result = await session.execute(select(Packet).where(Packet.id == env.packet.id))
# FIXME: Not Used
# new_packet = False
packet = result.scalar_one_or_none()
if not packet:
# FIXME: Not Used
# new_packet = True
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
stmt = (
sqlite_insert(Packet)
.values(
id=env.packet.id,
portnum=env.packet.decoded.portnum,
from_node_id=getattr(env.packet, "from"),
to_node_id=env.packet.to,
payload=env.packet.SerializeToString(),
import_time=now,
import_time_us=now_us,
channel=env.channel_id,
now_us = int(time.time() * 1_000_000)
packet_values = {
"id": env.packet.id,
"portnum": env.packet.decoded.portnum,
"from_node_id": getattr(env.packet, "from"),
"to_node_id": env.packet.to,
"payload": env.packet.SerializeToString(),
"import_time_us": now_us,
"channel": env.channel_id,
}
dialect = session.get_bind().dialect.name
stmt = None
if dialect == "sqlite":
stmt = (
sqlite_insert(Packet)
.values(**packet_values)
.on_conflict_do_nothing(index_elements=["id"])
)
.on_conflict_do_nothing(index_elements=["id"])
)
await session.execute(stmt)
elif dialect == "postgresql":
stmt = (
pg_insert(Packet)
.values(**packet_values)
.on_conflict_do_nothing(index_elements=["id"])
)
if stmt is not None:
await session.execute(stmt)
else:
try:
async with session.begin_nested():
session.add(Packet(**packet_values))
await session.flush()
except IntegrityError:
pass
# --- PacketSeen (no conflict handling here, normal insert)
@@ -115,6 +133,12 @@ async def process_envelope(topic, env):
else:
node_id = int(env.gateway_id[1:], 16)
if node_id not in MQTT_GATEWAY_CACHE:
MQTT_GATEWAY_CACHE.add(node_id)
await session.execute(
update(Node).where(Node.node_id == node_id).values(is_mqtt_gateway=True)
)
result = await session.execute(
select(PacketSeen).where(
PacketSeen.packet_id == env.packet.id,
@@ -123,8 +147,7 @@ async def process_envelope(topic, env):
)
)
if not result.scalar_one_or_none():
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
now_us = int(time.time() * 1_000_000)
seen = PacketSeen(
packet_id=env.packet.id,
node_id=int(env.gateway_id[1:], 16),
@@ -135,7 +158,6 @@ async def process_envelope(topic, env):
hop_limit=env.packet.hop_limit,
hop_start=env.packet.hop_start,
topic=topic,
import_time=now,
import_time_us=now_us,
)
session.add(seen)
@@ -167,8 +189,7 @@ async def process_envelope(topic, env):
await session.execute(select(Node).where(Node.id == user.id))
).scalar_one_or_none()
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
now_us = int(time.time() * 1_000_000)
if node:
node.node_id = node_id
@@ -177,7 +198,6 @@ async def process_envelope(topic, env):
node.hw_model = hw_model
node.role = role
node.channel = env.channel_id
node.last_update = now
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
@@ -190,11 +210,32 @@ async def process_envelope(topic, env):
hw_model=hw_model,
role=role,
channel=env.channel_id,
last_update=now,
first_seen_us=now_us,
last_seen_us=now_us,
)
session.add(node)
if user.public_key:
public_key_hex = user.public_key.hex()
existing_key = (
await session.execute(
select(NodePublicKey).where(
NodePublicKey.node_id == node_id,
NodePublicKey.public_key == public_key_hex,
)
)
).scalar_one_or_none()
if existing_key:
existing_key.last_seen_us = now_us
else:
new_key = NodePublicKey(
node_id=node_id,
public_key=public_key_hex,
first_seen_us=now_us,
last_seen_us=now_us,
)
session.add(new_key)
except Exception as e:
print(f"Error processing NODEINFO_APP: {e}")
@@ -209,11 +250,9 @@ async def process_envelope(topic, env):
await session.execute(select(Node).where(Node.node_id == from_node_id))
).scalar_one_or_none()
if node:
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
now_us = int(time.time() * 1_000_000)
node.last_lat = position.latitude_i
node.last_long = position.longitude_i
node.last_update = now
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
@@ -223,21 +262,23 @@ async def process_envelope(topic, env):
if env.packet.decoded.portnum == PortNum.TRACEROUTE_APP:
packet_id = env.packet.id
if packet_id is not None:
now = datetime.datetime.now(datetime.UTC)
now_us = int(now.timestamp() * 1_000_000)
now_us = int(time.time() * 1_000_000)
session.add(
Traceroute(
packet_id=packet_id,
route=env.packet.decoded.payload,
done=not env.packet.decoded.want_response,
gateway_node_id=int(env.gateway_id[1:], 16),
import_time=now,
import_time_us=now_us,
)
)
await session.commit()
# if new_packet:
# await packet.awaitable_attrs.to_node
# await packet.awaitable_attrs.from_node
async def load_gateway_cache():
    """Prime MQTT_GATEWAY_CACHE from the database.

    Loads the id of every node already flagged ``is_mqtt_gateway`` so the
    packet-processing path can skip redundant UPDATE statements for gateways
    it has already recorded.
    """
    async with mqtt_database.async_session() as session:
        stmt = select(Node.node_id).where(Node.is_mqtt_gateway == True)  # noqa: E712
        gateway_ids = (await session.execute(stmt)).scalars().all()
        MQTT_GATEWAY_CACHE.update(gateway_ids)

146
meshview/radio/coverage.py Normal file
View File

@@ -0,0 +1,146 @@
import math
from functools import lru_cache
# pyitm (bindings for the Irregular Terrain Model / Longley-Rice) is an
# optional dependency: when it is missing, the compute_* functions below
# degrade to returning empty results instead of raising.
try:
    from pyitm import itm
    ITM_AVAILABLE = True
except Exception:
    itm = None
    ITM_AVAILABLE = False

# ITM model defaults passed to itm.area().
# NOTE(review): units are presumed from ITM conventions (conductivity in S/m,
# delta-h in meters) - confirm against the pyitm documentation.
DEFAULT_CLIMATE = 5  # Continental temperate
DEFAULT_GROUND = 0.005  # Average ground conductivity
DEFAULT_EPS_DIELECT = 15.0  # ground relative permittivity
DEFAULT_DELTA_H = 90.0  # terrain irregularity parameter
DEFAULT_RELIABILITY = 0.5  # ITM time-availability fraction
DEFAULT_MIN_DBM = -130.0  # dBm
DEFAULT_MAX_DBM = -80.0  # dBm
DEFAULT_THRESHOLD_DBM = -120.0  # dBm
EARTH_RADIUS_KM = 6371.0  # mean Earth radius, used by destination_point
BEARING_STEP_DEG = 5  # one radial every 5 degrees -> 72 radials per ring
def destination_point(
    lat: float, lon: float, bearing_deg: float, distance_km: float
) -> tuple[float, float]:
    """Great-circle destination from ``(lat, lon)``.

    Travels ``distance_km`` along initial bearing ``bearing_deg`` (degrees
    clockwise from north) over a spherical Earth and returns the resulting
    ``(latitude, longitude)`` in degrees.
    """
    # Angular distance in radians; 6371.0 km is the mean Earth radius
    # (the same value as the module-level EARTH_RADIUS_KM constant).
    ang = distance_km / 6371.0
    phi1 = math.radians(lat)
    lam1 = math.radians(lon)
    theta = math.radians(bearing_deg)

    phi2 = math.asin(
        math.sin(phi1) * math.cos(ang)
        + math.cos(phi1) * math.sin(ang) * math.cos(theta)
    )
    dlam = math.atan2(
        math.sin(theta) * math.sin(ang) * math.cos(phi1),
        math.cos(ang) - math.sin(phi1) * math.sin(phi2),
    )
    return math.degrees(phi2), math.degrees(lam1 + dlam)
@lru_cache(maxsize=512)
def compute_coverage(
    lat: float,
    lon: float,
    freq_mhz: float,
    tx_dbm: float,
    tx_height_m: float,
    rx_height_m: float,
    radius_km: float,
    step_km: float,
    reliability: float,
) -> list[tuple[float, float, float]]:
    """Predict received signal level on a polar grid around a transmitter.

    Samples concentric rings every ``step_km`` out to ``radius_km`` (starting
    no closer than 1 km), one sample per ``BEARING_STEP_DEG`` degrees of
    bearing, using the ITM (Longley-Rice) area-prediction model.

    Args:
        lat, lon: Transmitter position in degrees.
        freq_mhz: Carrier frequency in MHz.
        tx_dbm: Transmit power in dBm.
        tx_height_m, rx_height_m: Antenna heights in meters.
        radius_km: Maximum prediction distance in km.
        step_km: Ring spacing in km; must be positive.
        reliability: ITM time-availability fraction (0..1).

    Returns:
        List of ``(rx_lat, rx_lon, rx_dbm)`` samples; empty when pyitm is
        not installed or the inputs are degenerate.

    Note:
        Results are memoized with ``lru_cache``; callers must treat the
        returned list as read-only, since mutating it would corrupt the cache.
    """
    if not ITM_AVAILABLE:
        return []
    # Fix: a non-positive step would never advance `distance` below, so the
    # while-loop would spin forever. Bail out with an empty result instead.
    if step_km <= 0:
        return []
    points: list[tuple[float, float, float]] = []
    # Start at least 1 km out (original behavior; presumably the area model
    # is not meaningful at very short range - TODO confirm).
    distance = max(step_km, 1.0)
    while distance <= radius_km:
        for bearing in range(0, 360, BEARING_STEP_DEG):
            rx_lat, rx_lon = destination_point(lat, lon, bearing, distance)
            try:
                loss_db, _ = itm.area(
                    ModVar=2,
                    deltaH=DEFAULT_DELTA_H,
                    tht_m=tx_height_m,
                    rht_m=rx_height_m,
                    dist_km=distance,
                    TSiteCriteria=0,
                    RSiteCriteria=0,
                    eps_dielect=DEFAULT_EPS_DIELECT,
                    sgm_conductivity=DEFAULT_GROUND,
                    eno_ns_surfref=301,
                    frq_mhz=freq_mhz,
                    radio_climate=DEFAULT_CLIMATE,
                    pol=1,
                    pctTime=reliability,
                    pctLoc=0.5,
                    pctConf=0.5,
                )
            except itm.InputError:
                # Skip samples the model rejects rather than aborting the scan.
                continue
            # Received power = transmit power minus predicted path loss.
            rx_dbm = tx_dbm - loss_db
            points.append((rx_lat, rx_lon, rx_dbm))
        distance += step_km
    return points
@lru_cache(maxsize=512)
def compute_perimeter(
    lat: float,
    lon: float,
    freq_mhz: float,
    tx_dbm: float,
    tx_height_m: float,
    rx_height_m: float,
    radius_km: float,
    step_km: float,
    reliability: float,
    threshold_dbm: float,
) -> list[tuple[float, float]]:
    """Estimate the outer boundary of usable coverage around a transmitter.

    For each bearing (every ``BEARING_STEP_DEG`` degrees) this walks outward
    in ``step_km`` increments up to ``radius_km`` and keeps the farthest
    sample whose predicted received power is still at or above
    ``threshold_dbm``, using the ITM (Longley-Rice) area model.

    Args:
        lat, lon: Transmitter position in degrees.
        freq_mhz: Carrier frequency in MHz.
        tx_dbm: Transmit power in dBm.
        tx_height_m, rx_height_m: Antenna heights in meters.
        radius_km: Maximum search distance in km.
        step_km: Search increment in km; must be positive.
        reliability: ITM time-availability fraction (0..1).
        threshold_dbm: Minimum received power counted as coverage.

    Returns:
        List of ``(lat, lon)`` perimeter points, one per bearing that has any
        coverage; empty when pyitm is not installed or inputs are degenerate.

    Note:
        Results are memoized with ``lru_cache``; callers must treat the
        returned list as read-only.
    """
    if not ITM_AVAILABLE:
        return []
    # Fix: a non-positive step would never advance `dist` in the inner loop
    # (both the success and the InputError paths add step_km), so the walk
    # would spin forever. Bail out with an empty result instead.
    if step_km <= 0:
        return []
    perimeter: list[tuple[float, float]] = []
    # Start at least 1 km out (matches compute_coverage's starting ring).
    start_km = max(step_km, 1.0)
    for bearing in range(0, 360, BEARING_STEP_DEG):
        last_point = None
        dist = start_km
        while dist <= radius_km:
            try:
                loss_db, _ = itm.area(
                    ModVar=2,
                    deltaH=DEFAULT_DELTA_H,
                    tht_m=tx_height_m,
                    rht_m=rx_height_m,
                    dist_km=dist,
                    TSiteCriteria=0,
                    RSiteCriteria=0,
                    eps_dielect=DEFAULT_EPS_DIELECT,
                    sgm_conductivity=DEFAULT_GROUND,
                    eno_ns_surfref=301,
                    frq_mhz=freq_mhz,
                    radio_climate=DEFAULT_CLIMATE,
                    pol=1,
                    pctTime=reliability,
                    pctLoc=0.5,
                    pctConf=0.5,
                )
            except itm.InputError:
                # Model rejected this distance; keep walking outward.
                dist += step_km
                continue
            rx_dbm = tx_dbm - loss_db
            if rx_dbm >= threshold_dbm:
                # Still in coverage: remember the farthest good point so far.
                last_point = destination_point(lat, lon, bearing, dist)
            dist += step_km
        if last_point:
            perimeter.append(last_point)
    return perimeter

View File

@@ -44,6 +44,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js" crossorigin></script>
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js" crossorigin></script>
<script src="/static/portmaps.js"></script>
<script>
(async function(){
@@ -75,8 +76,8 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
return color;
}
function timeAgo(dateStr){
const diff = Date.now() - new Date(dateStr);
function timeAgoFromUs(us){
const diff = Date.now() - (us / 1000);
const s=Math.floor(diff/1000), m=Math.floor(s/60), h=Math.floor(m/60), d=Math.floor(h/24);
if(d>0) return d+'d'; if(h>0) return h+'h'; if(m>0) return m+'m'; return s+'s';
}
@@ -97,7 +98,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
const channels = new Set();
const activeBlinks = new Map();
const portMap = {1:"Text",67:"Telemetry",3:"Position",70:"Traceroute",4:"Node Info",71:"Neighbour Info",73:"Map Report"};
const portMap = window.PORT_LABEL_MAP;
nodes.forEach(node=>{
if(isInvalidCoord(node)) return;
@@ -118,7 +119,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
<b>Channel:</b> ${node.channel}<br>
<b>Model:</b> ${node.hw_model}<br>
<b>Role:</b> ${node.role}<br>`;
if(node.last_update) popupContent+=`<b>Last seen:</b> ${timeAgo(node.last_update)}<br>`;
if(node.last_seen_us) popupContent+=`<b>Last seen:</b> ${timeAgoFromUs(node.last_seen_us)}<br>`;
if(node.firmware) popupContent+=`<b>Firmware:</b> ${node.firmware}<br>`;
marker.on('click', e=>{

View File

@@ -0,0 +1,77 @@
// Shared port label/color definitions for UI pages.
// Port numbers defined in: https://github.com/meshtastic/protobufs/blob/master/meshtastic/portnums.proto
// Loaded via <script src="/static/portmaps.js"> before the page scripts that
// read window.PORT_LABEL_MAP / window.PORT_COLOR_MAP (or the aliases below).

// Human-readable label for each Meshtastic port number.
window.PORT_LABEL_MAP = {
  0: "Unknown",
  1: "Text",
  2: "Remote Hardware",
  3: "Position",
  4: "Node Info",
  5: "Routing",
  6: "Admin",
  7: "Text (Compressed)",
  8: "Waypoint",
  9: "Audio",
  10: "Detection Sensor",
  11: "Alert",
  12: "Key Verification",
  32: "Reply",
  33: "IP Tunnel",
  34: "Paxcounter",
  35: "Store Forward++",
  36: "Node Status",
  64: "Serial",
  65: "Store & Forward",
  66: "Range Test",
  67: "Telemetry",
  68: "ZPS",
  69: "Simulator",
  70: "Traceroute",
  71: "Neighbor",
  72: "ATAK",
  73: "Map Report",
  74: "Power Stress",
  76: "Reticulum Tunnel",
  77: "Cayenne",
  256: "Private App",
  257: "ATAK Forwarder",
};

// Marker/series color per port number; the key set mirrors PORT_LABEL_MAP.
// Note: 11 (Alert) and 70 (Traceroute) intentionally share #f44336.
window.PORT_COLOR_MAP = {
  0: "#6c757d", // gray - Unknown
  1: "#1f77b4", // blue - Text
  2: "#795548", // brown - Remote Hardware
  3: "#2ca02c", // green - Position
  4: "#ffbf00", // yellow - Node Info
  5: "#ff7f0e", // orange - Routing
  6: "#20c997", // teal - Admin
  7: "#6a51a3", // purple - Text (Compressed)
  8: "#fd7e14", // orange - Waypoint
  9: "#e91e63", // pink - Audio
  10: "#ff9800", // amber - Detection Sensor
  11: "#f44336", // bright red - Alert
  12: "#9c27b0", // purple - Key Verification
  32: "#00bcd4", // cyan - Reply
  33: "#607d8b", // blue-gray - IP Tunnel
  34: "#8d6e63", // brown-gray - Paxcounter
  35: "#8bc34a", // light green - Store Forward++
  36: "#4caf50", // green - Node Status
  64: "#9e9e9e", // gray - Serial
  65: "#6610f2", // indigo - Store & Forward
  66: "#cddc39", // lime - Range Test
  67: "#17a2b8", // info blue - Telemetry
  68: "#3f51b5", // indigo - ZPS
  69: "#673ab7", // deep purple - Simulator
  70: "#f44336", // bright red - Traceroute
  71: "#e377c2", // pink - Neighbor
  72: "#2196f3", // blue - ATAK
  73: "#9999ff", // light purple - Map Report
  74: "#ff5722", // deep orange - Power Stress
  76: "#009688", // teal - Reticulum Tunnel
  77: "#4db6ac", // teal accent - Cayenne
  256: "#757575", // dark gray - Private App
  257: "#1976d2", // blue - ATAK Forwarder
};

// Aliases for pages that expect different names.
window.PORT_MAP = window.PORT_LABEL_MAP;
window.PORT_COLORS = window.PORT_COLOR_MAP;

View File

@@ -1,10 +1,14 @@
from datetime import datetime, timedelta
from sqlalchemy import select, and_, or_, func, cast, Text
import logging
from datetime import datetime, timedelta, timezone
from sqlalchemy import Text, and_, cast, func, or_, select
from sqlalchemy.orm import lazyload
from meshview import database, models
from meshview.models import Node, Packet, PacketSeen, Traceroute
logger = logging.getLogger(__name__)
async def get_node(node_id):
async with database.async_session() as session:
@@ -91,8 +95,10 @@ async def get_packets_from(node_id=None, portnum=None, since=None, limit=500):
if portnum:
q = q.where(Packet.portnum == portnum)
if since:
q = q.where(Packet.import_time > (datetime.now() - since))
result = await session.execute(q.limit(limit).order_by(Packet.import_time.desc()))
now_us = int(datetime.now().timestamp() * 1_000_000)
start_us = now_us - int(since.total_seconds() * 1_000_000)
q = q.where(Packet.import_time_us > start_us)
result = await session.execute(q.limit(limit).order_by(Packet.import_time_us.desc()))
return result.scalars()
@@ -108,7 +114,7 @@ async def get_packets_seen(packet_id):
result = await session.execute(
select(PacketSeen)
.where(PacketSeen.packet_id == packet_id)
.order_by(PacketSeen.import_time.desc())
.order_by(PacketSeen.import_time_us.desc())
)
return result.scalars()
@@ -129,18 +135,21 @@ async def get_traceroute(packet_id):
result = await session.execute(
select(Traceroute)
.where(Traceroute.packet_id == packet_id)
.order_by(Traceroute.import_time)
.order_by(Traceroute.import_time_us)
)
return result.scalars()
async def get_traceroutes(since):
if isinstance(since, datetime):
since_us = int(since.timestamp() * 1_000_000)
else:
since_us = int(since)
async with database.async_session() as session:
stmt = (
select(Traceroute)
.join(Packet)
.where(Traceroute.import_time > since)
.order_by(Traceroute.import_time)
.where(Traceroute.import_time_us > since_us)
.order_by(Traceroute.import_time_us)
)
stream = await session.stream_scalars(stmt)
async for tr in stream:
@@ -148,6 +157,8 @@ async def get_traceroutes(since):
async def get_mqtt_neighbors(since):
now_us = int(datetime.now().timestamp() * 1_000_000)
start_us = now_us - int(since.total_seconds() * 1_000_000)
async with database.async_session() as session:
result = await session.execute(
select(PacketSeen, Packet)
@@ -155,7 +166,7 @@ async def get_mqtt_neighbors(since):
.where(
(PacketSeen.hop_limit == PacketSeen.hop_start)
& (PacketSeen.hop_start != 0)
& (PacketSeen.import_time > (datetime.now() - since))
& (PacketSeen.import_time_us > start_us)
)
.options(
lazyload(Packet.from_node),
@@ -168,9 +179,9 @@ async def get_mqtt_neighbors(since):
async def get_total_node_count(channel: str = None) -> int:
try:
async with database.async_session() as session:
q = select(func.count(Node.id)).where(
Node.last_update > datetime.now() - timedelta(days=1)
)
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
cutoff_us = now_us - 86400 * 1_000_000
q = select(func.count(Node.id)).where(Node.last_seen_us > cutoff_us)
if channel:
q = q.where(Node.channel == channel)
@@ -185,26 +196,32 @@ async def get_total_node_count(channel: str = None) -> int:
async def get_top_traffic_nodes():
try:
async with database.async_session() as session:
result = await session.execute(
text("""
SELECT
n.node_id,
n.long_name,
n.short_name,
n.channel,
COUNT(DISTINCT p.id) AS total_packets_sent,
COUNT(ps.packet_id) AS total_times_seen
FROM node n
LEFT JOIN packet p ON n.node_id = p.from_node_id
AND p.import_time >= DATETIME('now', 'localtime', '-24 hours')
LEFT JOIN packet_seen ps ON p.id = ps.packet_id
GROUP BY n.node_id, n.long_name, n.short_name
HAVING total_packets_sent > 0
ORDER BY total_times_seen DESC;
""")
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
cutoff_us = now_us - 86400 * 1_000_000
total_packets_sent = func.count(func.distinct(Packet.id)).label("total_packets_sent")
total_times_seen = func.count(PacketSeen.packet_id).label("total_times_seen")
stmt = (
select(
Node.node_id,
Node.long_name,
Node.short_name,
Node.channel,
total_packets_sent,
total_times_seen,
)
.select_from(Node)
.outerjoin(
Packet,
(Packet.from_node_id == Node.node_id) & (Packet.import_time_us >= cutoff_us),
)
.outerjoin(PacketSeen, PacketSeen.packet_id == Packet.id)
.group_by(Node.node_id, Node.long_name, Node.short_name, Node.channel)
.having(total_packets_sent > 0)
.order_by(total_times_seen.desc())
)
rows = result.fetchall()
rows = (await session.execute(stmt)).all()
nodes = [
{
@@ -227,33 +244,30 @@ async def get_top_traffic_nodes():
async def get_node_traffic(node_id: int):
try:
async with database.async_session() as session:
result = await session.execute(
text("""
SELECT
node.long_name, packet.portnum,
COUNT(*) AS packet_count
FROM packet
JOIN node ON packet.from_node_id = node.node_id
WHERE node.node_id = :node_id
AND packet.import_time >= DATETIME('now', 'localtime', '-24 hours')
GROUP BY packet.portnum
ORDER BY packet_count DESC;
"""),
{"node_id": node_id},
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
cutoff_us = now_us - 86400 * 1_000_000
packet_count = func.count().label("packet_count")
stmt = (
select(Node.long_name, Packet.portnum, packet_count)
.select_from(Packet)
.join(Node, Packet.from_node_id == Node.node_id)
.where(Node.node_id == node_id)
.where(Packet.import_time_us >= cutoff_us)
.group_by(Node.long_name, Packet.portnum)
.order_by(packet_count.desc())
)
# Map the result to include node.long_name and packet data
traffic_data = [
result = await session.execute(stmt)
return [
{
"long_name": row[0], # node.long_name
"portnum": row[1], # packet.portnum
"packet_count": row[2], # COUNT(*) as packet_count
"long_name": row.long_name,
"portnum": row.portnum,
"packet_count": row.packet_count,
}
for row in result.all()
]
return traffic_data
except Exception as e:
# Log the error or handle it as needed
print(f"Error fetching node traffic: {str(e)}")
@@ -282,7 +296,11 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
# Apply filters based on provided parameters
if node_id is not None:
query = query.where(Node.node_id == node_id)
try:
node_id_int = int(node_id)
except (TypeError, ValueError):
node_id_int = node_id
query = query.where(Node.node_id == node_id_int)
if role is not None:
query = query.where(Node.role == role.upper()) # Ensure role is uppercase
if channel is not None:
@@ -291,10 +309,12 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
query = query.where(Node.hw_model == hw_model)
if days_active is not None:
query = query.where(Node.last_update > datetime.now() - timedelta(days_active))
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
cutoff_us = now_us - int(timedelta(days_active).total_seconds() * 1_000_000)
query = query.where(Node.last_seen_us > cutoff_us)
# Exclude nodes where last_update is an empty string
query = query.where(Node.last_update != "")
# Exclude nodes with missing last_seen_us
query = query.where(Node.last_seen_us.is_not(None))
# Order results by long_name in ascending order
query = query.order_by(Node.short_name.asc())
@@ -305,7 +325,7 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
return nodes # Return the list of nodes
except Exception:
print("error reading DB") # Consider using logging instead of print
logger.exception("error reading DB")
return [] # Return an empty list in case of failure
@@ -317,22 +337,36 @@ async def get_packet_stats(
to_node: int | None = None,
from_node: int | None = None,
):
now = datetime.now()
now = datetime.now(timezone.utc) # noqa: UP017
if period_type == "hour":
start_time = now - timedelta(hours=length)
time_format = '%Y-%m-%d %H:00'
time_format_sqlite = "%Y-%m-%d %H:00"
time_format_pg = "YYYY-MM-DD HH24:00"
elif period_type == "day":
start_time = now - timedelta(days=length)
time_format = '%Y-%m-%d'
time_format_sqlite = "%Y-%m-%d"
time_format_pg = "YYYY-MM-DD"
else:
raise ValueError("period_type must be 'hour' or 'day'")
async with database.async_session() as session:
dialect = session.get_bind().dialect.name
if dialect == "postgresql":
period_expr = func.to_char(
func.to_timestamp(Packet.import_time_us / 1_000_000.0),
time_format_pg,
)
else:
period_expr = func.strftime(
time_format_sqlite,
func.datetime(Packet.import_time_us / 1_000_000, "unixepoch"),
)
q = select(
func.strftime(time_format, Packet.import_time).label('period'),
func.count().label('count'),
).where(Packet.import_time >= start_time)
period_expr.label("period"),
func.count().label("count"),
).where(Packet.import_time_us >= int(start_time.timestamp() * 1_000_000))
# Filters
if channel:

View File

@@ -115,6 +115,7 @@
</div>
<script src="/static/portmaps.js"></script>
<script>
/* ======================================================
FIREHOSE TRANSLATION SYSTEM (isolated from base)
@@ -177,41 +178,8 @@ function nodeName(id) {
/* ======================================================
PORT COLORS & NAMES
====================================================== */
const PORT_MAP = {
0: "UNKNOWN APP",
1: "Text Message",
3: "Position",
4: "Node Info",
5: "Routing",
6: "Administration",
8: "Waypoint",
65: "Store Forward",
67: "Telemetry",
70: "Trace Route",
71: "Neighbor Info"
};
const PORT_COLORS = {
0: "#6c757d",
1: "#007bff",
3: "#28a745",
4: "#ffc107",
5: "#dc3545",
6: "#20c997",
65: "#6610f2",
67: "#17a2b8",
68: "#fd7e14",
69: "#6f42c1",
70: "#ff4444",
71: "#ff66cc",
72: "#00cc99",
73: "#9999ff",
74: "#cc00cc",
75: "#ffbb33",
76: "#00bcd4",
77: "#8bc34a",
78: "#795548"
};
const PORT_MAP = window.PORT_MAP || {};
const PORT_COLORS = window.PORT_COLORS || {};
function portLabel(portnum, payload, linksHtml) {
const name = PORT_MAP[portnum] || "Unknown";
@@ -233,13 +201,37 @@ function portLabel(portnum, payload, linksHtml) {
/* ======================================================
TIME FORMAT
====================================================== */
function formatLocalTime(importTimeUs) {
const ms = importTimeUs / 1000;
return new Date(ms).toLocaleTimeString([], {
function formatTimes(importTimeUs) {
const ms = Number(importTimeUs) / 1000;
if (!Number.isFinite(ms)) {
return { local: "—", utc: "—", epoch: "—" };
}
const date = new Date(ms);
const local = date.toLocaleTimeString([], {
hour: "2-digit",
minute: "2-digit",
second: "2-digit"
second: "2-digit",
timeZoneName: "short"
});
const utc = date.toLocaleTimeString([], {
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
timeZone: "UTC",
timeZoneName: "short"
});
return { local, utc, epoch: String(importTimeUs) };
}
// Debug trace: dump one packet's import timestamp as epoch-us, local and UTC.
function logPacketTimes(packet) {
  const stamps = formatTimes(packet.import_time_us);
  const fields = [
    "id=" + packet.id,
    "epoch_us=" + stamps.epoch,
    "local=" + stamps.local,
    "utc=" + stamps.utc
  ];
  console.log("[firehose] packet time", ...fields);
}
/* ======================================================
@@ -261,7 +253,7 @@ async function fetchUpdates() {
if (updatesPaused) return;
const url = new URL("/api/packets", window.location.origin);
url.searchParams.set("limit", 50);
url.searchParams.set("limit", 100);
if (lastImportTimeUs)
url.searchParams.set("since", lastImportTimeUs);
@@ -277,6 +269,7 @@ async function fetchUpdates() {
const list = document.getElementById("packet_list");
for (const pkt of packets.reverse()) {
logPacketTimes(pkt);
/* FROM — includes translation */
const from =
@@ -336,7 +329,9 @@ async function fetchUpdates() {
const html = `
<tr class="packet-row">
<td>${formatLocalTime(pkt.import_time_us)}</td>
<td>
${formatTimes(pkt.import_time_us).local}<br>
</td>
<td>
<span class="toggle-btn">▶</span>

View File

@@ -24,12 +24,70 @@
#reset-filters-button:active { background-color:#c41e0d; }
.blinking-tooltip { background:white;color:black;border:1px solid black;border-radius:4px;padding:2px 5px; }
#map-wrapper {
position: relative;
width: 100%;
height: calc(100vh - 270px);
}
#map {
width: 100%;
height: 100%;
}
#unmapped-packets {
position: absolute;
bottom: 30px;
right: 15px;
z-index: 600;
width: 220px;
padding: 6px 8px;
background: rgba(255, 255, 255, 0.95);
border: 1px solid #ddd;
border-radius: 6px;
font-size: 12px;
text-align: left;
box-shadow: 0 0 10px rgba(0,0,0,0.2);
pointer-events: auto;
}
#unmapped-packets h3 {
margin: 0 0 6px;
font-size: 12px;
font-weight: 600;
color: #000;
}
#unmapped-list {
list-style: none;
padding: 0;
margin: 0;
max-height: 120px;
overflow-y: auto;
}
#unmapped-list li {
display: flex;
gap: 6px;
padding: 3px 0;
border-bottom: 1px dotted #e0e0e0;
}
#unmapped-list li:last-child { border-bottom: none; }
.unmapped-node { font-weight: 400; color: #000; }
.unmapped-empty { color: #666; font-style: italic; }
</style>
{% endblock %}
{% block body %}
<div id="map" style="width:100%; height:calc(100vh - 270px)"></div>
<div id="map-wrapper">
<div id="map"></div>
<div id="unmapped-packets">
<h3 data-translate-lang="unmapped_packets_title">Unmapped Packets</h3>
<ul id="unmapped-list">
<li class="unmapped-empty" data-translate-lang="unmapped_packets_empty">
No recent unmapped packets.
</li>
</ul>
</div>
</div>
<div id="map-legend"
class="legend"
@@ -52,6 +110,8 @@
<div id="filter-container">
<input type="checkbox" class="filter-checkbox" id="filter-routers-only">
<span data-translate-lang="show_routers_only">Show Routers Only</span>
<input type="checkbox" class="filter-checkbox" id="filter-mqtt-only">
<span data-translate-lang="show_mqtt_only">Show MQTT Gateways Only</span>
</div>
<div style="text-align:center;margin-top:5px;">
@@ -70,6 +130,7 @@
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js"
integrity="sha384-FhPn/2P/fJGhQLeNWDn9B/2Gml2bPOrKJwFqJXgR3xOPYxWg5mYQ5XZdhUSugZT0"
crossorigin></script>
<script src="/static/portmaps.js"></script>
<script>
/* ======================================================
@@ -117,16 +178,11 @@ var nodes = [], markers = {}, markerById = {}, nodeMap = new Map();
var edgeLayer = L.layerGroup().addTo(map), selectedNodeId = null;
var activeBlinks = new Map(), lastImportTime = null;
var mapInterval = 0;
var unmappedPackets = [];
const UNMAPPED_LIMIT = 50;
const UNMAPPED_TTL_MS = 5000;
const portMap = {
1:"Text",
67:"Telemetry",
3:"Position",
70:"Traceroute",
4:"Node Info",
71:"Neighbour Info",
73:"Map Report"
};
const portMap = window.PORT_LABEL_MAP;
const palette = ["#e6194b","#4363d8","#f58231","#911eb4","#46f0f0","#f032e6","#bcf60c","#fabebe",
"#008080","#e6beff","#9a6324","#fffac8","#800000","#aaffc3","#808000","#ffd8b1",
@@ -140,8 +196,8 @@ map.on("popupopen", function (e) {
if (popupEl) applyTranslationsMap(popupEl);
});
function timeAgo(date){
const diff = Date.now() - new Date(date);
function timeAgoFromUs(us){
const diff = Date.now() - (us / 1000);
const s = Math.floor(diff/1000), m = Math.floor(s/60),
h = Math.floor(m/60), d = Math.floor(h/24);
return d>0?d+"d":h>0?h+"h":m>0?m+"m":s+"s";
@@ -154,11 +210,37 @@ function hashToColor(str){
return c;
}
// 32-bit FNV-1a hash of the string's UTF-16 code units, folded into [0, 1].
// Deterministic per input, so the same key always maps to the same value.
function hashToUnit(str){
  const FNV_OFFSET = 0x811c9dc5; // 2166136261
  const FNV_PRIME = 0x01000193;  // 16777619
  let acc = FNV_OFFSET;
  for (const unit of str.split("")) {
    acc = Math.imul(acc ^ unit.charCodeAt(0), FNV_PRIME);
  }
  return (acc >>> 0) / 0xffffffff;
}
/* Deterministically offset a coordinate by up to ~15 m so nodes that share
   the exact same position do not stack on the map. The offset is derived
   from `key` via hashToUnit, so a given node always lands in the same spot. */
function jitterLatLng(lat, lon, key){
  const maxMeters = 15; // small, visually separates overlaps
  const theta = hashToUnit(String(key)) * Math.PI * 2;
  const radius = maxMeters * (0.3 + 0.7 * hashToUnit(`r:${key}`));
  // ~111320 m per degree of latitude; a degree of longitude shrinks by cos(lat).
  const latOffset = (radius * Math.cos(theta)) / 111320;
  const lonOffset = (radius * Math.sin(theta)) / (111320 * Math.cos(lat * Math.PI / 180));
  return [lat + latOffset, lon + lonOffset];
}
// True when a node record cannot be placed on the map: missing object, or a
// missing/zero/NaN latitude or longitude. (0,0 is treated as "no GPS fix".)
function isInvalidCoord(n){
  if (!n) return true;
  const badAxis = (v) => !v || v === 0 || Number.isNaN(v);
  return badAxis(n.lat) || badAxis(n.long);
}
/* Resolve a node's display position: prefer its (possibly jittered) Leaflet
   marker position, falling back to the raw stored coordinates. */
function getNodeLatLng(n){
  const placed = markerById[n.key];
  return placed ? placed.getLatLng() : { lat: n.lat, lng: n.long };
}
/* ======================================================
PACKET FETCHING (unchanged)
====================================================== */
@@ -191,7 +273,11 @@ function fetchNewPackets(){
const marker = markerById[pkt.from_node_id];
const nodeData = nodeMap.get(pkt.from_node_id);
if(marker && nodeData) blinkNode(marker,nodeData.long_name,pkt.portnum);
if(marker && nodeData) {
blinkNode(marker,nodeData.long_name,pkt.portnum);
} else {
addUnmappedPacket(pkt, nodeData);
}
});
lastImportTime = latest;
@@ -289,7 +375,8 @@ fetch('/api/nodes?days_active=3')
hw_model: n.hw_model || "",
role: n.role || "",
firmware: n.firmware || "",
last_update: n.last_update || "",
last_seen_us: n.last_seen_us || null,
is_mqtt_gateway: n.is_mqtt_gateway === true,
isRouter: (n.role||"").toLowerCase().includes("router")
}));
@@ -313,7 +400,8 @@ function renderNodesOnMap(){
const color = hashToColor(node.channel);
const marker = L.circleMarker([node.lat,node.long], {
const [jLat, jLon] = jitterLatLng(node.lat, node.long, node.key);
const marker = L.circleMarker([jLat,jLon], {
radius: node.isRouter ? 9 : 7,
color: "white",
fillColor: color,
@@ -331,10 +419,13 @@ function renderNodesOnMap(){
<b data-translate-lang="channel_label"></b> ${node.channel}<br>
<b data-translate-lang="model_label"></b> ${node.hw_model}<br>
<b data-translate-lang="role_label"></b> ${node.role}<br>
<b data-translate-lang="mqtt_gateway"></b> ${
node.is_mqtt_gateway ? (mapTranslations.yes || "Yes") : (mapTranslations.no || "No")
}<br>
${
node.last_update
? `<b data-translate-lang="last_seen"></b> ${timeAgo(node.last_update)}<br>`
node.last_seen_us
? `<b data-translate-lang="last_seen"></b> ${timeAgoFromUs(node.last_seen_us)}<br>`
: ""
}
@@ -354,6 +445,70 @@ function renderNodesOnMap(){
setTimeout(() => applyTranslationsMap(), 50);
}
/* ======================================================
UNMAPPED PACKETS LIST
====================================================== */
/* Queue a packet for the "Unmapped Packets" panel when its source node has
   no usable map position. Entries auto-expire after UNMAPPED_TTL_MS and the
   queue is capped at UNMAPPED_LIMIT, newest first. */
function addUnmappedPacket(pkt, nodeData){
  // Node has valid coordinates -> it is (or will be) drawn on the map.
  if(nodeData && !isInvalidCoord(nodeData)) return;
  const now = Date.now();
  const entry = {
    id: pkt.id,
    // Unique list key even when packet id / timestamp collide.
    key: `${pkt.id ?? "x"}-${pkt.import_time_us ?? now}-${Math.random().toString(16).slice(2)}`,
    import_time_us: pkt.import_time_us || 0,
    from_node_id: pkt.from_node_id,
    long_name: pkt.long_name || (nodeData?.long_name || ""),
    portnum: pkt.portnum,
    payload: (pkt.payload || "").trim(),
    expires_at: now + UNMAPPED_TTL_MS
  };
  // Newest entries go to the front; prune before enforcing the size cap.
  unmappedPackets.unshift(entry);
  pruneUnmappedPackets(now);
  if(unmappedPackets.length > UNMAPPED_LIMIT){
    unmappedPackets = unmappedPackets.slice(0, UNMAPPED_LIMIT);
  }
  renderUnmappedPackets();
  // Schedule a refresh just after this entry's TTL so it disappears even if
  // no further packets arrive to trigger a re-render.
  setTimeout(() => {
    pruneUnmappedPackets(Date.now());
    renderUnmappedPackets();
  }, UNMAPPED_TTL_MS + 50);
}
// Drop expired entries from the shared unmapped-packet list, replacing the
// global array with the surviving entries (order preserved).
function pruneUnmappedPackets(now){
  const alive = [];
  for (const entry of unmappedPackets) {
    if (entry.expires_at > now) alive.push(entry);
  }
  unmappedPackets = alive;
}
/* Re-render the "Unmapped Packets" sidebar list from the (freshly pruned)
   global unmappedPackets array; shows a translatable placeholder when empty. */
function renderUnmappedPackets(){
  pruneUnmappedPackets(Date.now());
  const list = document.getElementById("unmapped-list");
  list.innerHTML = "";
  if(unmappedPackets.length === 0){
    const empty = document.createElement("li");
    empty.className = "unmapped-empty";
    // data-translate-lang lets the page's translation pass localize the text.
    empty.dataset.translateLang = "unmapped_packets_empty";
    empty.textContent = "No recent unmapped packets.";
    list.appendChild(empty);
    return;
  }
  unmappedPackets.forEach(p=>{
    const li = document.createElement("li");
    const node = document.createElement("span");
    node.className = "unmapped-node";
    // Fall back to the numeric port / node id when names are unknown.
    const type = portMap[p.portnum] || `Port ${p.portnum ?? "?"}`;
    const name = p.long_name || `Node ${p.from_node_id ?? "?"}`;
    node.textContent = `${name} (${type})`;
    li.appendChild(node);
    list.appendChild(li);
  });
}
/* ======================================================
⭐ NEW: DYNAMIC EDGE LOADING
====================================================== */
@@ -374,7 +529,9 @@ async function onNodeClick(node){
if(!f || !t || isInvalidCoord(f) || isInvalidCoord(t)) return;
const color = edge.type === "neighbor" ? "gray" : "orange";
const line = L.polyline([[f.lat, f.long], [t.lat, t.long]], {
const fLatLng = getNodeLatLng(f);
const tLatLng = getNodeLatLng(t);
const line = L.polyline([[fLatLng.lat, fLatLng.lng], [tLatLng.lat, tLatLng.lng]], {
color, weight: 3
}).addTo(edgeLayer);
@@ -482,10 +639,14 @@ function createChannelFilters(){
});
const routerOnly=document.getElementById("filter-routers-only");
const mqttOnly=document.getElementById("filter-mqtt-only");
routerOnly.checked = saved["routersOnly"] || false;
mqttOnly.checked = saved["mqttOnly"] || false;
routerOnly.addEventListener("change", saveFiltersToLocalStorage);
routerOnly.addEventListener("change", updateNodeVisibility);
mqttOnly.addEventListener("change", saveFiltersToLocalStorage);
mqttOnly.addEventListener("change", updateNodeVisibility);
updateNodeVisibility();
}
@@ -496,12 +657,14 @@ function saveFiltersToLocalStorage(){
state[ch] = document.getElementById(`filter-channel-${ch}`).checked;
});
state["routersOnly"] = document.getElementById("filter-routers-only").checked;
state["mqttOnly"] = document.getElementById("filter-mqtt-only").checked;
localStorage.setItem("mapFilters", JSON.stringify(state));
}
function updateNodeVisibility(){
const routerOnly = document.getElementById("filter-routers-only").checked;
const mqttOnly = document.getElementById("filter-mqtt-only").checked;
const activeChannels = [...channelSet].filter(ch =>
document.getElementById(`filter-channel-${ch}`).checked
);
@@ -511,6 +674,7 @@ function updateNodeVisibility(){
if(marker){
const visible =
(!routerOnly || n.isRouter) &&
(!mqttOnly || n.is_mqtt_gateway) &&
activeChannels.includes(n.channel);
visible ? map.addLayer(marker) : map.removeLayer(marker);
@@ -541,6 +705,7 @@ function shareCurrentView() {
function resetFiltersToDefaults(){
document.getElementById("filter-routers-only").checked = false;
document.getElementById("filter-mqtt-only").checked = false;
channelSet.forEach(ch => {
document.getElementById(`filter-channel-${ch}`).checked = true;
});

View File

@@ -141,7 +141,7 @@ document.addEventListener("DOMContentLoaded", async () => {
</span>
<span class="col-3 nodename">
<a href="/packet_list/${packet.from_node_id}">
<a href="/node/${packet.from_node_id}">
${escapeHtml(fromName)}
</a>
</span>
@@ -178,7 +178,7 @@ document.addEventListener("DOMContentLoaded", async () => {
const sinceUs = Math.floor(sixDaysAgoMs * 1000);
const url =
`/api/packets?portnum=1&contains=${encodeURIComponent(tag)}&since=${sinceUs}`;
`/api/packets?portnum=1&contains=${encodeURIComponent(tag)}&since=${sinceUs}&limit=1000`;
const resp = await fetch(url);
const data = await resp.json();

File diff suppressed because it is too large Load Diff

View File

@@ -2,23 +2,30 @@
{% block css %}
<style>
html, body {
overflow-x: auto !important;
}
table {
width: 80%;
/* FIX: allow table to keep natural width so scrolling works */
width: max-content;
min-width: 100%;
border-collapse: collapse;
margin: 1em auto;
}
/* Ensure table centered visually */
/* Desktop scroll wrapper */
#node-list {
display: flex;
justify-content: center;
width: 100%;
overflow-x: auto; /* allows horizontal scroll */
overflow-y: hidden;
/* !!! removed display:flex because it prevents scrolling */
}
#node-list table {
margin-left: auto;
margin-right: auto;
width: max-content; /* table keeps its natural width */
min-width: 100%; /* won't shrink smaller than viewport */
}
th, td {
padding: 10px;
border: 1px solid #333;
@@ -96,6 +103,21 @@ select, .export-btn, .search-box, .clear-btn {
font-weight: bold;
color: white;
}
.node-status {
margin-left: 10px;
padding: 2px 8px;
border-radius: 12px;
border: 1px solid #2a6a8a;
background: #0d2a3a;
color: #9fd4ff;
font-size: 0.9em;
display: inline-block;
opacity: 0;
transition: opacity 0.15s ease-in-out;
}
.node-status.active {
opacity: 1;
}
/* Favorite stars */
.favorite-star {
@@ -134,16 +156,20 @@ select, .export-btn, .search-box, .clear-btn {
/* --------------------------------------------- */
@media (max-width: 768px) {
/* Hide desktop table */
/* Hide desktop view */
#node-list table {
display: none;
}
/* Show mobile card list */
/* Show mobile cards */
#mobile-node-list {
display: block !important;
width: 100%;
padding: 0 10px;
/* If you want horizontal swiping, uncomment:
overflow-x: auto;
white-space: nowrap; */
}
.node-card {
@@ -188,7 +214,7 @@ select, .export-btn, .search-box, .clear-btn {
id="search-box"
class="search-box"
data-translate-lang="search_placeholder"
placeholder="Search by name or ID..."
placeholder="Search by name or ID or HEX ID..."
/>
<select id="role-filter">
@@ -224,6 +250,7 @@ select, .export-btn, .search-box, .clear-btn {
<span data-translate-lang="showing_nodes">Showing</span>
<span id="node-count">0</span>
<span data-translate-lang="nodes_suffix">nodes</span>
<span id="node-status" class="node-status" aria-live="polite"></span>
</div>
<!-- Desktop table -->
@@ -239,13 +266,14 @@ select, .export-btn, .search-box, .clear-btn {
<th data-translate-lang="last_lat">Last Latitude <span class="sort-icon"></span></th>
<th data-translate-lang="last_long">Last Longitude <span class="sort-icon"></span></th>
<th data-translate-lang="channel">Channel <span class="sort-icon"></span></th>
<th data-translate-lang="mqtt_gateway">MQTT</th>
<th data-translate-lang="last_seen">Last Seen <span class="sort-icon"></span></th>
<th data-translate-lang="favorite"></th>
</tr>
</thead>
<tbody id="node-table-body">
<tr>
<td colspan="10" style="text-align:center; color:white;" data-translate-lang="loading_nodes">
<td colspan="11" style="text-align:center; color:white;" data-translate-lang="loading_nodes">
Loading nodes...
</td>
</tr>
@@ -294,6 +322,11 @@ let allNodes = [];
let sortColumn = "short_name";
let sortAsc = true;
let showOnlyFavorites = false;
let favoritesSet = new Set();
let isBusy = false;
let statusHideTimer = null;
let statusShownAt = 0;
const minStatusMs = 300;
const headers = document.querySelectorAll("thead th");
const keyMap = [
@@ -301,28 +334,51 @@ const keyMap = [
"last_lat","last_long","channel","last_seen_us"
];
function getFavorites() {
const favorites = localStorage.getItem('nodelist_favorites');
return favorites ? JSON.parse(favorites) : [];
}
function saveFavorites(favs) {
localStorage.setItem('nodelist_favorites', JSON.stringify(favs));
}
function toggleFavorite(nodeId) {
let favs = getFavorites();
const idx = favs.indexOf(nodeId);
if (idx >= 0) favs.splice(idx, 1);
else favs.push(nodeId);
saveFavorites(favs);
}
function isFavorite(nodeId) {
return getFavorites().includes(nodeId);
// Trailing-edge debounce: returns a wrapper that invokes `fn` only after
// `delay` ms have elapsed with no further calls; each call resets the timer.
function debounce(fn, delay = 250) {
  let pending;
  return function (...callArgs) {
    clearTimeout(pending);
    pending = setTimeout(() => fn(...callArgs), delay);
  };
}
function timeAgo(usTimestamp) {
if (!usTimestamp) return "N/A";
const ms = usTimestamp / 1000;
const diff = Date.now() - ms;
// Resolve (with undefined) on the browser's next animation frame,
// giving the UI a chance to paint before heavy synchronous work continues.
function nextFrame() {
  return new Promise(resolve => {
    // Deliberately ignore the rAF timestamp so the promise resolves with undefined.
    requestAnimationFrame(() => resolve());
  });
}
// Rebuild the module-level favoritesSet from localStorage.
// Missing or malformed stored data resets favorites to an empty set.
function loadFavorites() {
  favoritesSet = new Set();
  const raw = localStorage.getItem('nodelist_favorites');
  if (!raw) return;
  try {
    const parsed = JSON.parse(raw);
    if (Array.isArray(parsed)) favoritesSet = new Set(parsed);
  } catch (err) {
    console.warn("Failed to parse favorites, resetting.", err);
    favoritesSet = new Set();
  }
}
// Persist the current favoritesSet to localStorage as a JSON array.
function saveFavorites() {
  const serialized = JSON.stringify(Array.from(favoritesSet));
  localStorage.setItem('nodelist_favorites', serialized);
}
// Flip a node's membership in the favorites set, then persist the change.
function toggleFavorite(nodeId) {
  const wasFavorite = favoritesSet.has(nodeId);
  if (wasFavorite) favoritesSet.delete(nodeId);
  else favoritesSet.add(nodeId);
  saveFavorites();
}
// True when the given node id is currently in the favorites set.
function isFavorite(nodeId) {
return favoritesSet.has(nodeId);
}
function timeAgoFromMs(msTimestamp) {
if (!msTimestamp) return "N/A";
const diff = Date.now() - msTimestamp;
if (diff < 60000) return "just now";
const mins = Math.floor(diff / 60000);
@@ -339,6 +395,7 @@ function timeAgo(usTimestamp) {
document.addEventListener("DOMContentLoaded", async function() {
await loadTranslationsNodelist();
loadFavorites();
const tbody = document.getElementById("node-table-body");
const mobileList = document.getElementById("mobile-node-list");
@@ -349,52 +406,82 @@ document.addEventListener("DOMContentLoaded", async function() {
const firmwareFilter = document.getElementById("firmware-filter");
const searchBox = document.getElementById("search-box");
const countSpan = document.getElementById("node-count");
const statusSpan = document.getElementById("node-status");
const exportBtn = document.getElementById("export-btn");
const clearBtn = document.getElementById("clear-btn");
const favoritesBtn = document.getElementById("favorites-btn");
let lastIsMobile = (window.innerWidth <= 768);
try {
setStatus("Loading nodes…");
await nextFrame();
const res = await fetch("/api/nodes?days_active=3");
if (!res.ok) throw new Error("Failed to fetch nodes");
const data = await res.json();
allNodes = data.nodes.map(n => ({
...n,
firmware: n.firmware || n.firmware_version || ""
}));
allNodes = data.nodes.map(n => {
const firmware = n.firmware || n.firmware_version || "";
const last_seen_us = n.last_seen_us || 0;
const last_seen_ms = last_seen_us ? (last_seen_us / 1000) : 0;
return {
...n,
firmware,
last_seen_us,
last_seen_ms,
_search: [
n.node_id,
n.id,
n.long_name,
n.short_name
]
.filter(Boolean)
.join(" ")
.toLowerCase()
};
});
populateFilters(allNodes);
renderTable(allNodes);
applyFilters(); // ensures initial sort + render uses same path
updateSortIcons();
setStatus("");
} catch (err) {
tbody.innerHTML = `<tr>
<td colspan="10" style="text-align:center; color:red;">
<td colspan="11" style="text-align:center; color:red;">
${nodelistTranslations.error_loading_nodes || "Error loading nodes"}
</td></tr>`;
setStatus("");
return;
}
roleFilter.addEventListener("change", applyFilters);
channelFilter.addEventListener("change", applyFilters);
hwFilter.addEventListener("change", applyFilters);
firmwareFilter.addEventListener("change", applyFilters);
searchBox.addEventListener("input", applyFilters);
// Debounced only for search typing
searchBox.addEventListener("input", debounce(applyFilters, 250));
exportBtn.addEventListener("click", exportToCSV);
clearBtn.addEventListener("click", clearFilters);
favoritesBtn.addEventListener("click", toggleFavoritesFilter);
// Favorite star click handler
// Favorite star click handler (delegated)
document.addEventListener("click", e => {
if (e.target.classList.contains('favorite-star')) {
const nodeId = parseInt(e.target.dataset.nodeId);
const isFav = isFavorite(nodeId);
const nodeId = parseInt(e.target.dataset.nodeId, 10);
const fav = isFavorite(nodeId);
if (isFav) {
if (fav) {
e.target.classList.remove("active");
e.target.textContent = "☆";
} else {
e.target.classList.add("active");
e.target.textContent = "★";
}
toggleFavorite(nodeId);
applyFilters();
}
@@ -402,13 +489,26 @@ document.addEventListener("DOMContentLoaded", async function() {
headers.forEach((th, index) => {
th.addEventListener("click", () => {
let key = keyMap[index];
const key = keyMap[index];
// ignore clicks on the "favorite" (last header) which has no sort key
if (!key) return;
sortAsc = (sortColumn === key) ? !sortAsc : true;
sortColumn = key;
applyFilters();
});
});
// Re-render on breakpoint change so mobile/desktop view switches instantly
window.addEventListener("resize", debounce(() => {
const isMobile = (window.innerWidth <= 768);
if (isMobile !== lastIsMobile) {
lastIsMobile = isMobile;
applyFilters();
}
}, 150));
function populateFilters(nodes) {
const roles = new Set(), channels = new Set(), hws = new Set(), fws = new Set();
@@ -443,7 +543,9 @@ document.addEventListener("DOMContentLoaded", async function() {
applyFilters();
}
function applyFilters() {
async function applyFilters() {
setStatus("Updating…");
await nextFrame();
const searchTerm = searchBox.value.trim().toLowerCase();
let filtered = allNodes.filter(n => {
@@ -452,102 +554,117 @@ document.addEventListener("DOMContentLoaded", async function() {
const hwMatch = !hwFilter.value || n.hw_model === hwFilter.value;
const fwMatch = !firmwareFilter.value || n.firmware === firmwareFilter.value;
const searchMatch =
!searchTerm ||
(n.long_name && n.long_name.toLowerCase().includes(searchTerm)) ||
(n.short_name && n.short_name.toLowerCase().includes(searchTerm)) ||
n.node_id.toString().includes(searchTerm);
const searchMatch = !searchTerm || n._search.includes(searchTerm);
const favMatch = !showOnlyFavorites || isFavorite(n.node_id);
return roleMatch && channelMatch && hwMatch && fwMatch && searchMatch && favMatch;
});
// IMPORTANT: Always sort the filtered subset to preserve expected behavior
filtered = sortNodes(filtered, sortColumn, sortAsc);
renderTable(filtered);
updateSortIcons();
setStatus("");
}
function renderTable(nodes) {
tbody.innerHTML = "";
mobileList.innerHTML = "";
const isMobile = window.innerWidth <= 768;
const shouldRenderTable = !isMobile;
if (shouldRenderTable) {
tbody.innerHTML = "";
} else {
mobileList.innerHTML = "";
}
const tableFrag = shouldRenderTable ? document.createDocumentFragment() : null;
const mobileFrag = shouldRenderTable ? null : document.createDocumentFragment();
if (!nodes.length) {
tbody.innerHTML = `<tr>
<td colspan="10" style="text-align:center; color:white;">
if (shouldRenderTable) {
tbody.innerHTML = `<tr>
<td colspan="11" style="text-align:center; color:white;">
${nodelistTranslations.no_nodes_found || "No nodes found"}
</td>
</tr>`;
} else {
mobileList.innerHTML = `<div style="text-align:center; color:white;">
${nodelistTranslations.no_nodes_found || "No nodes found"}
</td>
</tr>`;
</div>`;
}
mobileList.innerHTML = `<div style="text-align:center; color:white;">No nodes found</div>`;
countSpan.textContent = 0;
return;
}
nodes.forEach(node => {
const isFav = isFavorite(node.node_id);
const star = isFav ? "★" : "☆";
const fav = isFavorite(node.node_id);
const star = fav ? "★" : "☆";
// DESKTOP TABLE ROW
const row = document.createElement("tr");
row.innerHTML = `
<td>${node.short_name || "N/A"}</td>
<td><a href="/node/${node.node_id}">${node.long_name || "N/A"}</a></td>
<td>${node.hw_model || "N/A"}</td>
<td>${node.firmware || "N/A"}</td>
<td>${node.role || "N/A"}</td>
<td>${node.last_lat ? (node.last_lat / 1e7).toFixed(7) : "N/A"}</td>
<td>${node.last_long ? (node.last_long / 1e7).toFixed(7) : "N/A"}</td>
<td>${node.channel || "N/A"}</td>
<td>${timeAgo(node.last_seen_us)}</td>
<td style="text-align:center;">
<span class="favorite-star ${isFav ? "active" : ""}" data-node-id="${node.node_id}">
${star}
</span>
</td>
`;
tbody.appendChild(row);
if (shouldRenderTable) {
// DESKTOP TABLE ROW
const row = document.createElement("tr");
row.innerHTML = `
<td>${node.short_name || "N/A"}</td>
<td><a href="/node/${node.node_id}">${node.long_name || "N/A"}</a></td>
<td>${node.hw_model || "N/A"}</td>
<td>${node.firmware || "N/A"}</td>
<td>${node.role || "N/A"}</td>
<td>${node.last_lat ? (node.last_lat / 1e7).toFixed(7) : "N/A"}</td>
<td>${node.last_long ? (node.last_long / 1e7).toFixed(7) : "N/A"}</td>
<td>${node.channel || "N/A"}</td>
<td>${node.is_mqtt_gateway ? (nodelistTranslations.yes || "Yes") : (nodelistTranslations.no || "No")}</td>
<td>${timeAgoFromMs(node.last_seen_ms)}</td>
<td style="text-align:center;">
<span class="favorite-star ${fav ? "active" : ""}" data-node-id="${node.node_id}">
${star}
</span>
</td>
`;
tableFrag.appendChild(row);
} else {
// MOBILE CARD VIEW
const card = document.createElement("div");
card.className = "node-card";
card.innerHTML = `
<div class="node-card-header">
<span>${node.short_name || node.long_name || node.node_id}</span>
<span class="favorite-star ${fav ? "active" : ""}" data-node-id="${node.node_id}">
${star}
</span>
</div>
// MOBILE CARD VIEW
const card = document.createElement("div");
card.className = "node-card";
card.innerHTML = `
<div class="node-card-header">
<span>${node.short_name || node.long_name || node.node_id}</span>
<span class="favorite-star ${isFav ? "active" : ""}" data-node-id="${node.node_id}">
${star}
</span>
</div>
<div class="node-card-field"><b>ID:</b> ${node.node_id}</div>
<div class="node-card-field"><b>Name:</b> ${node.long_name || "N/A"}</div>
<div class="node-card-field"><b>HW:</b> ${node.hw_model || "N/A"}</div>
<div class="node-card-field"><b>Firmware:</b> ${node.firmware || "N/A"}</div>
<div class="node-card-field"><b>Role:</b> ${node.role || "N/A"}</div>
<div class="node-card-field"><b>Location:</b>
${node.last_lat ? (node.last_lat / 1e7).toFixed(5) : "N/A"},
${node.last_long ? (node.last_long / 1e7).toFixed(5) : "N/A"}
</div>
<div class="node-card-field"><b>Channel:</b> ${node.channel || "N/A"}</div>
<div class="node-card-field"><b>Last Seen:</b> ${timeAgoFromMs(node.last_seen_ms)}</div>
<div class="node-card-field"><b>ID:</b> ${node.node_id}</div>
<div class="node-card-field"><b>Name:</b> ${node.long_name || "N/A"}</div>
<div class="node-card-field"><b>HW:</b> ${node.hw_model || "N/A"}</div>
<div class="node-card-field"><b>Firmware:</b> ${node.firmware || "N/A"}</div>
<div class="node-card-field"><b>Role:</b> ${node.role || "N/A"}</div>
<div class="node-card-field"><b>Location:</b>
${node.last_lat ? (node.last_lat / 1e7).toFixed(5) : "N/A"},
${node.last_long ? (node.last_long / 1e7).toFixed(5) : "N/A"}
</div>
<div class="node-card-field"><b>Channel:</b> ${node.channel}</div>
<div class="node-card-field"><b>Last Seen:</b> ${timeAgo(node.last_seen_us)}</div>
<a href="/node/${node.node_id}" style="color:#9fd4ff; text-decoration:underline; margin-top:5px; display:block;">
View Node →
</a>
`;
mobileList.appendChild(card);
<a href="/node/${node.node_id}" style="color:#9fd4ff; text-decoration:underline; margin-top:5px; display:block;">
View Node →
</a>
`;
mobileFrag.appendChild(card);
}
});
// Toggle correct view
if (isMobile) {
mobileList.style.display = "block";
} else {
mobileList.style.display = "none";
}
mobileList.style.display = isMobile ? "block" : "none";
countSpan.textContent = nodes.length;
if (shouldRenderTable) {
tbody.appendChild(tableFrag);
} else {
mobileList.appendChild(mobileFrag);
}
}
function clearFilters() {
@@ -556,6 +673,7 @@ document.addEventListener("DOMContentLoaded", async function() {
hwFilter.value = "";
firmwareFilter.value = "";
searchBox.value = "";
sortColumn = "short_name";
sortAsc = true;
showOnlyFavorites = false;
@@ -563,7 +681,7 @@ document.addEventListener("DOMContentLoaded", async function() {
favoritesBtn.textContent = "⭐ Show Favorites";
favoritesBtn.classList.remove("active");
renderTable(allNodes);
applyFilters();
updateSortIcons();
}
@@ -599,6 +717,10 @@ document.addEventListener("DOMContentLoaded", async function() {
B = B || 0;
}
// Normalize strings for stable sorting
if (typeof A === "string") A = A.toLowerCase();
if (typeof B === "string") B = B.toLowerCase();
if (A < B) return asc ? -1 : 1;
if (A > B) return asc ? 1 : -1;
return 0;
@@ -613,6 +735,41 @@ document.addEventListener("DOMContentLoaded", async function() {
keyMap[i] === sortColumn ? (sortAsc ? "▲" : "▼") : "";
});
}
// Show or clear the small busy-status badge next to the node count.
// A non-empty message is shown immediately; an empty message clears the
// badge, but never before it has been visible for minStatusMs (anti-flicker).
function setStatus(message) {
  if (!statusSpan) return;

  // Cancel any pending hide so rapid show/clear calls don't race each other.
  if (statusHideTimer) {
    clearTimeout(statusHideTimer);
    statusHideTimer = null;
  }

  const hideNow = () => {
    console.log("[nodelist] status: cleared");
    statusSpan.textContent = "";
    statusSpan.classList.remove("active");
    isBusy = false;
  };

  if (message) {
    statusShownAt = Date.now();
    console.log("[nodelist] status:", message);
    statusSpan.textContent = message;
    statusSpan.classList.add("active");
    isBusy = true;
    return;
  }

  // Defer the hide until the badge has been on screen for at least minStatusMs.
  const remaining = Math.max(0, minStatusMs - (Date.now() - statusShownAt));
  if (remaining > 0) {
    statusHideTimer = setTimeout(() => {
      statusHideTimer = null;
      hideNow();
    }, remaining);
  } else {
    hideNow();
  }
}
});
</script>
{% endblock %}

View File

@@ -2,6 +2,10 @@
{% block title %}Packet Details{% endblock %}
{% block head %}
<script src="/static/portmaps.js"></script>
{% endblock %}
{% block css %}
{{ super() }}
<style>
@@ -178,17 +182,7 @@ document.addEventListener("DOMContentLoaded", async () => {
const packetId = match[1];
/* PORT LABELS (NOT TRANSLATED) */
const PORT_NAMES = {
0:"UNKNOWN APP",
1:"Text",
3:"Position",
4:"Node Info",
5:"Routing",
6:"Admin",
67:"Telemetry",
70:"Traceroute",
71:"Neighbor"
};
const PORT_NAMES = window.PORT_LABEL_MAP;
/* ---------------------------------------------
Fetch packet
@@ -380,103 +374,150 @@ document.addEventListener("DOMContentLoaded", async () => {
}
/* ---------------------------------------------
Load packets_seen
----------------------------------------------*/
const seenRes = await fetch(`/api/packets_seen/${packetId}`);
const seenData = await seenRes.json();
const seenList = seenData.seen ?? [];
Load packets_seen
----------------------------------------------*/
const seenRes = await fetch(`/api/packets_seen/${packetId}`);
const seenData = await seenRes.json();
const seenList = seenData.seen ?? [];
const seenSorted = seenList.slice().sort((a,b)=>{
return (b.hop_start ?? -999) - (a.hop_start ?? -999);
});
/* ---------------------------------------------
Sort by hop count (highest first)
----------------------------------------------*/
const seenSorted = seenList.slice().sort((a,b)=>{
const ha = (a.hop_start ?? 0) - (a.hop_limit ?? 0);
const hb = (b.hop_start ?? 0) - (b.hop_limit ?? 0);
return hb - ha;
});
if (seenSorted.length){
seenContainer.classList.remove("d-none");
seenCountSpan.textContent = `(${seenSorted.length})`;
}
if (seenSorted.length){
seenContainer.classList.remove("d-none");
seenCountSpan.textContent = `(${seenSorted.length})`;
}
/* ---------------------------------------------
Render gateway table + map markers
----------------------------------------------*/
seenTableBody.innerHTML = seenSorted.map(s=>{
const node = nodeLookup[s.node_id];
const label = node?.long_name || s.node_id;
/* ---------------------------------------------
GROUP BY HOP COUNT
----------------------------------------------*/
const hopGroups = {};
const timeStr = s.import_time_us
? new Date(s.import_time_us/1000).toLocaleTimeString()
: "—";
seenSorted.forEach(s => {
const hopValue = Math.max(
0,
(s.hop_start ?? 0) - (s.hop_limit ?? 0)
);
if (!hopGroups[hopValue]) hopGroups[hopValue] = [];
hopGroups[hopValue].push(s);
});
if (node?.last_lat && node.last_long){
const rlat = node.last_lat/1e7;
const rlon = node.last_long/1e7;
allBounds.push([rlat, rlon]);
/* ---------------------------------------------
Render grouped gateway table + map markers
----------------------------------------------*/
seenTableBody.innerHTML = Object.keys(hopGroups)
.sort((a,b) => Number(a) - Number(b)) // 0 hop first
.map(hopKey => {
const hopValue = (s.hop_start ?? 0) - (s.hop_limit ?? 0);
const color = hopColor(hopValue);
const hopLabel =
hopKey === "0"
? (packetTranslations.direct || "Direct (0 hops)")
: `${hopKey} ${packetTranslations.hops || "hops"}`;
const marker = L.marker([rlat,rlon],{
icon: L.divIcon({
html: `
<div style="
background:${color};
width:24px; height:24px;
border-radius:50%;
display:flex;
align-items:center;
justify-content:center;
color:white;
font-size:11px;
font-weight:700;
border:2px solid rgba(0,0,0,0.35);
box-shadow:0 0 5px rgba(0,0,0,0.45);
">${hopValue}</div>`,
className: "",
iconSize:[24,24],
iconAnchor:[12,12]
})
}).addTo(map);
const rows = hopGroups[hopKey].map(s => {
const node = nodeLookup[s.node_id];
const label = node?.long_name || s.node_id;
let distKm = null, distMi = null;
if (srcLat && srcLon){
distKm = haversine(srcLat, srcLon, rlat, rlon);
distMi = distKm * 0.621371;
}
const timeStr = s.import_time_us
? new Date(s.import_time_us/1000).toLocaleTimeString()
: "—";
marker.bindPopup(`
/* ---------------- MAP MARKERS (UNCHANGED) ---------------- */
if (node?.last_lat && node.last_long){
const rlat = node.last_lat/1e7;
const rlon = node.last_long/1e7;
allBounds.push([rlat, rlon]);
let distanceKm = null;
if (srcLat && srcLon) {
distanceKm = haversine(srcLat, srcLon, rlat, rlon);
}
const distanceMi = distanceKm !== null ? distanceKm * 0.621371 : null;
const color = hopColor(hopKey);
const marker = L.marker([rlat,rlon],{
icon: L.divIcon({
html: `
<div style="
background:${color};
width:24px; height:24px;
border-radius:50%;
display:flex;
align-items:center;
justify-content:center;
color:white;
font-size:11px;
font-weight:700;
border:2px solid rgba(0,0,0,0.35);
box-shadow:0 0 5px rgba(0,0,0,0.45);
">${hopKey}</div>`,
className: "",
iconSize:[24,24],
iconAnchor:[12,12]
})
}).addTo(map);
marker.bindPopup(`
<div style="font-size:0.9em">
<b>${label}</b><br>
<span data-translate-lang="node_id_short">${packetTranslations.node_id_short || "Node ID"}</span>:
<span data-translate-lang="node_id_short">Node ID</span>:
<a href="/node/${s.node_id}">${s.node_id}</a><br>
HW: ${node?.hw_model ?? "—"}<br>
<span data-translate-lang="channel">${packetTranslations.channel || "Channel"}</span>: ${s.channel ?? "—"}<br><br>
<span data-translate-lang="channel">Channel</span>: ${s.channel ?? "—"}<br>
${
distanceKm !== null
? `<span data-translate-lang="distance">Distance</span>:
${distanceKm.toFixed(1)} km / ${distanceMi.toFixed(1)} mi<br>`
: ""
}
<b data-translate-lang="signal">${packetTranslations.signal || "Signal"}</b><br>
<br>
<b data-translate-lang="signal">Signal</b><br>
RSSI: ${s.rx_rssi ?? "—"}<br>
SNR: ${s.rx_snr ?? "—"}<br><br>
<b data-translate-lang="hops">${packetTranslations.hops || "Hops"}</b>: ${hopValue}<br>
<b data-translate-lang="distance">${packetTranslations.distance || "Distance"}:</b><br>
${
distKm
? `${distKm.toFixed(2)} km (${distMi.toFixed(2)} mi)`
: "—"
}
<b data-translate-lang="hops">Hops</b>: ${hopKey}
</div>
`);
}
}
return `
<tr>
<td><a href="/node/${s.node_id}">${label}</a></td>
<td>${s.rx_rssi ?? "—"}</td>
<td>${s.rx_snr ?? "—"}</td>
<td>${hopKey}</td>
<td>${s.channel ?? "—"}</td>
<td>${timeStr}</td>
</tr>
`;
}).join("");
return `
<tr>
<td><a href="/node/${s.node_id}">${label}</a></td>
<td>${s.rx_rssi ?? "—"}</td>
<td>${s.rx_snr ?? "—"}</td>
<td>${s.hop_start ?? "—"}${s.hop_limit ?? "—"}</td>
<td>${s.channel ?? "—"}</td>
<td>${timeStr}</td>
</tr>`;
<td colspan="6"
style="
background:#1f2327;
font-weight:700;
color:#9ecbff;
border-top:1px solid #444;
padding:8px 12px;
">
🔁 ${hopLabel} (${hopGroups[hopKey].length})
</td>
</tr>
${rows}
`;
}).join("");
/* ---------------------------------------------
Fit map around all markers
----------------------------------------------*/

View File

@@ -89,6 +89,7 @@
{% block head %}
<script src="https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js"></script>
<script src="/static/portmaps.js"></script>
{% endblock %}
{% block body %}
@@ -111,6 +112,10 @@
<p data-translate-lang="total_packets_seen">Total Packets Seen</p>
<div class="summary-count" id="summary_seen">0</div>
</div>
<div class="summary-card" style="flex:1;">
<p data-translate-lang="total_gateways">Total Gateways</p>
<div class="summary-count" id="summary_gateways">0</div>
</div>
</div>
<!-- Daily Charts -->
@@ -189,6 +194,28 @@
<button class="export-btn" data-chart="chart_channel" data-translate-lang="export_csv">Export CSV</button>
<div id="chart_channel" class="chart"></div>
</div>
<!-- Gateway breakdown charts -->
<div class="card-section">
<p class="section-header" data-translate-lang="gateway_channel_breakdown">Gateway Channel Breakdown</p>
<button class="expand-btn" data-chart="chart_gateway_channel" data-translate-lang="expand_chart">Expand Chart</button>
<button class="export-btn" data-chart="chart_gateway_channel" data-translate-lang="export_csv">Export CSV</button>
<div id="chart_gateway_channel" class="chart"></div>
</div>
<div class="card-section">
<p class="section-header" data-translate-lang="gateway_role_breakdown">Gateway Role Breakdown</p>
<button class="expand-btn" data-chart="chart_gateway_role" data-translate-lang="expand_chart">Expand Chart</button>
<button class="export-btn" data-chart="chart_gateway_role" data-translate-lang="export_csv">Export CSV</button>
<div id="chart_gateway_role" class="chart"></div>
</div>
<div class="card-section">
<p class="section-header" data-translate-lang="gateway_firmware_breakdown">Gateway Firmware Breakdown</p>
<button class="expand-btn" data-chart="chart_gateway_firmware" data-translate-lang="expand_chart">Expand Chart</button>
<button class="export-btn" data-chart="chart_gateway_firmware" data-translate-lang="export_csv">Export CSV</button>
<div id="chart_gateway_firmware" class="chart"></div>
</div>
</div>
<!-- Modal for expanded charts -->
@@ -205,14 +232,7 @@
</div>
<script>
const PORTNUM_LABELS = {
1: "Text Messages",
3: "Position",
4: "Node Info",
67: "Telemetry",
70: "Traceroute",
71: "Neighbor Info"
};
const PORTNUM_LABELS = window.PORT_LABEL_MAP;
// --- Fetch & Processing ---
async function fetchStats(period_type,length,portnum=null,channel=null){
@@ -345,6 +365,7 @@ function renderPieChart(elId,data,name){
return chart;
}
// --- Packet Type Pie Chart ---
async function fetchPacketTypeBreakdown(channel=null) {
const portnums = [1,3,4,67,70,71];
@@ -368,6 +389,7 @@ async function fetchPacketTypeBreakdown(channel=null) {
let chartHourlyAll, chartPortnum1, chartPortnum3, chartPortnum4, chartPortnum67, chartPortnum70, chartPortnum71;
let chartDailyAll, chartDailyPortnum1;
let chartHwModel, chartRole, chartChannel;
let chartGatewayChannel, chartGatewayRole, chartGatewayFirmware;
let chartPacketTypes;
async function init(){
@@ -414,10 +436,31 @@ async function init(){
chartRole=renderPieChart("chart_role",processCountField(nodes,"role"),"Role");
chartChannel=renderPieChart("chart_channel",processCountField(nodes,"channel"),"Channel");
const gateways = nodes.filter(n => n.is_mqtt_gateway);
chartGatewayChannel = renderPieChart(
"chart_gateway_channel",
processCountField(gateways, "channel"),
"Gateway Channel"
);
chartGatewayRole = renderPieChart(
"chart_gateway_role",
processCountField(gateways, "role"),
"Gateway Role"
);
chartGatewayFirmware = renderPieChart(
"chart_gateway_firmware",
processCountField(gateways, "firmware"),
"Gateway Firmware"
);
const summaryNodesEl = document.getElementById("summary_nodes");
if (summaryNodesEl) {
summaryNodesEl.textContent = nodes.length.toLocaleString();
}
const summaryGatewaysEl = document.getElementById("summary_gateways");
if (summaryGatewaysEl) {
summaryGatewaysEl.textContent = gateways.length.toLocaleString();
}
// Packet types pie
const packetTypesData = await fetchPacketTypeBreakdown();
@@ -464,6 +507,9 @@ window.addEventListener('resize',()=>{
chartHwModel,
chartRole,
chartChannel,
chartGatewayChannel,
chartGatewayRole,
chartGatewayFirmware,
chartPacketTypes
].forEach(c=>c?.resize());
});

View File

@@ -39,7 +39,8 @@
}
table th { background-color: #333; }
table tbody tr:nth-child(odd) { background-color: #272b2f; }
table tbody tr:nth-child(odd) { background-color: #272b2f; }
table tbody tr:nth-child(even) { background-color: #212529; }
table tbody tr:hover { background-color: #555; cursor: pointer; }
@@ -50,8 +51,15 @@
.node-link:hover { text-decoration: underline; }
.good-x { color: #81ff81; font-weight: bold; }
.ok-x { color: #e8e86d; font-weight: bold; }
.bad-x { color: #ff6464; font-weight: bold; }
.ok-x { color: #e8e86d; font-weight: bold; }
.bad-x { color: #ff6464; font-weight: bold; }
.pagination {
display: flex;
justify-content: center;
gap: 12px;
margin-top: 15px;
}
</style>
{% endblock %}
@@ -63,47 +71,42 @@
<div class="filter-bar">
<div>
<label for="channelFilter" data-translate-lang="channel">Channel:</label>
<select id="channelFilter" class="form-select form-select-sm" style="width:auto;"></select>
</div>
<div>
<label for="nodeSearch" data-translate-lang="search">Search:</label>
<input id="nodeSearch" type="text" class="form-control form-control-sm"
placeholder="Search nodes..."
data-translate-lang="search_placeholder"
style="width:180px; display:inline-block;">
<label data-translate-lang="channel">Channel:</label>
<select id="channelFilter"></select>
</div>
</div>
<!-- ⭐ ADDED NODE COUNT ⭐ -->
<div id="count-container" style="margin-bottom:10px; font-weight:bold;">
<div style="margin-bottom:10px;font-weight:bold;">
<span data-translate-lang="showing_nodes">Showing</span>
<span id="node-count">0</span>
<span data-translate-lang="nodes_suffix">nodes</span>
</div>
<div class="table-responsive">
<table id="nodesTable">
<thead>
<tr>
<th data-translate-lang="long_name">Long Name</th>
<th data-translate-lang="short_name">Short Name</th>
<th data-translate-lang="channel">Channel</th>
<th data-translate-lang="packets_sent">Sent (24h)</th>
<th data-translate-lang="times_seen">Seen (24h)</th>
<th data-translate-lang="avg_gateways">Avg Gateways</th>
</tr>
</thead>
<tbody></tbody>
</table>
<table id="nodesTable">
<thead>
<tr>
<th data-translate-lang="long_name">Long Name</th>
<th data-translate-lang="short_name">Short Name</th>
<th data-translate-lang="channel">Channel</th>
<th data-translate-lang="packets_sent">Sent (24h)</th>
<th data-translate-lang="times_seen">Seen (24h)</th>
<th data-translate-lang="avg_gateways">Avg Gateways</th>
</tr>
</thead>
<tbody></tbody>
</table>
<div class="pagination">
<button id="prevPage" class="btn btn-sm btn-secondary">Prev</button>
<span id="pageInfo"></span>
<button id="nextPage" class="btn btn-sm btn-secondary">Next</button>
</div>
</div>
<script>
/* ======================================================
TOP PAGE TRANSLATION (isolated from base)
TRANSLATIONS
====================================================== */
let topTranslations = {};
@@ -111,198 +114,127 @@ function applyTranslationsTop(dict, root=document) {
root.querySelectorAll("[data-translate-lang]").forEach(el => {
const key = el.dataset.translateLang;
if (!dict[key]) return;
// input placeholder support
if (el.tagName === "INPUT" && el.placeholder !== undefined) {
el.placeholder = dict[key];
} else {
el.textContent = dict[key];
}
el.textContent = dict[key];
});
}
/**
 * Load the "top" page translation strings for the configured language and
 * apply them to the document.
 *
 * Failures are logged, never thrown, so a missing language file cannot break
 * page rendering.  (The previous version repeated the fetch/apply sequence
 * unguarded after the try/catch — a bad merge — which defeated the catch.)
 */
async function loadTranslationsTop() {
  try {
    const cfg = await window._siteConfigPromise;
    // Fall back to English when the site config has no language set.
    const lang = cfg?.site?.language || "en";
    const res = await fetch(`/api/lang?lang=${lang}&section=top`);
    topTranslations = await res.json();
    applyTranslationsTop(topTranslations);
  } catch (err) {
    console.error("TOP translation load failed:", err);
  }
}
/* ======================================================
PAGE LOGIC
CONFIG
====================================================== */
let allNodes = [];
// Populate the #channelFilter <select> from /api/channels, pinning
// "LongFast" to the top of the list.  Network/JSON errors only log.
// NOTE(review): another loadChannels declaration appears later in this
// script (merge artifact); in JavaScript the later declaration wins —
// confirm which version is intended.
async function loadChannels() {
try {
const res = await fetch("/api/channels");
const data = await res.json();
const channels = data.channels || [];
const select = document.getElementById("channelFilter");
// LongFast first
if (channels.includes("LongFast")) {
const opt = document.createElement("option");
opt.value = "LongFast";
opt.textContent = "LongFast";
select.appendChild(opt);
}
// Remaining channels in API order; skip LongFast (already added above).
for (const ch of channels) {
if (ch === "LongFast") continue;
const opt = document.createElement("option");
opt.value = ch;
opt.textContent = ch;
select.appendChild(opt);
}
// Re-render the table whenever the user changes channel.
select.addEventListener("change", renderTable);
} catch (err) {
console.error("Error loading channels:", err);
}
}
/**
 * Fetch the full node directory from /api/nodes into the module-level
 * allNodes cache.  On failure the error is logged and allNodes is left
 * unchanged.
 */
async function loadNodes() {
  try {
    const response = await fetch("/api/nodes");
    const payload = await response.json();
    allNodes = payload.nodes || [];
  } catch (err) {
    console.error("Error loading nodes:", err);
  }
}
/**
 * Fetch 24h packet statistics for one node.
 *
 * Returns { sent, seen, avg } where avg = seen / max(sent, 1); on any
 * fetch/parse error returns all-zero stats so callers never see a rejection.
 */
async function fetchNodeStats(nodeId) {
  try {
    const response = await fetch(`/api/stats/count?from_node=${nodeId}&period_type=day&length=1`);
    const body = await response.json();
    const sent = body.total_packets || 0;
    const seen = body.total_seen || 0;
    return { sent, seen, avg: seen / Math.max(sent, 1) };
  } catch (err) {
    console.error("Stat error:", err);
    return { sent: 0, seen: 0, avg: 0 };
  }
}
const PAGE_SIZE = 20;
let currentPage = 0;
let totalRows = 0;
/* ======================================================
HELPERS
====================================================== */
/**
 * Map an average-gateways value to a CSS class:
 *   >= 10 → "good-x", >= 2 → "ok-x", otherwise "bad-x".
 * (A duplicated `if (v >= 2)` line — dead code from a bad merge — was removed.)
 */
function avgClass(v) {
  if (v >= 10) return "good-x";
  if (v >= 2) return "ok-x";
  return "bad-x";
}
/* ======================================================
LOAD CHANNELS
====================================================== */
/**
 * Rebuild the #channelFilter <select> from /api/channels and default the
 * selection to "MediumFast".
 */
async function loadChannels() {
  const resp = await fetch("/api/channels");
  const payload = await resp.json();
  const select = document.getElementById("channelFilter");
  select.innerHTML = "";
  (payload.channels || []).forEach((name) => {
    const option = document.createElement("option");
    option.value = name;
    option.textContent = name;
    select.appendChild(option);
  });
  select.value = "MediumFast";
}
/* ======================================================
FETCH + RENDER
====================================================== */
/**
 * Fetch one page of per-node stats from /api/stats/top and render it.
 *
 * The server handles channel filtering, sorting by "seen" and pagination;
 * this function only draws rows and updates the pager controls.
 * (The previous body interleaved this server-paginated implementation with
 * remnants of the removed client-side one — allNodes filtering, placeholder
 * rows, per-node stat fetches — leaving duplicated and unreachable logic.)
 *
 * NOTE(review): the free-text #nodeSearch filter is not applied here —
 * /api/stats/top exposes no search parameter; confirm whether server-side
 * search should be added.
 */
async function renderTable() {
  const tbody = document.querySelector("#nodesTable tbody");
  tbody.innerHTML = "";

  const channel = document.getElementById("channelFilter").value;
  const offset = currentPage * PAGE_SIZE;

  const url = new URL("/api/stats/top", window.location.origin);
  url.searchParams.set("limit", PAGE_SIZE);
  url.searchParams.set("offset", offset);
  if (channel) url.searchParams.set("channel", channel);

  const res = await fetch(url);
  const data = await res.json();
  totalRows = data.total || 0;

  for (const n of data.nodes || []) {
    const tr = document.createElement("tr");
    // Whole row navigates to the node page; the inner link stops
    // propagation so middle-click/open-in-tab still works on it.
    tr.onclick = () => location.href = `/node/${n.node_id}`;
    tr.innerHTML = `
      <td>
        <a class="node-link" href="/node/${n.node_id}"
           onclick="event.stopPropagation()">
          ${n.long_name || n.node_id}
        </a>
      </td>
      <td>${n.short_name || ""}</td>
      <td>${n.channel || ""}</td>
      <td>${n.sent}</td>
      <td>${n.seen}</td>
      <td><span class="${avgClass(n.avg)}">${n.avg.toFixed(1)}</span></td>
    `;
    tbody.appendChild(tr);
  }

  // Pager state: at least one page even when there are no rows.
  const totalPages = Math.max(1, Math.ceil(totalRows / PAGE_SIZE));
  document.getElementById("node-count").textContent = totalRows;
  document.getElementById("pageInfo").textContent =
    `Page ${currentPage + 1} / ${totalPages}`;
  document.getElementById("prevPage").disabled = currentPage === 0;
  document.getElementById("nextPage").disabled = currentPage >= totalPages - 1;
}
/* ======================================================
INITIALIZE PAGE
INIT
====================================================== */
/**
 * Page bootstrap.  Translations load first so labels render localized,
 * then channels populate the filter, and finally the first page of the
 * table is drawn.  (The previous body merged two init paths: it called
 * the removed loadNodes(), overrode the channel default set by
 * loadChannels(), wired #nodeSearch to a renderTable that ignores it,
 * and rendered the table twice.)
 */
document.addEventListener("DOMContentLoaded", async () => {
  await loadTranslationsTop();
  await loadChannels();

  // Changing channel restarts pagination from the first page.
  channelFilter.onchange = () => {
    currentPage = 0;
    renderTable();
  };

  prevPage.onclick = () => {
    if (currentPage > 0) {
      currentPage--;
      renderTable();
    }
  };
  nextPage.onclick = () => {
    currentPage++;
    renderTable();
  };

  renderTable();
});
</script>

View File

@@ -0,0 +1,138 @@
{% extends "base.html" %}
{% block head %}
<script src="https://cdn.jsdelivr.net/npm/echarts/dist/echarts.min.js"></script>
{% endblock %}
{% block css %}
#traceroute-graph {
width: 100%;
height: 85vh;
border: 1px solid #2a2f36;
background: linear-gradient(135deg, #0f1216 0%, #171b22 100%);
border-radius: 10px;
}
#traceroute-meta {
padding: 12px 16px;
color: #c8d0da;
}
#traceroute-error {
color: #ff6b6b;
}
{% endblock %}
{% block body %}
<div id="traceroute-meta">
<div><b>Traceroute</b> <span id="traceroute-title"></span></div>
<div id="traceroute-error"></div>
</div>
<div id="traceroute-graph"></div>
<script>
const el = document.getElementById("traceroute-graph");
const chart = echarts.init(el);
/** Return the trailing path segment of the current URL (the packet id). */
function packetIdFromPath() {
  return window.location.pathname.split("/").filter(Boolean).pop();
}
/**
 * Append directed edges connecting consecutive hops of `path` onto `edges`,
 * all sharing the given echarts line style.  Node ids are stringified to
 * match the graph's string node ids.
 */
function addPathEdges(path, edges, style) {
  path.slice(1).forEach((hop, i) => {
    edges.push({
      source: String(path[i]),
      target: String(hop),
      lineStyle: style
    });
  });
}
// Fetch the traceroute for the packet id in the URL and render it as a
// force-directed echarts graph: forward hops solid red, reverse hops dashed
// blue; the packet's origin and target nodes are highlighted and enlarged.
async function loadTraceroute() {
  const packetId = packetIdFromPath();
  document.getElementById("traceroute-title").textContent = `#${packetId}`;
  // Load the traceroute data and the node directory in parallel.
  const [res, nodesRes] = await Promise.all([
    fetch(`/api/traceroute/${packetId}`),
    fetch("/api/nodes"),
  ]);
  if (!res.ok) {
    document.getElementById("traceroute-error").textContent = "Traceroute not found.";
    return;
  }
  const data = await res.json();
  // Node names are best-effort: fall back to an empty directory if /api/nodes failed.
  const nodesData = nodesRes.ok ? await nodesRes.json() : { nodes: [] };
  // id → short display name (short_name, else long_name, else the raw id).
  const nodeShortNameById = new Map(
    (nodesData.nodes || []).map(n => [String(n.node_id), n.short_name || n.long_name || String(n.node_id)])
  );
  // id → long display name, used for the hover tooltip.
  const nodeLongNameById = new Map(
    (nodesData.nodes || []).map(n => [String(n.node_id), n.long_name || n.short_name || String(n.node_id)])
  );
  const nodes = new Map();
  const edges = [];
  const forwardPaths = data?.winning_paths?.forward || [];
  const reversePaths = data?.winning_paths?.reverse || [];
  const originId = data?.packet?.from != null ? String(data.packet.from) : null;
  const targetId = data?.packet?.to != null ? String(data.packet.to) : null;
  // Solid red edges for the forward route…
  forwardPaths.forEach(path => {
    path.forEach(id => nodes.set(String(id), { name: String(id) }));
    addPathEdges(path, edges, { color: "#ff5733", width: 3 });
  });
  // …dashed blue for the return route.
  reversePaths.forEach(path => {
    path.forEach(id => nodes.set(String(id), { name: String(id) }));
    addPathEdges(path, edges, { color: "#00c3ff", width: 2, type: "dashed" });
  });
  const graphNodes = Array.from(nodes.values()).map(n => {
    const isOrigin = originId && n.name === originId;
    const isTarget = targetId && n.name === targetId;
    // Origin red, target green, intermediate hops muted blue-grey.
    const color = isOrigin ? "#ff3b30" : isTarget ? "#34c759" : "#8aa4c8";
    const size = isOrigin || isTarget ? 44 : 36;
    return {
      id: n.name,
      name: nodeShortNameById.get(n.name) || n.name,
      symbolSize: size,
      itemStyle: { color },
      label: {
        show: true,
        color: "#e7eef7",
        fontWeight: "bold"
      },
      tooltip: {
        formatter: () => nodeLongNameById.get(n.name) || n.name
      }
    };
  });
  const option = {
    backgroundColor: "transparent",
    tooltip: { trigger: "item" },
    series: [
      {
        type: "graph",
        layout: "force",
        roam: true,
        zoom: 1.2,
        draggable: true,
        force: { repulsion: 200, edgeLength: 80 },
        data: graphNodes,
        edges: edges,
        lineStyle: { opacity: 0.8, curveness: 0.1 },
        edgeSymbol: ["none", "arrow"],
        edgeSymbolSize: 10
      }
    ]
  };
  chart.setOption(option);
}
loadTraceroute();
window.addEventListener("resize", () => chart.resize());
</script>
{% endblock %}

View File

@@ -1,7 +1,10 @@
"""Main web server routes and page rendering for Meshview."""
import asyncio
import datetime
import logging
import os
import pathlib
import re
import ssl
from dataclasses import dataclass
@@ -12,12 +15,13 @@ from google.protobuf import text_format
from google.protobuf.message import Message
from jinja2 import Environment, PackageLoader, Undefined, select_autoescape
from markupsafe import Markup
import pathlib
from meshtastic.protobuf.portnums_pb2 import PortNum
from meshview import config, database, decode_payload, migrations, models, store
from meshview.__version__ import (
__version_string__,
)
from meshview.deps import check_optional_deps
from meshview.web_api import api
logging.basicConfig(
@@ -35,6 +39,7 @@ env = Environment(loader=PackageLoader("meshview"), autoescape=select_autoescape
# Start Database
database.init_database(CONFIG["database"]["connection_string"])
check_optional_deps()
BASE_DIR = os.path.dirname(__file__)
LANG_DIR = os.path.join(BASE_DIR, "lang")
@@ -45,22 +50,25 @@ with open(os.path.join(os.path.dirname(__file__), '1x1.png'), 'rb') as png:
@dataclass
class Packet:
"""UI-friendly packet wrapper for templates and API payloads."""
id: int
from_node_id: int
from_node: models.Node
to_node_id: int
to_node: models.Node
channel: str
portnum: int
data: str
raw_mesh_packet: object
raw_payload: object
payload: str
pretty_payload: Markup
import_time: datetime.datetime
import_time_us: int
@classmethod
def from_model(cls, packet):
"""Convert a Packet ORM model into a presentation-friendly Packet."""
mesh_packet, payload = decode_payload.decode(packet)
pretty_payload = None
@@ -97,11 +105,11 @@ class Packet:
from_node_id=packet.from_node_id,
to_node=packet.to_node,
to_node_id=packet.to_node_id,
channel=packet.channel,
portnum=packet.portnum,
data=text_mesh_packet,
payload=text_payload, # now always a string
pretty_payload=pretty_payload,
import_time=packet.import_time,
import_time_us=packet.import_time_us, # <-- include microseconds
raw_mesh_packet=mesh_packet,
raw_payload=payload,
@@ -109,6 +117,7 @@ class Packet:
async def build_trace(node_id):
"""Build a recent GPS trace list for a node using position packets."""
trace = []
for raw_p in await store.get_packets_from(
node_id, PortNum.POSITION_APP, since=datetime.timedelta(hours=24)
@@ -130,6 +139,7 @@ async def build_trace(node_id):
async def build_neighbors(node_id):
"""Return neighbor node metadata for the given node ID."""
packets = await store.get_packets_from(node_id, PortNum.NEIGHBORINFO_APP, limit=1)
packet = packets.first()
@@ -159,6 +169,7 @@ async def build_neighbors(node_id):
def node_id_to_hex(node_id):
"""Format a node_id in Meshtastic hex notation."""
if node_id is None or isinstance(node_id, Undefined):
return "Invalid node_id" # i... have no clue
if node_id == 4294967295:
@@ -168,6 +179,7 @@ def node_id_to_hex(node_id):
def format_timestamp(timestamp):
"""Normalize timestamps to ISO 8601 strings."""
if isinstance(timestamp, int):
timestamp = datetime.datetime.fromtimestamp(timestamp, datetime.UTC)
return timestamp.isoformat(timespec="milliseconds")
@@ -200,9 +212,11 @@ async def redirect_packet_list(request):
packet_id = request.match_info["packet_id"]
raise web.HTTPFound(location=f"/node/{packet_id}")
# Generic static HTML route
@routes.get("/{page}")
async def serve_page(request):
"""Serve static HTML pages from meshview/static."""
page = request.match_info["page"]
# default to index.html if no extension
@@ -217,6 +231,19 @@ async def serve_page(request):
return web.Response(text=content, content_type="text/html")
@routes.get("/docs/{doc}")
async def serve_doc(request):
    """Serve documentation files from docs/ (markdown).

    The requested name is resolved inside the docs/ directory; anything that
    escapes it (e.g. via ``../`` or symlinks) yields a 404.
    """
    doc = request.match_info["doc"]
    # Resolve the root as well: the containment check below compares resolved
    # paths, so an unresolved root (e.g. a symlinked install prefix) would make
    # `docs_root not in doc_path.parents` wrongly reject legitimate files.
    docs_root = (pathlib.Path(__file__).parent.parent / "docs").resolve()
    doc_path = (docs_root / doc).resolve()
    if not doc_path.is_file() or docs_root not in doc_path.parents:
        raise web.HTTPNotFound(text="Document not found")
    content = doc_path.read_text(encoding="utf-8")
    return web.Response(text=content, content_type="text/markdown")
@routes.get("/net")
async def net(request):
@@ -303,6 +330,15 @@ async def stats(request):
)
@routes.get("/traceroute/{packet_id}")
async def traceroute_page(request):
    """Render the traceroute visualization page.

    The packet_id path segment is read client-side by the template's
    JavaScript (which then fetches /api/traceroute/{packet_id}); the server
    only returns the static template here.
    """
    template = env.get_template("traceroute.html")
    return web.Response(
        text=template.render(),
        content_type="text/html",
    )
# Keep !!
@routes.get("/graph/traceroute/{packet_id}")
async def graph_traceroute(request):
@@ -352,8 +388,8 @@ async def graph_traceroute(request):
# It seems some nodes add them self to the list before uplinking
path.append(tr.gateway_node_id)
if not tr.done and tr.gateway_node_id not in node_seen_time and tr.import_time:
node_seen_time[path[-1]] = tr.import_time
if not tr.done and tr.gateway_node_id not in node_seen_time and tr.import_time_us:
node_seen_time[path[-1]] = tr.import_time_us
mqtt_nodes.add(tr.gateway_node_id)
node_color[path[-1]] = '#' + hex(hash(tuple(path)))[3:9]
@@ -363,7 +399,7 @@ async def graph_traceroute(request):
for path in paths:
used_nodes.update(path)
import_times = [tr.import_time for tr in traceroutes if tr.import_time]
import_times = [tr.import_time_us for tr in traceroutes if tr.import_time_us]
if import_times:
first_time = min(import_times)
else:
@@ -378,7 +414,7 @@ async def graph_traceroute(request):
f'[{node.short_name}] {node.long_name}\n{node_id_to_hex(node_id)}\n{node.role}'
)
if node_id in node_seen_time:
ms = (node_seen_time[node_id] - first_time).total_seconds() * 1000
ms = (node_seen_time[node_id] - first_time) / 1000
node_name += f'\n {ms:.2f}ms'
style = 'dashed'
if node_id == dest:
@@ -396,7 +432,7 @@ async def graph_traceroute(request):
shape='box',
color=node_color.get(node_id, 'black'),
style=style,
href=f"/packet_list/{node_id}",
href=f"/node/{node_id}",
)
)
@@ -412,6 +448,7 @@ async def graph_traceroute(request):
async def run_server():
"""Start the aiohttp web server after migrations are complete."""
# Wait for database migrations to complete before starting web server
logger.info("Checking database schema status...")
database_url = CONFIG["database"]["connection_string"]
@@ -428,6 +465,7 @@ async def run_server():
logger.info("Database schema verified - starting web server")
app = web.Application()
app.router.add_static("/static/", pathlib.Path(__file__).parent / "static")
app.add_routes(api.routes) # Add API routes
app.add_routes(routes) # Add main web routes

View File

@@ -3,15 +3,28 @@
import datetime
import json
import logging
import math
import os
from aiohttp import web
from sqlalchemy import text
from sqlalchemy import func, select
from meshtastic.protobuf.portnums_pb2 import PortNum
from meshview import database, decode_payload, store
from meshview.__version__ import __version__, _git_revision_short, get_version_info
from meshview.config import CONFIG
from meshview.models import Node, NodePublicKey
from meshview.models import Packet as PacketModel
from meshview.models import PacketSeen as PacketSeenModel
from meshview.radio.coverage import (
DEFAULT_MAX_DBM,
DEFAULT_MIN_DBM,
DEFAULT_RELIABILITY,
DEFAULT_THRESHOLD_DBM,
ITM_AVAILABLE,
compute_coverage,
compute_perimeter,
)
logger = logging.getLogger(__name__)
@@ -19,11 +32,35 @@ logger = logging.getLogger(__name__)
Packet = None
SEQ_REGEX = None
LANG_DIR = None
_LANG_CACHE = {}
# Create dedicated route table for API endpoints
routes = web.RouteTableDef()
def _haversine_km(lat1, lon1, lat2, lon2):
r = 6371.0
phi1 = math.radians(lat1)
phi2 = math.radians(lat2)
dphi = math.radians(lat2 - lat1)
dlambda = math.radians(lon2 - lon1)
a = math.sin(dphi / 2.0) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlambda / 2.0) ** 2
return 2 * r * math.asin(math.sqrt(a))
def _bearing_deg(lat1, lon1, lat2, lon2):
phi1 = math.radians(lat1)
phi2 = math.radians(lat2)
dlambda = math.radians(lon2 - lon1)
y = math.sin(dlambda) * math.cos(phi2)
x = math.cos(phi1) * math.sin(phi2) - math.sin(phi1) * math.cos(phi2) * math.cos(dlambda)
bearing = math.degrees(math.atan2(y, x))
return (bearing + 360.0) % 360.0
OBSERVED_MAX_DISTANCE_KM = 50.0
def init_api_module(packet_class, seq_regex, lang_dir):
"""Initialize API module with dependencies from main web module."""
global Packet, SEQ_REGEX, LANG_DIR
@@ -80,7 +117,9 @@ async def api_nodes(request):
"last_lat": getattr(n, "last_lat", None),
"last_long": getattr(n, "last_long", None),
"channel": n.channel,
"is_mqtt_gateway": getattr(n, "is_mqtt_gateway", None),
# "last_update": n.last_update.isoformat(),
"first_seen_us": n.first_seen_us,
"last_seen_us": n.last_seen_us,
}
)
@@ -126,15 +165,14 @@ async def api_packets(request):
"portnum": int(p.portnum) if p.portnum is not None else None,
"payload": (p.payload or "").strip(),
"import_time_us": p.import_time_us,
"import_time": p.import_time.isoformat() if p.import_time else None,
"channel": getattr(p.from_node, "channel", ""),
"channel": p.channel,
"long_name": getattr(p.from_node, "long_name", ""),
}
return web.json_response({"packets": [data]})
# --- Parse limit ---
try:
limit = min(max(int(limit_str), 1), 100)
limit = min(max(int(limit_str), 1), 1000)
except ValueError:
limit = 50
@@ -178,13 +216,17 @@ async def api_packets(request):
logger.warning(f"Invalid node_id: {node_id_str}")
# --- Fetch packets using explicit filters ---
contains_for_query = contains
if portnum == PortNum.TEXT_MESSAGE_APP and contains:
contains_for_query = None
packets = await store.get_packets(
from_node_id=from_node_id,
to_node_id=to_node_id,
node_id=node_id,
portnum=portnum,
after=since,
contains=contains,
contains=contains_for_query,
limit=limit,
)
@@ -208,8 +250,7 @@ async def api_packets(request):
packet_dict = {
"id": p.id,
"import_time_us": p.import_time_us,
"import_time": p.import_time.isoformat() if p.import_time else None,
"channel": getattr(p.from_node, "channel", ""),
"channel": p.channel,
"from_node_id": p.from_node_id,
"to_node_id": p.to_node_id,
"portnum": int(p.portnum),
@@ -228,20 +269,12 @@ async def api_packets(request):
packets_data.append(packet_dict)
# --- Latest import_time for incremental fetch ---
# --- Latest import_time_us for incremental fetch ---
latest_import_time = None
if packets_data:
for p in packets_data:
if p.get("import_time_us") and p["import_time_us"] > 0:
latest_import_time = max(latest_import_time or 0, p["import_time_us"])
elif p.get("import_time") and latest_import_time is None:
try:
dt = datetime.datetime.fromisoformat(
p["import_time"].replace("Z", "+00:00")
)
latest_import_time = int(dt.timestamp() * 1_000_000)
except Exception:
pass
response = {"packets": packets_data}
if latest_import_time is not None:
@@ -421,7 +454,7 @@ async def api_stats_count(request):
@routes.get("/api/edges")
async def api_edges(request):
since = datetime.datetime.now() - datetime.timedelta(hours=48)
since = datetime.datetime.now() - datetime.timedelta(hours=12)
filter_type = request.query.get("type")
# NEW → optional single-node filter
@@ -431,14 +464,10 @@ async def api_edges(request):
try:
node_filter = int(node_filter_str)
except ValueError:
return web.json_response(
{"error": "node_id must be integer"},
status=400
)
return web.json_response({"error": "node_id must be integer"}, status=400)
edges = {}
traceroute_count = 0
neighbor_packet_count = 0
edges_added_tr = 0
edges_added_neighbor = 0
@@ -463,8 +492,6 @@ async def api_edges(request):
# --- Neighbor edges ---
if filter_type in (None, "neighbor"):
packets = await store.get_packets(portnum=71)
neighbor_packet_count = len(packets)
for packet in packets:
try:
_, neighbor_info = decode_payload.decode(packet)
@@ -479,21 +506,16 @@ async def api_edges(request):
# Convert to list
edges_list = [
{"from": frm, "to": to, "type": edge_type}
for (frm, to), edge_type in edges.items()
{"from": frm, "to": to, "type": edge_type} for (frm, to), edge_type in edges.items()
]
# NEW → apply node_id filtering
if node_filter is not None:
edges_list = [
e for e in edges_list
if e["from"] == node_filter or e["to"] == node_filter
]
edges_list = [e for e in edges_list if e["from"] == node_filter or e["to"] == node_filter]
return web.json_response({"edges": edges_list})
@routes.get("/api/config")
async def api_config(request):
try:
@@ -607,9 +629,20 @@ async def api_lang(request):
if not os.path.exists(lang_file):
lang_file = os.path.join(LANG_DIR, "en.json")
# Load JSON translations
with open(lang_file, encoding="utf-8") as f:
translations = json.load(f)
# Cache by file + mtime to avoid re-reading on every request
try:
mtime = os.path.getmtime(lang_file)
except OSError:
mtime = None
cache_key = lang_file
cached = _LANG_CACHE.get(cache_key)
if cached and cached.get("mtime") == mtime:
translations = cached["translations"]
else:
with open(lang_file, encoding="utf-8") as f:
translations = json.load(f)
_LANG_CACHE[cache_key] = {"mtime": mtime, "translations": translations}
if section:
section = section.lower()
@@ -637,8 +670,14 @@ async def health_check(request):
# Check database connectivity
try:
async with database.async_session() as session:
await session.execute(text("SELECT 1"))
result = await session.execute(select(func.max(PacketModel.import_time_us)))
last_import_time_us = result.scalar()
health_status["database"] = "connected"
if last_import_time_us is not None:
now_us = int(datetime.datetime.now(datetime.UTC).timestamp() * 1_000_000)
health_status["seconds_since_last_message"] = round(
(now_us - last_import_time_us) / 1_000_000, 1
)
except Exception as e:
logger.error(f"Database health check failed: {e}")
health_status["database"] = "disconnected"
@@ -711,7 +750,6 @@ async def api_packets_seen(request):
"rx_snr": row.rx_snr,
"rx_rssi": row.rx_rssi,
"topic": row.topic,
"import_time": (row.import_time.isoformat() if row.import_time else None),
"import_time_us": row.import_time_us,
}
)
@@ -724,3 +762,394 @@ async def api_packets_seen(request):
{"error": "Internal server error"},
status=500,
)
@routes.get("/api/traceroute/{packet_id}")
async def api_traceroute(request):
    """Return decoded traceroute information for one packet as JSON.

    The response contains the raw per-gateway traceroute records, the
    de-duplicated forward/reverse hop paths (forward paths with occurrence
    counts), and the "winning" (done) paths extended with the packet's
    endpoint node ids.
    """
    packet_id = int(request.match_info['packet_id'])
    traceroutes = list(await store.get_traceroute(packet_id))
    packet = await store.get_packet(packet_id)
    if not packet:
        return web.json_response({"error": "Packet not found"}, status=404)
    tr_groups = []
    # --------------------------------------------
    # Decode each traceroute entry
    # --------------------------------------------
    for idx, tr in enumerate(traceroutes):
        route = decode_payload.decode_payload(PortNum.TRACEROUTE_APP, tr.route)
        forward_list = list(route.route)
        reverse_list = list(route.route_back)
        tr_groups.append(
            {
                "index": idx,
                "gateway_node_id": tr.gateway_node_id,
                "done": tr.done,
                "forward_hops": forward_list,
                "reverse_hops": reverse_list,
            }
        )
    # --------------------------------------------
    # Compute UNIQUE paths + counts + winning path
    # --------------------------------------------
    from collections import Counter
    forward_paths = []
    reverse_paths = []
    winning_forward_paths = []
    winning_reverse_paths = []
    for tr in tr_groups:
        # Tuples so paths are hashable for the set/Counter operations below.
        f = tuple(tr["forward_hops"])
        r = tuple(tr["reverse_hops"])
        if tr["forward_hops"]:
            forward_paths.append(f)
        if tr["reverse_hops"]:
            reverse_paths.append(r)
        # "done" marks a completed traceroute — its paths are the winners.
        if tr["done"]:
            if tr["forward_hops"]:
                winning_forward_paths.append(f)
            if tr["reverse_hops"]:
                winning_reverse_paths.append(r)
    # Deduplicate
    unique_forward_paths = sorted(set(forward_paths))
    unique_reverse_paths = sorted(set(reverse_paths))
    # Count occurrences
    forward_counts = Counter(forward_paths)
    # Convert for JSON output
    unique_forward_paths_json = [
        {"path": list(p), "count": forward_counts[p]} for p in unique_forward_paths
    ]
    unique_reverse_paths_json = [list(p) for p in unique_reverse_paths]
    from_node_id = packet.from_node_id
    to_node_id = packet.to_node_id
    # Extend winning forward paths with the packet endpoints when the reported
    # hop list does not already start/end with them.
    winning_forward_with_endpoints = []
    for path in set(winning_forward_paths):
        full_path = list(path)
        if from_node_id is not None and (not full_path or full_path[0] != from_node_id):
            full_path = [from_node_id, *full_path]
        if to_node_id is not None and (not full_path or full_path[-1] != to_node_id):
            full_path = [*full_path, to_node_id]
        winning_forward_with_endpoints.append(full_path)
    # Reverse paths run destination → source, so the endpoints are swapped.
    winning_reverse_with_endpoints = []
    for path in set(winning_reverse_paths):
        full_path = list(path)
        if to_node_id is not None and (not full_path or full_path[0] != to_node_id):
            full_path = [to_node_id, *full_path]
        if from_node_id is not None and (not full_path or full_path[-1] != from_node_id):
            full_path = [*full_path, from_node_id]
        winning_reverse_with_endpoints.append(full_path)
    winning_paths_json = {
        "forward": winning_forward_with_endpoints,
        "reverse": winning_reverse_with_endpoints,
    }
    # --------------------------------------------
    # Final API output
    # --------------------------------------------
    return web.json_response(
        {
            "packet": {
                "id": packet.id,
                "from": packet.from_node_id,
                "to": packet.to_node_id,
                "channel": packet.channel,
            },
            "traceroute_packets": tr_groups,
            "unique_forward_paths": unique_forward_paths_json,
            "unique_reverse_paths": unique_reverse_paths_json,
            "winning_paths": winning_paths_json,
        }
    )
@routes.get("/api/stats/top")
async def api_stats_top(request):
    """
    Returns nodes sorted by SEEN (high → low) with pagination.

    Query parameters:
        period_type: "hour" or "day" (default "day") — window unit.
        length:      number of window units (default 1, minimum 1).
        channel:     optional exact channel-name filter.
        limit:       page size, clamped to 1..100 (default 20).
        offset:      first row of the page, clamped to >= 0 (default 0).

    Malformed integer parameters return 400 instead of raising an
    unhandled ValueError (previously a 500).
    """

    def _int_param(name, default):
        # Validate integer query parameters up front.
        raw = request.query.get(name)
        if raw is None:
            return default
        try:
            return int(raw)
        except ValueError as exc:
            raise web.HTTPBadRequest(text=f"{name} must be an integer") from exc

    period_type = request.query.get("period_type", "day")
    length = max(_int_param("length", 1), 1)
    channel = request.query.get("channel")
    limit = min(max(_int_param("limit", 20), 1), 100)
    offset = max(_int_param("offset", 0), 0)

    multiplier = 3600 if period_type == "hour" else 86400
    window_us = length * multiplier * 1_000_000

    # Anchor the window at the newest import time in each table rather than
    # wall-clock now, so the stats stay meaningful if ingest has paused.
    max_packet_import = select(func.max(PacketModel.import_time_us)).scalar_subquery()
    max_seen_import = select(func.max(PacketSeenModel.import_time_us)).scalar_subquery()

    # Packets sent per node within the window.
    sent_cte = (
        select(PacketModel.from_node_id.label("node_id"), func.count().label("sent"))
        .where(PacketModel.import_time_us >= max_packet_import - window_us)
        .group_by(PacketModel.from_node_id)
        .cte("sent")
    )
    # Gateway receptions ("seen") per originating node within the window.
    seen_cte = (
        select(PacketModel.from_node_id.label("node_id"), func.count().label("seen"))
        .select_from(PacketSeenModel)
        .join(PacketModel, PacketModel.id == PacketSeenModel.packet_id)
        .where(PacketSeenModel.import_time_us >= max_seen_import - window_us)
        .group_by(PacketModel.from_node_id)
        .cte("seen")
    )

    query = (
        select(
            Node.node_id,
            Node.long_name,
            Node.short_name,
            Node.channel,
            func.coalesce(sent_cte.c.sent, 0).label("sent"),
            func.coalesce(seen_cte.c.seen, 0).label("seen"),
        )
        .select_from(Node)
        .outerjoin(sent_cte, sent_cte.c.node_id == Node.node_id)
        .outerjoin(seen_cte, seen_cte.c.node_id == Node.node_id)
    )
    count_query = select(func.count()).select_from(Node)

    # Apply the channel filter before pagination so LIMIT/OFFSET and the
    # total count all describe the same filtered set.
    if channel:
        query = query.where(Node.channel == channel)
        count_query = count_query.where(Node.channel == channel)

    query = (
        query.order_by(func.coalesce(seen_cte.c.seen, 0).desc())
        .limit(limit)
        .offset(offset)
    )

    async with database.async_session() as session:
        rows = (await session.execute(query)).all()
        total = (await session.execute(count_query)).scalar() or 0

    nodes = []
    for r in rows:
        # max(sent, 1) guards against divide-by-zero for nodes that were
        # seen by gateways without any sent packets in the window.
        avg = r.seen / max(r.sent, 1)
        nodes.append(
            {
                "node_id": r.node_id,
                "long_name": r.long_name,
                "short_name": r.short_name,
                "channel": r.channel,
                "sent": r.sent,
                "seen": r.seen,
                "avg": round(avg, 2),
            }
        )

    return web.json_response(
        {
            "total": total,
            "limit": limit,
            "offset": offset,
            "nodes": nodes,
        }
    )
@routes.get("/api/node/{node_id}/qr")
async def api_node_qr(request):
    """
    Generate a Meshtastic URL for importing the node as a contact.

    Returns JSON with the node names and a https://meshtastic.org/v/# URL
    whose fragment is the unpadded URL-safe base64 of a serialized
    SharedContact protobuf (the payload a QR code would carry).
    """
    try:
        node_id_str = request.match_info["node_id"]
        # int(x, 0) accepts decimal as well as 0x-prefixed hex node ids.
        node_id = int(node_id_str, 0)
    except (KeyError, ValueError):
        return web.json_response({"error": "Invalid node_id"}, status=400)

    node = await store.get_node(node_id)
    if not node:
        return web.json_response({"error": "Node not found"}, status=404)

    try:
        import base64

        from meshtastic.protobuf.admin_pb2 import SharedContact
        from meshtastic.protobuf.mesh_pb2 import User

        user = User()
        user.id = f"!{node_id:08x}"
        if node.long_name:
            user.long_name = node.long_name
        if node.short_name:
            user.short_name = node.short_name
        if node.hw_model:
            # hw_model is stored as a string; map it back onto the protobuf
            # enum if it matches, otherwise leave the field unset.
            try:
                from meshtastic.protobuf.mesh_pb2 import HardwareModel

                hw_model_value = getattr(HardwareModel, node.hw_model.upper(), None)
                if hw_model_value is not None:
                    user.hw_model = hw_model_value
            except (AttributeError, TypeError):
                pass

        contact = SharedContact()
        contact.node_num = node_id
        contact.user.CopyFrom(user)
        contact.manually_verified = False

        # URL-safe base64 without padding, as expected by the
        # meshtastic.org/v/# URL scheme.  urlsafe_b64encode replaces the
        # hand-rolled "+/-", "/_" substitutions of the previous version.
        contact_b64url = (
            base64.urlsafe_b64encode(contact.SerializeToString()).decode("ascii").rstrip("=")
        )
        meshtastic_url = f"https://meshtastic.org/v/#{contact_b64url}"

        return web.json_response(
            {
                "node_id": node_id,
                "long_name": node.long_name,
                "short_name": node.short_name,
                "meshtastic_url": meshtastic_url,
            }
        )
    except Exception as e:
        # logger.exception records the traceback, replacing the manual
        # traceback.format_exc() logging.
        logger.exception(f"Error generating QR URL for node {node_id}: {e}")
        return web.json_response({"error": f"Failed to generate URL: {str(e)}"}, status=500)
@routes.get("/api/node/{node_id}/impersonation-check")
async def api_node_impersonation_check(request):
    """
    Check if a node has multiple different public keys, which could indicate impersonation.
    """
    try:
        node_id_str = request.match_info["node_id"]
        # int(x, 0) accepts decimal as well as 0x-prefixed hex node ids.
        node_id = int(node_id_str, 0)
    except (KeyError, ValueError):
        return web.json_response({"error": "Invalid node_id"}, status=400)
    try:
        async with database.async_session() as session:
            # All distinct public keys ever recorded for this node.
            result = await session.execute(
                select(NodePublicKey.public_key).where(NodePublicKey.node_id == node_id).distinct()
            )
            public_keys = result.scalars().all()
            unique_key_count = len(public_keys)
            return web.json_response(
                {
                    "node_id": node_id,
                    "unique_public_key_count": unique_key_count,
                    # More than one distinct key is the impersonation heuristic.
                    "potential_impersonation": unique_key_count > 1,
                    # Cap the payload: at most three keys plus an ellipsis marker.
                    "public_keys": public_keys
                    if unique_key_count <= 3
                    else public_keys[:3] + ["..."],
                    "warning": "Multiple different public keys detected. This node may be getting impersonated."
                    if unique_key_count > 1
                    else None,
                }
            )
    except Exception as e:
        logger.error(f"Error checking impersonation for node {node_id}: {e}")
        return web.json_response({"error": "Failed to check impersonation"}, status=500)
@routes.get("/api/coverage/{node_id}")
async def api_coverage(request):
try:
node_id = int(request.match_info["node_id"], 0)
except (KeyError, ValueError):
return web.json_response({"error": "Invalid node_id"}, status=400)
if not ITM_AVAILABLE:
return web.json_response(
{"error": "Coverage requires pyitm. Run: pip install -r requirements.txt"},
status=503,
)
def parse_float(name, default):
value = request.query.get(name)
if value is None:
return default
try:
return float(value)
except ValueError as exc:
raise web.HTTPBadRequest(
text=json.dumps({"error": f"{name} must be a number"}),
content_type="application/json",
) from exc
try:
freq_mhz = parse_float("freq_mhz", 907.0)
tx_dbm = parse_float("tx_dbm", 20.0)
tx_height_m = parse_float("tx_height_m", 5.0)
rx_height_m = parse_float("rx_height_m", 1.5)
radius_km = parse_float("radius_km", 40.0)
step_km = parse_float("step_km", 0.25)
reliability = parse_float("reliability", DEFAULT_RELIABILITY)
threshold_dbm = parse_float("threshold_dbm", DEFAULT_THRESHOLD_DBM)
except web.HTTPBadRequest as exc:
raise exc
node = await store.get_node(node_id)
if not node or not node.last_lat or not node.last_long:
return web.json_response({"error": "Node not found or missing location"}, status=404)
lat = node.last_lat * 1e-7
lon = node.last_long * 1e-7
mode = request.query.get("mode", "perimeter")
if mode == "perimeter":
perimeter = compute_perimeter(
lat=round(lat, 7),
lon=round(lon, 7),
freq_mhz=round(freq_mhz, 3),
tx_dbm=round(tx_dbm, 2),
tx_height_m=round(tx_height_m, 2),
rx_height_m=round(rx_height_m, 2),
radius_km=round(radius_km, 2),
step_km=round(step_km, 3),
reliability=round(reliability, 3),
threshold_dbm=round(threshold_dbm, 1),
)
return web.json_response(
{"mode": "perimeter", "threshold_dbm": threshold_dbm, "perimeter": perimeter}
)
points = compute_coverage(
lat=round(lat, 7),
lon=round(lon, 7),
freq_mhz=round(freq_mhz, 3),
tx_dbm=round(tx_dbm, 2),
tx_height_m=round(tx_height_m, 2),
rx_height_m=round(rx_height_m, 2),
radius_km=round(radius_km, 2),
step_km=round(step_km, 3),
reliability=round(reliability, 3),
)
min_dbm = DEFAULT_MIN_DBM
max_dbm = DEFAULT_MAX_DBM
if points:
vals = [p[2] for p in points]
min_dbm = min(min_dbm, min(vals))
max_dbm = max(max_dbm, max(vals))
return web.json_response(
{"mode": "heatmap", "min_dbm": min_dbm, "max_dbm": max_dbm, "points": points}
)

View File

@@ -48,7 +48,7 @@ dev = [
# Linting
target-version = "py313"
line-length = 100
extend-exclude = ["build", "dist", ".venv"]
extend-exclude = ["build", "dist", ".venv", "meshtastic/protobuf", "nanopb_pb2.py"]
[tool.ruff.lint]
select = ["E", "F", "I", "UP", "B"] # pick your rulesets
@@ -56,4 +56,4 @@ ignore = ["E501"] # example; let formatter handle line len
[tool.ruff.format]
quote-style = "preserve"
indent-style = "space"
indent-style = "space"

View File

@@ -24,6 +24,7 @@ MarkupSafe~=3.0.2
# Graphs / diagrams
pydot~=3.0.4
pyitm~=0.3
#############################
@@ -47,4 +48,4 @@ objgraph~=3.6.2
# Testing
pytest~=8.3.4
pytest-aiohttp~=1.0.5
pytest-asyncio~=0.24.0
pytest-asyncio~=0.24.0

View File

@@ -76,12 +76,22 @@ port = 1883
username = meshdev
password = large4cats
# Optional list of node IDs to ignore. Comma-separated.
skip_node_ids =
# Optional list of secondary AES keys (base64), comma-separated.
secondary_keys =
# -------------------------
# Database Configuration
# -------------------------
[database]
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
# SQLAlchemy async connection string.
# Examples:
# sqlite+aiosqlite:///packets.db
# postgresql+asyncpg://user:pass@host:5432/meshview
connection_string = sqlite+aiosqlite:///packets.db

View File

@@ -0,0 +1,126 @@
#!/usr/bin/env python3
import argparse
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
def run(cmd, cwd=None):
    """Execute *cmd* (optionally inside directory *cwd*); raise CalledProcessError on non-zero exit."""
    subprocess.run(cmd, check=True, cwd=cwd)
def main():
    """Fetch the Meshtastic protobufs and regenerate their Python bindings.

    Clones --repo at --ref into a temporary directory, compiles every
    *.proto with protoc (falling back to grpc_tools.protoc), writes the
    generated modules under the repository root, and records the upstream
    commit hash in meshtastic/protobuf/UPSTREAM_REV.txt. With --check it
    only compares the recorded commit against upstream.

    Returns:
        int: process exit code — 0 on success / up to date, 1 on failure
        or (with --check) when out of date.
    """
    parser = argparse.ArgumentParser(description="Update Meshtastic protobufs")
    parser.add_argument(
        "--repo",
        default="https://github.com/meshtastic/protobufs.git",
        help="Meshtastic protobufs repo URL",
    )
    parser.add_argument(
        "--ref",
        default="master",
        help="Git ref to fetch (branch, tag, or commit)",
    )
    parser.add_argument(
        "--check",
        action="store_true",
        help="Only check if protobufs are up to date for the given ref",
    )
    args = parser.parse_args()

    # Generated files land relative to the repository root (parent of this script's dir).
    repo_root = Path(__file__).resolve().parents[1]
    out_root = repo_root

    with tempfile.TemporaryDirectory(prefix="meshtastic-protobufs-") as tmp:
        tmp_path = Path(tmp)
        print(f"Cloning {args.repo} ({args.ref}) into {tmp_path}...")
        # Shallow clone: only the requested ref, no history.
        run(["git", "clone", "--depth", "1", "--branch", args.ref, args.repo, str(tmp_path)])
        upstream_rev = (
            subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=tmp_path).decode().strip()
        )
        # UPSTREAM_REV.txt records the upstream commit the bindings were generated from.
        rev_file = out_root / "meshtastic" / "protobuf" / "UPSTREAM_REV.txt"
        current_rev = None
        if rev_file.exists():
            current_rev = rev_file.read_text(encoding="utf-8").strip()
        if args.check:
            # Check-only mode: report freshness without regenerating anything.
            if current_rev == upstream_rev:
                print(f"Up to date: {current_rev}")
                return 0
            print(f"Out of date. Local: {current_rev or 'unknown'} / Upstream: {upstream_rev}")
            return 1
        proto_root = None
        # Common locations in the meshtastic/protobufs repo
        candidates = [
            tmp_path / "meshtastic" / "protobuf",
            tmp_path / "protobufs",
            tmp_path / "protobuf",
            tmp_path / "proto",
        ]
        for candidate in candidates:
            if candidate.exists() and list(candidate.glob("*.proto")):
                proto_root = candidate
                break
        if proto_root is None:
            # Fallback: search for any directory containing .proto files
            # (first match wins — assumes one proto directory per repo).
            for candidate in tmp_path.rglob("*.proto"):
                proto_root = candidate.parent
                break
        if proto_root is None:
            print("Proto root not found in cloned repo.", file=sys.stderr)
            return 1
        protos = sorted(proto_root.glob("*.proto"))
        if not protos:
            print(f"No .proto files found in {proto_root}", file=sys.stderr)
            return 1
        # protoc expects paths relative to the -I include root (the clone dir)
        # so generated package paths mirror the upstream layout.
        rel_protos = [str(p.relative_to(tmp_path)) for p in protos]
        protoc = shutil.which("protoc")
        if protoc:
            cmd = [
                protoc,
                f"-I{tmp_path}",
                f"--python_out={out_root}",
                *rel_protos,
            ]
            print("Running protoc...")
            run(cmd, cwd=tmp_path)
        else:
            # No system protoc: fall back to the compiler bundled with grpcio-tools.
            try:
                import grpc_tools.protoc  # noqa: F401
            except Exception:
                print(
                    "protoc not found. Install it with your package manager, "
                    "or install grpcio-tools and re-run.",
                    file=sys.stderr,
                )
                return 1
            cmd = [
                sys.executable,
                "-m",
                "grpc_tools.protoc",
                f"-I{tmp_path}",
                f"--python_out={out_root}",
                *rel_protos,
            ]
            print("Running grpc_tools.protoc...")
            run(cmd, cwd=tmp_path)
        # Persist the upstream revision only after a successful generation.
        rev_file.parent.mkdir(parents=True, exist_ok=True)
        rev_file.write_text(upstream_rev + "\n", encoding="utf-8")
    print("Protobufs updated in meshtastic/protobuf/.")
    print("Review changes and commit them if desired.")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -7,9 +7,11 @@ import shutil
from pathlib import Path
from sqlalchemy import delete
from sqlalchemy.engine.url import make_url
from meshview import migrations, models, mqtt_database, mqtt_reader, mqtt_store
from meshview.config import CONFIG
from meshview.deps import check_optional_deps
# -------------------------
# Basic logging configuration
@@ -65,18 +67,16 @@ async def backup_database(database_url: str, backup_dir: str = ".") -> None:
backup_dir: Directory to store backups (default: current directory)
"""
try:
# Extract database file path from connection string
# Format: sqlite+aiosqlite:///path/to/db.db
if not database_url.startswith("sqlite"):
url = make_url(database_url)
if not url.drivername.startswith("sqlite"):
cleanup_logger.warning("Backup only supported for SQLite databases")
return
db_path = database_url.split("///", 1)[1] if "///" in database_url else None
if not db_path:
if not url.database or url.database == ":memory:":
cleanup_logger.error("Could not extract database path from connection string")
return
db_file = Path(db_path)
db_file = Path(url.database)
if not db_file.exists():
cleanup_logger.error(f"Database file not found: {db_file}")
return
@@ -153,11 +153,11 @@ async def daily_cleanup_at(
cleanup_logger.info("Waiting 60 seconds for backup to complete...")
await asyncio.sleep(60)
# Local-time cutoff as string for SQLite DATETIME comparison
cutoff = (datetime.datetime.now() - datetime.timedelta(days=days_to_keep)).strftime(
"%Y-%m-%d %H:%M:%S"
)
cleanup_logger.info(f"Running cleanup for records older than {cutoff}...")
cutoff_dt = (
datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_to_keep)
).replace(tzinfo=None)
cutoff_us = int(cutoff_dt.timestamp() * 1_000_000)
cleanup_logger.info(f"Running cleanup for records older than {cutoff_dt.isoformat()}...")
try:
async with db_lock: # Pause ingestion
@@ -168,7 +168,7 @@ async def daily_cleanup_at(
# Packet
# -------------------------
result = await session.execute(
delete(models.Packet).where(models.Packet.import_time < cutoff)
delete(models.Packet).where(models.Packet.import_time_us < cutoff_us)
)
cleanup_logger.info(f"Deleted {result.rowcount} rows from Packet")
@@ -176,7 +176,9 @@ async def daily_cleanup_at(
# PacketSeen
# -------------------------
result = await session.execute(
delete(models.PacketSeen).where(models.PacketSeen.import_time < cutoff)
delete(models.PacketSeen).where(
models.PacketSeen.import_time_us < cutoff_us
)
)
cleanup_logger.info(f"Deleted {result.rowcount} rows from PacketSeen")
@@ -184,7 +186,9 @@ async def daily_cleanup_at(
# Traceroute
# -------------------------
result = await session.execute(
delete(models.Traceroute).where(models.Traceroute.import_time < cutoff)
delete(models.Traceroute).where(
models.Traceroute.import_time_us < cutoff_us
)
)
cleanup_logger.info(f"Deleted {result.rowcount} rows from Traceroute")
@@ -192,17 +196,19 @@ async def daily_cleanup_at(
# Node
# -------------------------
result = await session.execute(
delete(models.Node).where(models.Node.last_update < cutoff)
delete(models.Node).where(models.Node.last_seen_us < cutoff_us)
)
cleanup_logger.info(f"Deleted {result.rowcount} rows from Node")
await session.commit()
if vacuum_db:
if vacuum_db and mqtt_database.engine.dialect.name == "sqlite":
cleanup_logger.info("Running VACUUM...")
async with mqtt_database.engine.begin() as conn:
await conn.exec_driver_sql("VACUUM;")
cleanup_logger.info("VACUUM completed.")
elif vacuum_db:
cleanup_logger.info("VACUUM skipped (not supported for this database).")
cleanup_logger.info("Cleanup completed successfully.")
cleanup_logger.info("Ingestion resumed after cleanup.")
@@ -232,6 +238,7 @@ async def load_database_from_mqtt(
# Main function
# -------------------------
async def main():
check_optional_deps()
logger = logging.getLogger(__name__)
# Initialize database
@@ -260,6 +267,9 @@ async def main():
await mqtt_database.create_tables()
logger.info("Database tables created")
# Load MQTT gateway cache after DB init/migrations
await mqtt_store.load_gateway_cache()
finally:
# Clear migration in progress flag
logger.info("Clearing migration status...")