mirror of
https://github.com/pablorevilla-meshtastic/meshview.git
synced 2026-03-04 23:27:46 +01:00
Compare commits
64 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f108197e5f | ||
|
|
20c2a3dc62 | ||
|
|
4a7fa1df08 | ||
|
|
685dbc9505 | ||
|
|
9aacceda28 | ||
|
|
a7051e7d26 | ||
|
|
7926e81562 | ||
|
|
2002e093af | ||
|
|
fc44f49f2d | ||
|
|
89fbc6aeca | ||
|
|
20e3f9c104 | ||
|
|
17fa92d4cf | ||
|
|
a48a3a4141 | ||
|
|
7d5b638eac | ||
|
|
5f5fe0da90 | ||
|
|
dd98814b2c | ||
|
|
4dd999178c | ||
|
|
01dce2a5e0 | ||
|
|
9622092c17 | ||
|
|
29da1487d4 | ||
|
|
357fb530e2 | ||
|
|
b43683a259 | ||
|
|
59379649e2 | ||
|
|
a62bc350c0 | ||
|
|
82ff4bb0df | ||
|
|
c454f2ef3a | ||
|
|
b93f640233 | ||
|
|
018e16e9fa | ||
|
|
41397072af | ||
|
|
43be448100 | ||
|
|
8c7f181002 | ||
|
|
5195868719 | ||
|
|
a473e32c59 | ||
|
|
be51dc9c55 | ||
|
|
bea6c8cd8e | ||
|
|
351c35ef42 | ||
|
|
7f722b6f12 | ||
|
|
52f1a1e788 | ||
|
|
f44a78730a | ||
|
|
a9a5e046ea | ||
|
|
37386f9e28 | ||
|
|
b66bfb1ee9 | ||
|
|
caf9cd1596 | ||
|
|
a4ebd2b23c | ||
|
|
5676ade6b7 | ||
|
|
319f8eac06 | ||
|
|
d85132133a | ||
|
|
b6d8af409c | ||
|
|
896a0980d5 | ||
|
|
7d395e5e27 | ||
|
|
c3cc01d7e7 | ||
|
|
ecbadc6087 | ||
|
|
ff30623bdf | ||
|
|
a43433ccb4 | ||
|
|
4d9db2a52c | ||
|
|
e30b59851f | ||
|
|
36dd91be63 | ||
|
|
c9639d851b | ||
|
|
4516c84128 | ||
|
|
fa98f56318 | ||
|
|
f85e783e8c | ||
|
|
a882bc22dd | ||
|
|
e12e3a2a41 | ||
|
|
da31794d8d |
6
.github/workflows/container.yml
vendored
6
.github/workflows/container.yml
vendored
@@ -2,6 +2,7 @@ name: Build container
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
@@ -23,7 +24,8 @@ jobs:
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=match,pattern=v\d.\d.\d,value=latest
|
||||
# publish :latest from the default branch
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
- name: Login to GitHub Container Registry
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
@@ -49,4 +51,4 @@ jobs:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
# optional cache (speeds up rebuilds)
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -2,7 +2,6 @@ env/*
|
||||
__pycache__/*
|
||||
meshview/__pycache__/*
|
||||
alembic/__pycache__/*
|
||||
meshtastic/protobuf/*
|
||||
|
||||
# Database files
|
||||
packets.db
|
||||
@@ -45,3 +44,4 @@ __pycache__/
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
packets.db-journal
|
||||
|
||||
@@ -35,7 +35,7 @@ RUN uv pip install --no-cache-dir --upgrade pip \
|
||||
COPY --chown=${APP_USER}:${APP_USER} . .
|
||||
|
||||
# Patch config
|
||||
RUN patch sample.config.ini < container/config.patch
|
||||
COPY --chown=${APP_USER}:${APP_USER} container/config.ini /app/sample.config.ini
|
||||
|
||||
# Clean
|
||||
RUN rm -rf /app/.git* && \
|
||||
@@ -77,4 +77,3 @@ CMD ["--pid_dir", "/tmp", "--py_exec", "/opt/venv/bin/python", "--config", "/etc
|
||||
|
||||
EXPOSE 8081
|
||||
VOLUME [ "/etc/meshview", "/var/lib/meshview", "/var/log/meshview" ]
|
||||
|
||||
|
||||
@@ -128,7 +128,11 @@ username =
|
||||
password =
|
||||
|
||||
[database]
|
||||
connection_string = sqlite+aiosqlite:///var/lib/meshview/packets.db
|
||||
# SQLAlchemy async connection string.
|
||||
# Examples:
|
||||
# sqlite+aiosqlite:///var/lib/meshview/packets.db
|
||||
# postgresql+asyncpg://user:pass@host:5432/meshview
|
||||
connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
|
||||
```
|
||||
|
||||
### Database Backups
|
||||
|
||||
172
README.md
172
README.md
@@ -4,6 +4,25 @@
|
||||
|
||||
The project serves as a real-time monitoring and diagnostic tool for the Meshtastic mesh network. It provides detailed insights into network activity, including message traffic, node positions, and telemetry data.
|
||||
|
||||
### Version 3.0.5 — February 2026
|
||||
- **IMPORTANT:** the predicted coverage feature requires the extra `pyitm` dependency. If it is not installed, the coverage API will return 503.
|
||||
- Ubuntu install (inside the venv): `./env/bin/pip install pyitm`
|
||||
- Coverage: predicted coverage overlay (Longley‑Rice area mode) with perimeter rendering and documentation.
|
||||
- Gateways: persistent gateway tracking (`is_mqtt_gateway`) and UI indicators in nodes, map popups, and stats.
|
||||
- Map UX: deterministic jitter for overlapping nodes; edges follow jittered positions.
|
||||
- Tooling: Meshtastic protobuf updater script with `--check` and `UPSTREAM_REV.txt` tracking.
|
||||
|
||||
|
||||
### Version 3.0.4 — Late January 2026
|
||||
- Database: multi‑DB support, PostgreSQL scripts, WAL config for SQLite, cleanup query timing fixes, removal of import time columns, and various time‑handling fixes.
|
||||
- UI/UX: extensive updates to node.html, nodelist.html, top.html, and packet.html (paging, stats, distance, status/favorites), plus net view changes to 12‑hour window.
|
||||
- API/logic: weekly mesh query fix, node list performance improvement, backwards‑compatibility and other bug fixes.
|
||||
- MQTT reader: configurable skip‑node list and secondary decryption keys.
|
||||
- Docs/ops: multiple documentation updates, updated site list, container workflow fixes/tests, README updates.
|
||||
|
||||
### Version 3.0.2 — January 2026
|
||||
- Changes to the database mean extra disk space is needed when updating to the latest version. SQLite requires rebuilding the database when dropping a column (we are dropping some of the old columns), so make sure you have free space equal to about 1.2x the size of the database in your environment. Depending on how big your database is, this may take a long time.
|
||||
|
||||
### Version 3.0.1 — December 2025
|
||||
|
||||
#### 🌐 Multi-Language Support (i18n)
|
||||
@@ -80,25 +99,32 @@ See [README-Docker.md](README-Docker.md) for container deployment and [docs/](do
|
||||
|
||||
Samples of currently running instances:
|
||||
|
||||
- https://meshview.bayme.sh (SF Bay Area)
|
||||
- https://www.svme.sh (Sacramento Valley)
|
||||
- https://meshview.nyme.sh (New York)
|
||||
- https://meshview.socalmesh.org (LA Area)
|
||||
- https://map.wpamesh.net (Western Pennsylvania)
|
||||
- https://meshview.chicagolandmesh.org (Chicago)
|
||||
- https://meshview.mt.gt (Canadaverse)
|
||||
- https://canadaverse.org (Canadaverse)
|
||||
- https://meshview.bayme.sh (SF Bay Area - USA)
|
||||
- https://www.svme.sh (Sacramento Valley - USA)
|
||||
- https://meshview.nyme.sh (New York - USA)
|
||||
- https://meshview.socalmesh.org (Los Angeles - USA)
|
||||
- https://map.wpamesh.net (Western Pennsylvania - USA)
|
||||
- https://meshview.chicagolandmesh.org (Chicago - USA)
|
||||
- https://meshview.freq51.net/ (Salt Lake City - USA)
|
||||
- https://meshview.mt.gt (Canada)
|
||||
- https://canadaverse.org (Canada)
|
||||
- https://meshview.meshtastic.es (Spain)
|
||||
- https://view.mtnme.sh (North Georgia / East Tennessee)
|
||||
- https://view.mtnme.sh (North Georgia / East Tennessee - USA)
|
||||
- https://meshview.lsinfra.de (Hessen - Germany)
|
||||
- https://meshview.pvmesh.org (Pioneer Valley, Massachusetts)
|
||||
- https://meshview.louisianamesh.org (Louisiana)
|
||||
- https://www.swlamesh.com/map (Southwest Louisiana)
|
||||
- https://meshview.meshcolombia.co/ (Colombia)
|
||||
- https://meshview-salzburg.jmt.gr/ (Salzburg / Austria)
|
||||
- https://meshview.pvmesh.org (Pioneer Valley, Massachusetts - USA)
|
||||
- https://meshview.louisianamesh.org (Louisiana - USA)
|
||||
- https://www.swlamesh.com (Southwest Louisiana - USA)
|
||||
- https://meshview.meshcolombia.co (Colombia)
|
||||
- https://meshview-salzburg.jmt.gr (Salzburg / Austria)
|
||||
- https://map.cromesh.eu (Croatia)
|
||||
- https://view.meshdresden.eu (Dresden / Germany)
|
||||
- https://meshview.meshoregon.com (Oregon - USA)
|
||||
- https://meshview.gamesh.net (Georgia - USA)
|
||||
|
||||
---
|
||||
|
||||
|
||||
|
||||
### Updating from 2.x to 3.x
|
||||
We are adding the use of Alembic. If you installed from GitHub,
|
||||
Update your codebase by running the pull command
|
||||
@@ -272,7 +298,10 @@ password = large4cats
|
||||
# Database Configuration
|
||||
# -------------------------
|
||||
[database]
|
||||
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
|
||||
# SQLAlchemy async connection string.
|
||||
# Examples:
|
||||
# sqlite+aiosqlite:///packets.db
|
||||
# postgresql+asyncpg://user:pass@host:5432/meshview
|
||||
connection_string = sqlite+aiosqlite:///packets.db
|
||||
|
||||
|
||||
@@ -306,6 +335,20 @@ db_cleanup_logfile = dbcleanup.log
|
||||
|
||||
---
|
||||
|
||||
## NOTE (PostgreSQL setup)
|
||||
If you want to use PostgreSQL instead of SQLite:
|
||||
|
||||
Install PostgreSQL for your OS.
|
||||
Create a user and database:
|
||||
```
|
||||
`CREATE USER meshview WITH PASSWORD 'change_me';`
|
||||
`CREATE DATABASE meshview OWNER meshview;`
|
||||
```
|
||||
Update `config.ini` example:
|
||||
```
|
||||
`connection_string = postgresql+asyncpg://meshview:change_me@localhost:5432/meshview`
|
||||
```
|
||||
|
||||
## Running Meshview
|
||||
|
||||
Start the database manager:
|
||||
@@ -475,16 +518,15 @@ db_cleanup_logfile = dbcleanup.log
|
||||
```
|
||||
Once changes are done you need to restart the script for changes to load.
|
||||
|
||||
### Alternatively we can do it via your OS
|
||||
### Alternatively we can do it via your OS (This example is Ubuntu like OS)
|
||||
- Create and save bash script below. (Modify /path/to/file/ to the correct path)
|
||||
- Name it cleanup.sh
|
||||
- Make it executable.
|
||||
```bash
|
||||
#!/bin/bash
|
||||
#!/bin/bash
|
||||
|
||||
DB_FILE="/path/to/file/packets.db"
|
||||
|
||||
|
||||
# Stop DB service
|
||||
sudo systemctl stop meshview-db.service
|
||||
sudo systemctl stop meshview-web.service
|
||||
@@ -493,10 +535,22 @@ sleep 5
|
||||
echo "Run cleanup..."
|
||||
# Run cleanup queries
|
||||
sqlite3 "$DB_FILE" <<EOF
|
||||
DELETE FROM packet WHERE import_time < datetime('now', '-14 day');
|
||||
DELETE FROM packet_seen WHERE import_time < datetime('now', '-14 day');
|
||||
DELETE FROM traceroute WHERE import_time < datetime('now', '-14 day');
|
||||
DELETE FROM node WHERE last_update < datetime('now', '-14 day') OR last_update IS NULL OR last_update = '';
|
||||
DELETE FROM packet
|
||||
WHERE import_time_us IS NOT NULL
|
||||
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
|
||||
SELECT 'packet deleted: ' || changes();
|
||||
DELETE FROM packet_seen
|
||||
WHERE import_time_us IS NOT NULL
|
||||
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
|
||||
SELECT 'packet_seen deleted: ' || changes();
|
||||
DELETE FROM traceroute
|
||||
WHERE import_time_us IS NOT NULL
|
||||
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
|
||||
SELECT 'traceroute deleted: ' || changes();
|
||||
DELETE FROM node
|
||||
WHERE last_seen_us IS NULL
|
||||
OR last_seen_us < (strftime('%s','now','-14 days') * 1000000);
|
||||
SELECT 'node deleted: ' || changes();
|
||||
VACUUM;
|
||||
EOF
|
||||
|
||||
@@ -506,6 +560,80 @@ sudo systemctl start meshview-web.service
|
||||
|
||||
echo "Database cleanup completed on $(date)"
|
||||
|
||||
```
|
||||
- If you are using PostgreSQL, use this version instead (adjust credentials/DB name):
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
DB="postgresql://meshview@localhost:5432/meshview"
|
||||
RETENTION_DAYS=14
|
||||
BATCH_SIZE=100
|
||||
|
||||
PSQL="/usr/bin/psql"
|
||||
|
||||
echo "[$(date)] Starting batched cleanup..."
|
||||
|
||||
while true; do
|
||||
DELETED=$(
|
||||
$PSQL "$DB" -At -v ON_ERROR_STOP=1 <<EOF
|
||||
WITH cutoff AS (
|
||||
SELECT (EXTRACT(EPOCH FROM (NOW() - INTERVAL '${RETENTION_DAYS} days')) * 1000000)::bigint AS ts
|
||||
),
|
||||
old_packets AS (
|
||||
SELECT id
|
||||
FROM packet, cutoff
|
||||
WHERE import_time_us IS NOT NULL
|
||||
AND import_time_us < cutoff.ts
|
||||
ORDER BY id
|
||||
LIMIT ${BATCH_SIZE}
|
||||
),
|
||||
ps_del AS (
|
||||
DELETE FROM packet_seen
|
||||
WHERE packet_id IN (SELECT id FROM old_packets)
|
||||
RETURNING 1
|
||||
),
|
||||
tr_del AS (
|
||||
DELETE FROM traceroute
|
||||
WHERE packet_id IN (SELECT id FROM old_packets)
|
||||
RETURNING 1
|
||||
),
|
||||
p_del AS (
|
||||
DELETE FROM packet
|
||||
WHERE id IN (SELECT id FROM old_packets)
|
||||
RETURNING 1
|
||||
)
|
||||
SELECT COUNT(*) FROM p_del;
|
||||
EOF
|
||||
)
|
||||
|
||||
if [[ "$DELETED" -eq 0 ]]; then
|
||||
break
|
||||
fi
|
||||
|
||||
sleep 0.1
|
||||
done
|
||||
|
||||
echo "[$(date)] Packet cleanup complete"
|
||||
|
||||
echo "[$(date)] Cleaning old nodes..."
|
||||
|
||||
$PSQL "$DB" -v ON_ERROR_STOP=1 <<EOF
|
||||
DELETE FROM node
|
||||
WHERE last_seen_us IS NOT NULL
|
||||
AND last_seen_us < (
|
||||
EXTRACT(EPOCH FROM (NOW() - INTERVAL '${RETENTION_DAYS} days')) * 1000000
|
||||
);
|
||||
EOF
|
||||
|
||||
echo "[$(date)] Node cleanup complete"
|
||||
|
||||
$PSQL "$DB" -c "VACUUM (ANALYZE) packet_seen;"
|
||||
$PSQL "$DB" -c "VACUUM (ANALYZE) traceroute;"
|
||||
$PSQL "$DB" -c "VACUUM (ANALYZE) packet;"
|
||||
$PSQL "$DB" -c "VACUUM (ANALYZE) node;"
|
||||
|
||||
echo "[$(date)] Cleanup finished"
|
||||
```
|
||||
- Schedule running the script on a regular basis.
|
||||
- In this example it runs every night at 2:00am.
|
||||
|
||||
27
alembic/versions/23dad03d2e42_add_is_mqtt_gateway_to_node.py
Normal file
27
alembic/versions/23dad03d2e42_add_is_mqtt_gateway_to_node.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Add is_mqtt_gateway to node
|
||||
|
||||
Revision ID: 23dad03d2e42
|
||||
Revises: a0c9c13e118f
|
||||
Create Date: 2026-02-13 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "23dad03d2e42"
|
||||
down_revision: str | None = "a0c9c13e118f"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column("node", sa.Column("is_mqtt_gateway", sa.Boolean(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("node", "is_mqtt_gateway")
|
||||
65
alembic/versions/9f3b1a8d2c4f_drop_import_time_columns.py
Normal file
65
alembic/versions/9f3b1a8d2c4f_drop_import_time_columns.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Drop import_time columns.
|
||||
|
||||
Revision ID: 9f3b1a8d2c4f
|
||||
Revises: 2b5a61bb2b75
|
||||
Create Date: 2026-01-09 09:55:00.000000
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "9f3b1a8d2c4f"
|
||||
down_revision: str | None = "2b5a61bb2b75"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = sa.inspect(conn)
|
||||
|
||||
packet_indexes = {idx["name"] for idx in inspector.get_indexes("packet")}
|
||||
packet_columns = {col["name"] for col in inspector.get_columns("packet")}
|
||||
|
||||
with op.batch_alter_table("packet", schema=None) as batch_op:
|
||||
if "idx_packet_import_time" in packet_indexes:
|
||||
batch_op.drop_index("idx_packet_import_time")
|
||||
if "idx_packet_from_node_time" in packet_indexes:
|
||||
batch_op.drop_index("idx_packet_from_node_time")
|
||||
if "import_time" in packet_columns:
|
||||
batch_op.drop_column("import_time")
|
||||
|
||||
packet_seen_columns = {col["name"] for col in inspector.get_columns("packet_seen")}
|
||||
with op.batch_alter_table("packet_seen", schema=None) as batch_op:
|
||||
if "import_time" in packet_seen_columns:
|
||||
batch_op.drop_column("import_time")
|
||||
|
||||
traceroute_indexes = {idx["name"] for idx in inspector.get_indexes("traceroute")}
|
||||
traceroute_columns = {col["name"] for col in inspector.get_columns("traceroute")}
|
||||
with op.batch_alter_table("traceroute", schema=None) as batch_op:
|
||||
if "idx_traceroute_import_time" in traceroute_indexes:
|
||||
batch_op.drop_index("idx_traceroute_import_time")
|
||||
if "import_time" in traceroute_columns:
|
||||
batch_op.drop_column("import_time")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
with op.batch_alter_table("traceroute", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
|
||||
batch_op.create_index("idx_traceroute_import_time", ["import_time"], unique=False)
|
||||
|
||||
with op.batch_alter_table("packet_seen", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
|
||||
|
||||
with op.batch_alter_table("packet", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
|
||||
batch_op.create_index("idx_packet_import_time", [sa.text("import_time DESC")], unique=False)
|
||||
batch_op.create_index(
|
||||
"idx_packet_from_node_time",
|
||||
["from_node_id", sa.text("import_time DESC")],
|
||||
unique=False,
|
||||
)
|
||||
43
alembic/versions/a0c9c13e118f_add_node_public_key.py
Normal file
43
alembic/versions/a0c9c13e118f_add_node_public_key.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Add node_public_key table
|
||||
|
||||
Revision ID: a0c9c13e118f
|
||||
Revises: d4d7b0c2e1a4
|
||||
Create Date: 2026-02-06 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "a0c9c13e118f"
|
||||
down_revision: str | None = "d4d7b0c2e1a4"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"node_public_key",
|
||||
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
|
||||
sa.Column("node_id", sa.BigInteger(), nullable=False),
|
||||
sa.Column("public_key", sa.String(), nullable=False),
|
||||
sa.Column("first_seen_us", sa.BigInteger(), nullable=True),
|
||||
sa.Column("last_seen_us", sa.BigInteger(), nullable=True),
|
||||
)
|
||||
op.create_index("idx_node_public_key_node_id", "node_public_key", ["node_id"], unique=False)
|
||||
op.create_index(
|
||||
"idx_node_public_key_public_key",
|
||||
"node_public_key",
|
||||
["public_key"],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index("idx_node_public_key_public_key", table_name="node_public_key")
|
||||
op.drop_index("idx_node_public_key_node_id", table_name="node_public_key")
|
||||
op.drop_table("node_public_key")
|
||||
94
alembic/versions/b7c3c2e3a1f0_add_last_update_us_to_node.py
Normal file
94
alembic/versions/b7c3c2e3a1f0_add_last_update_us_to_node.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Add last_update_us to node and migrate data.
|
||||
|
||||
Revision ID: b7c3c2e3a1f0
|
||||
Revises: 9f3b1a8d2c4f
|
||||
Create Date: 2026-01-12 10:12:00.000000
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "b7c3c2e3a1f0"
|
||||
down_revision: str | None = "9f3b1a8d2c4f"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def _parse_datetime(value):
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, datetime):
|
||||
dt = value
|
||||
elif isinstance(value, str):
|
||||
text = value.replace("Z", "+00:00")
|
||||
try:
|
||||
dt = datetime.fromisoformat(text)
|
||||
except ValueError:
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
|
||||
if dt.tzinfo is None:
|
||||
return dt.replace(tzinfo=UTC)
|
||||
return dt.astimezone(UTC)
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
|
||||
op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)
|
||||
|
||||
node = sa.table(
|
||||
"node",
|
||||
sa.column("id", sa.String()),
|
||||
sa.column("last_update", sa.DateTime()),
|
||||
sa.column("last_update_us", sa.BigInteger()),
|
||||
)
|
||||
|
||||
rows = conn.execute(sa.select(node.c.id, node.c.last_update)).all()
|
||||
for node_id, last_update in rows:
|
||||
dt = _parse_datetime(last_update)
|
||||
if dt is None:
|
||||
continue
|
||||
last_update_us = int(dt.timestamp() * 1_000_000)
|
||||
conn.execute(
|
||||
sa.update(node).where(node.c.id == node_id).values(last_update_us=last_update_us)
|
||||
)
|
||||
|
||||
if conn.dialect.name == "sqlite":
|
||||
with op.batch_alter_table("node", schema=None) as batch_op:
|
||||
batch_op.drop_column("last_update")
|
||||
else:
|
||||
op.drop_column("node", "last_update")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
op.add_column("node", sa.Column("last_update", sa.DateTime(), nullable=True))
|
||||
|
||||
node = sa.table(
|
||||
"node",
|
||||
sa.column("id", sa.String()),
|
||||
sa.column("last_update", sa.DateTime()),
|
||||
sa.column("last_update_us", sa.BigInteger()),
|
||||
)
|
||||
|
||||
rows = conn.execute(sa.select(node.c.id, node.c.last_update_us)).all()
|
||||
for node_id, last_update_us in rows:
|
||||
if last_update_us is None:
|
||||
continue
|
||||
dt = datetime.fromtimestamp(last_update_us / 1_000_000, tz=UTC).replace(tzinfo=None)
|
||||
conn.execute(sa.update(node).where(node.c.id == node_id).values(last_update=dt))
|
||||
|
||||
if conn.dialect.name == "sqlite":
|
||||
with op.batch_alter_table("node", schema=None) as batch_op:
|
||||
batch_op.drop_index("idx_node_last_update_us")
|
||||
batch_op.drop_column("last_update_us")
|
||||
else:
|
||||
op.drop_index("idx_node_last_update_us", table_name="node")
|
||||
op.drop_column("node", "last_update_us")
|
||||
@@ -0,0 +1,34 @@
|
||||
"""Drop last_update_us from node.
|
||||
|
||||
Revision ID: d4d7b0c2e1a4
|
||||
Revises: b7c3c2e3a1f0
|
||||
Create Date: 2026-01-12 10:20:00.000000
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "d4d7b0c2e1a4"
|
||||
down_revision: str | None = "b7c3c2e3a1f0"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
if conn.dialect.name == "sqlite":
|
||||
with op.batch_alter_table("node", schema=None) as batch_op:
|
||||
batch_op.drop_index("idx_node_last_update_us")
|
||||
batch_op.drop_column("last_update_us")
|
||||
else:
|
||||
op.drop_index("idx_node_last_update_us", table_name="node")
|
||||
op.drop_column("node", "last_update_us")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
|
||||
op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)
|
||||
90
container/config.ini
Normal file
90
container/config.ini
Normal file
@@ -0,0 +1,90 @@
|
||||
# -------------------------
|
||||
# Server Configuration
|
||||
# -------------------------
|
||||
[server]
|
||||
# The address to bind the server to. Use * to listen on all interfaces.
|
||||
bind = 0.0.0.0
|
||||
|
||||
# Port to run the web server on.
|
||||
port = 8081
|
||||
|
||||
# Path to TLS certificate (leave blank to disable HTTPS).
|
||||
tls_cert =
|
||||
|
||||
# Path for the ACME challenge if using Let's Encrypt.
|
||||
acme_challenge =
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Site Appearance & Behavior
|
||||
# -------------------------
|
||||
[site]
|
||||
domain =
|
||||
language = en
|
||||
title = Bay Area Mesh
|
||||
message = Real time data from around the bay area and beyond.
|
||||
starting = /chat
|
||||
|
||||
nodes = True
|
||||
conversations = True
|
||||
everything = True
|
||||
graphs = True
|
||||
stats = True
|
||||
net = True
|
||||
map = True
|
||||
top = True
|
||||
|
||||
map_top_left_lat = 39
|
||||
map_top_left_lon = -123
|
||||
map_bottom_right_lat = 36
|
||||
map_bottom_right_lon = -121
|
||||
|
||||
map_interval = 3
|
||||
firehose_interal = 3
|
||||
|
||||
weekly_net_message = Weekly Mesh check-in. We will keep it open on every Wednesday from 5:00pm for checkins. The message format should be (LONG NAME) - (CITY YOU ARE IN) #BayMeshNet.
|
||||
net_tag = #BayMeshNet
|
||||
|
||||
|
||||
# -------------------------
|
||||
# MQTT Broker Configuration
|
||||
# -------------------------
|
||||
[mqtt]
|
||||
server = mqtt.meshtastic.org
|
||||
topics = ["msh/US/bayarea/#", "msh/US/CA/mrymesh/#", "msh/US/CA/sacvalley"]
|
||||
port = 1883
|
||||
username = meshdev
|
||||
password = large4cats
|
||||
skip_node_ids =
|
||||
secondary_keys =
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Database Configuration
|
||||
# -------------------------
|
||||
[database]
|
||||
connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Database Cleanup Configuration
|
||||
# -------------------------
|
||||
[cleanup]
|
||||
enabled = False
|
||||
days_to_keep = 14
|
||||
hour = 2
|
||||
minute = 00
|
||||
vacuum = False
|
||||
|
||||
backup_enabled = False
|
||||
backup_dir = ./backups
|
||||
backup_hour = 2
|
||||
backup_minute = 00
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Logging Configuration
|
||||
# -------------------------
|
||||
[logging]
|
||||
access_log = False
|
||||
db_cleanup_logfile = /var/log/meshview/dbcleanup.log
|
||||
@@ -1,36 +0,0 @@
|
||||
# MeshView Docker Container
|
||||
|
||||
> **Note:** This directory contains legacy Docker build files.
|
||||
>
|
||||
> **For current Docker usage instructions, please see [README-Docker.md](../README-Docker.md) in the project root.**
|
||||
|
||||
## Current Approach
|
||||
|
||||
Pre-built container images are automatically built and published to GitHub Container Registry:
|
||||
|
||||
```bash
|
||||
docker pull ghcr.io/pablorevilla-meshtastic/meshview:latest
|
||||
```
|
||||
|
||||
See **[README-Docker.md](../README-Docker.md)** for:
|
||||
- Quick start instructions
|
||||
- Volume mount configuration
|
||||
- Docker Compose examples
|
||||
- Backup configuration
|
||||
- Troubleshooting
|
||||
|
||||
## Legacy Build (Not Recommended)
|
||||
|
||||
If you need to build your own image for development:
|
||||
|
||||
```bash
|
||||
# From project root
|
||||
docker build -f Containerfile -t meshview:local .
|
||||
```
|
||||
|
||||
The current Containerfile uses:
|
||||
- **Base Image**: `python:3.13-slim` (Debian-based)
|
||||
- **Build tool**: `uv` for fast dependency installation
|
||||
- **User**: Non-root user `app` (UID 10001)
|
||||
- **Exposed Port**: `8081`
|
||||
- **Volumes**: `/etc/meshview`, `/var/lib/meshview`, `/var/log/meshview`
|
||||
@@ -1,82 +1,38 @@
|
||||
|
||||
# API Documentation
|
||||
|
||||
## 1. Chat API
|
||||
Base URL: `http(s)://<host>`
|
||||
|
||||
### GET `/api/chat`
|
||||
Returns the most recent chat messages.
|
||||
All endpoints return JSON. Timestamps are either ISO 8601 strings or `*_us` values in
|
||||
microseconds since epoch.
|
||||
|
||||
**Query Parameters**
|
||||
- `limit` (optional, int): Maximum number of messages to return. Default: `100`.
|
||||
|
||||
**Response Example**
|
||||
```json
|
||||
{
|
||||
"packets": [
|
||||
{
|
||||
"id": 123,
|
||||
"import_time": "2025-07-22T12:45:00",
|
||||
"from_node_id": 987654,
|
||||
"from_node": "Alice",
|
||||
"channel": "main",
|
||||
"payload": "Hello, world!"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### GET `/api/chat/updates`
|
||||
Returns chat messages imported after a given timestamp.
|
||||
|
||||
**Query Parameters**
|
||||
- `last_time` (optional, ISO timestamp): Only messages imported after this time are returned.
|
||||
|
||||
**Response Example**
|
||||
```json
|
||||
{
|
||||
"packets": [
|
||||
{
|
||||
"id": 124,
|
||||
"import_time": "2025-07-22T12:50:00",
|
||||
"from_node_id": 987654,
|
||||
"from_node": "Alice",
|
||||
"channel": "main",
|
||||
"payload": "New message!"
|
||||
}
|
||||
],
|
||||
"latest_import_time": "2025-07-22T12:50:00"
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Nodes API
|
||||
## 1. Nodes API
|
||||
|
||||
### GET `/api/nodes`
|
||||
Returns a list of all nodes, with optional filtering by last seen.
|
||||
Returns a list of nodes, with optional filtering.
|
||||
|
||||
**Query Parameters**
|
||||
- `hours` (optional, int): Return nodes seen in the last N hours.
|
||||
- `days` (optional, int): Return nodes seen in the last N days.
|
||||
- `last_seen_after` (optional, ISO timestamp): Return nodes seen after this time.
|
||||
Query Parameters
|
||||
- `node_id` (optional, int): Exact node ID.
|
||||
- `role` (optional, string): Node role.
|
||||
- `channel` (optional, string): Channel name.
|
||||
- `hw_model` (optional, string): Hardware model.
|
||||
- `days_active` (optional, int): Nodes seen within the last N days.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": 42,
|
||||
"node_id": 1234,
|
||||
"long_name": "Alice",
|
||||
"short_name": "A",
|
||||
"channel": "main",
|
||||
"last_seen": "2025-07-22T12:40:00",
|
||||
"hardware": "T-Beam",
|
||||
"hw_model": "T-Beam",
|
||||
"firmware": "1.2.3",
|
||||
"role": "client",
|
||||
"last_lat": 37.7749,
|
||||
"last_long": -122.4194
|
||||
"last_lat": 377749000,
|
||||
"last_long": -1224194000,
|
||||
"channel": "main",
|
||||
"last_seen_us": 1736370123456789
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -84,45 +40,58 @@ Returns a list of all nodes, with optional filtering by last seen.
|
||||
|
||||
---
|
||||
|
||||
## 3. Packets API
|
||||
## 2. Packets API
|
||||
|
||||
### GET `/api/packets`
|
||||
Returns a list of packets with optional filters.
|
||||
Returns packets with optional filters.
|
||||
|
||||
**Query Parameters**
|
||||
- `limit` (optional, int): Maximum number of packets to return. Default: `200`.
|
||||
- `since` (optional, ISO timestamp): Only packets imported after this timestamp are returned.
|
||||
Query Parameters
|
||||
- `packet_id` (optional, int): Return exactly one packet (overrides other filters).
|
||||
- `limit` (optional, int): Max packets to return, clamped 1-1000. Default: `50`.
|
||||
- `since` (optional, int): Only packets imported after this microsecond timestamp.
|
||||
- `portnum` (optional, int): Filter by port number.
|
||||
- `contains` (optional, string): Payload substring filter.
|
||||
- `from_node_id` (optional, int): Filter by sender node ID.
|
||||
- `to_node_id` (optional, int): Filter by recipient node ID.
|
||||
- `node_id` (optional, int): Legacy filter matching either from or to node ID.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"packets": [
|
||||
{
|
||||
"id": 123,
|
||||
"import_time_us": 1736370123456789,
|
||||
"channel": "main",
|
||||
"from_node_id": 5678,
|
||||
"to_node_id": 91011,
|
||||
"portnum": 1,
|
||||
"import_time": "2025-07-22T12:45:00",
|
||||
"payload": "Hello, Bob!"
|
||||
"long_name": "Alice",
|
||||
"payload": "Hello, Bob!",
|
||||
"to_long_name": "Bob",
|
||||
"reply_id": 122
|
||||
}
|
||||
]
|
||||
],
|
||||
"latest_import_time": 1736370123456789
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
Notes
|
||||
- For `portnum=1` (text messages), packets are filtered to remove sequence-only payloads.
|
||||
- `latest_import_time` is returned when available for incremental polling (microseconds).
|
||||
|
||||
---
|
||||
|
||||
## 4. Channels API
|
||||
## 3. Channels API
|
||||
|
||||
### GET `/api/channels`
|
||||
Returns a list of channels seen in a given time period.
|
||||
Returns channels seen in a time period.
|
||||
|
||||
**Query Parameters**
|
||||
- `period_type` (optional, string): Time granularity (`hour` or `day`). Default: `hour`.
|
||||
Query Parameters
|
||||
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
|
||||
- `length` (optional, int): Number of periods to look back. Default: `24`.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"channels": ["LongFast", "MediumFast", "ShortFast"]
|
||||
@@ -131,29 +100,21 @@ Returns a list of channels seen in a given time period.
|
||||
|
||||
---
|
||||
|
||||
## 5. Statistics API
|
||||
## 4. Stats API
|
||||
|
||||
### GET `/api/stats`
|
||||
Returns packet statistics aggregated by time periods, with optional filtering.
|
||||
|
||||
Retrieve packet statistics aggregated by time periods, with optional filtering.
|
||||
|
||||
---
|
||||
|
||||
## Query Parameters
|
||||
|
||||
| Parameter | Type | Required | Default | Description |
|
||||
|--------------|---------|----------|----------|-------------------------------------------------------------------------------------------------|
|
||||
| `period_type` | string | No | `hour` | Time granularity of the stats. Allowed values: `hour`, `day`. |
|
||||
| `length` | integer | No | 24 | Number of periods to include (hours or days). |
|
||||
| `channel` | string | No | — | Filter results by channel name (case-insensitive). |
|
||||
| `portnum` | integer | No | — | Filter results by port number. |
|
||||
| `to_node` | integer | No | — | Filter results to packets sent **to** this node ID. |
|
||||
| `from_node` | integer | No | — | Filter results to packets sent **from** this node ID. |
|
||||
|
||||
---
|
||||
|
||||
## Response
|
||||
Query Parameters
|
||||
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
|
||||
- `length` (optional, int): Number of periods to include. Default: `24`.
|
||||
- `channel` (optional, string): Filter by channel (case-insensitive).
|
||||
- `portnum` (optional, int): Filter by port number.
|
||||
- `to_node` (optional, int): Filter by destination node ID.
|
||||
- `from_node` (optional, int): Filter by source node ID.
|
||||
- `node` (optional, int): If provided, return combined `sent` and `seen` totals for that node.
|
||||
|
||||
Response Example (series)
|
||||
```json
|
||||
{
|
||||
"period_type": "hour",
|
||||
@@ -163,65 +124,117 @@ Retrieve packet statistics aggregated by time periods, with optional filtering.
|
||||
"to_node": 12345678,
|
||||
"from_node": 87654321,
|
||||
"data": [
|
||||
{ "period": "2025-08-08 14:00", "count": 10 },
|
||||
{ "period": "2025-08-08 15:00", "count": 7 }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Response Example (`node` totals)
|
||||
```json
|
||||
{
|
||||
"node_id": 12345678,
|
||||
"period_type": "hour",
|
||||
"length": 24,
|
||||
"sent": 42,
|
||||
"seen": 58
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### GET `/api/stats/count`
|
||||
Returns total packet counts, optionally filtered.
|
||||
|
||||
Query Parameters
|
||||
- `packet_id` (optional, int): Filter packet_seen by packet ID.
|
||||
- `period_type` (optional, string): `hour` or `day`.
|
||||
- `length` (optional, int): Number of periods to include.
|
||||
- `channel` (optional, string): Filter by channel.
|
||||
- `from_node` (optional, int): Filter by source node ID.
|
||||
- `to_node` (optional, int): Filter by destination node ID.
|
||||
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"total_packets": 12345,
|
||||
"total_seen": 67890
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### GET `/api/stats/top`
|
||||
Returns nodes sorted by packets seen, with pagination.
|
||||
|
||||
Query Parameters
|
||||
- `period_type` (optional, string): `hour` or `day`. Default: `day`.
|
||||
- `length` (optional, int): Number of periods to include. Default: `1`.
|
||||
- `channel` (optional, string): Filter by channel.
|
||||
- `limit` (optional, int): Max nodes to return. Default: `20`, max `100`.
|
||||
- `offset` (optional, int): Pagination offset. Default: `0`.
|
||||
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"total": 250,
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"nodes": [
|
||||
{
|
||||
"period": "2025-08-08 14:00",
|
||||
"count": 10
|
||||
},
|
||||
{
|
||||
"period": "2025-08-08 15:00",
|
||||
"count": 7
|
||||
"node_id": 1234,
|
||||
"long_name": "Alice",
|
||||
"short_name": "A",
|
||||
"channel": "main",
|
||||
"sent": 100,
|
||||
"seen": 240,
|
||||
"avg": 2.4
|
||||
}
|
||||
// more entries...
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. Edges API
|
||||
## 5. Edges API
|
||||
|
||||
### GET `/api/edges`
|
||||
Returns network edges (connections between nodes) based on traceroutes and neighbor info.
|
||||
Traceroute edges are collected over the last 12 hours. Neighbor edges are based on
|
||||
port 71 packets.
|
||||
|
||||
**Query Parameters**
|
||||
- `type` (optional, string): Filter by edge type (`traceroute` or `neighbor`). If omitted, returns both types.
|
||||
Query Parameters
|
||||
- `type` (optional, string): `traceroute` or `neighbor`. If omitted, returns both.
|
||||
- `node_id` (optional, int): Filter edges to only those touching a node.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"edges": [
|
||||
{
|
||||
"from": 12345678,
|
||||
"to": 87654321,
|
||||
"type": "traceroute"
|
||||
},
|
||||
{
|
||||
"from": 11111111,
|
||||
"to": 22222222,
|
||||
"type": "neighbor"
|
||||
}
|
||||
{ "from": 12345678, "to": 87654321, "type": "traceroute" },
|
||||
{ "from": 11111111, "to": 22222222, "type": "neighbor" }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. Configuration API
|
||||
## 6. Config API
|
||||
|
||||
### GET `/api/config`
|
||||
Returns the current site configuration (safe subset exposed to clients).
|
||||
Returns a safe subset of server configuration.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"site": {
|
||||
"domain": "meshview.example.com",
|
||||
"domain": "example.com",
|
||||
"language": "en",
|
||||
"title": "Bay Area Mesh",
|
||||
"message": "Real time data from around the bay area",
|
||||
"title": "Meshview",
|
||||
"message": "",
|
||||
"starting": "/chat",
|
||||
"nodes": "true",
|
||||
"conversations": "true",
|
||||
"chat": "true",
|
||||
"everything": "true",
|
||||
"graphs": "true",
|
||||
"stats": "true",
|
||||
@@ -236,11 +249,11 @@ Returns the current site configuration (safe subset exposed to clients).
|
||||
"firehose_interval": 3,
|
||||
"weekly_net_message": "Weekly Mesh check-in message.",
|
||||
"net_tag": "#BayMeshNet",
|
||||
"version": "2.0.8 ~ 10-22-25"
|
||||
"version": "3.0.0"
|
||||
},
|
||||
"mqtt": {
|
||||
"server": "mqtt.bayme.sh",
|
||||
"topics": ["msh/US/bayarea/#"]
|
||||
"server": "mqtt.example.com",
|
||||
"topics": ["msh/region/#"]
|
||||
},
|
||||
"cleanup": {
|
||||
"enabled": "false",
|
||||
@@ -254,91 +267,126 @@ Returns the current site configuration (safe subset exposed to clients).
|
||||
|
||||
---
|
||||
|
||||
## 8. Language/Translations API
|
||||
## 7. Language API
|
||||
|
||||
### GET `/api/lang`
|
||||
Returns translation strings for the UI.
|
||||
Returns translation strings.
|
||||
|
||||
**Query Parameters**
|
||||
- `lang` (optional, string): Language code (e.g., `en`, `es`). Defaults to site language setting.
|
||||
- `section` (optional, string): Specific section to retrieve translations for.
|
||||
Query Parameters
|
||||
- `lang` (optional, string): Language code (e.g., `en`, `es`). Default from config or `en`.
|
||||
- `section` (optional, string): Return only one section (e.g., `nodelist`, `firehose`).
|
||||
|
||||
**Response Example (full)**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"chat": {
|
||||
"title": "Chat",
|
||||
"send": "Send"
|
||||
},
|
||||
"map": {
|
||||
"title": "Map",
|
||||
"zoom_in": "Zoom In"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Response Example (section-specific)**
|
||||
Request: `/api/lang?section=chat`
|
||||
```json
|
||||
{
|
||||
"title": "Chat",
|
||||
"send": "Send"
|
||||
"title": "Meshview",
|
||||
"search_placeholder": "Search..."
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. Health Check API
|
||||
## 8. Packets Seen API
|
||||
|
||||
### GET `/api/packets_seen/{packet_id}`
|
||||
Returns packet_seen entries for a packet.
|
||||
|
||||
Path Parameters
|
||||
- `packet_id` (required, int): Packet ID.
|
||||
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"seen": [
|
||||
{
|
||||
"packet_id": 123,
|
||||
"node_id": 456,
|
||||
"rx_time": "2025-07-22T12:45:00",
|
||||
"hop_limit": 7,
|
||||
"hop_start": 0,
|
||||
"channel": "main",
|
||||
"rx_snr": 5.0,
|
||||
"rx_rssi": -90,
|
||||
"topic": "msh/region/#",
|
||||
"import_time_us": 1736370123456789
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. Traceroute API
|
||||
|
||||
### GET `/api/traceroute/{packet_id}`
|
||||
Returns traceroute details and derived paths for a packet.
|
||||
|
||||
Path Parameters
|
||||
- `packet_id` (required, int): Packet ID.
|
||||
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"packet": {
|
||||
"id": 123,
|
||||
"from": 111,
|
||||
"to": 222,
|
||||
"channel": "main"
|
||||
},
|
||||
"traceroute_packets": [
|
||||
{
|
||||
"index": 0,
|
||||
"gateway_node_id": 333,
|
||||
"done": true,
|
||||
"forward_hops": [111, 444, 222],
|
||||
"reverse_hops": [222, 444, 111]
|
||||
}
|
||||
],
|
||||
"unique_forward_paths": [
|
||||
{ "path": [111, 444, 222], "count": 2 }
|
||||
],
|
||||
"unique_reverse_paths": [
|
||||
[222, 444, 111]
|
||||
],
|
||||
"winning_paths": [
|
||||
[111, 444, 222]
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. Health API
|
||||
|
||||
### GET `/health`
|
||||
Health check endpoint for monitoring, load balancers, and orchestration systems.
|
||||
Returns service health and database status.
|
||||
|
||||
**Response Example (Healthy)**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"status": "healthy",
|
||||
"timestamp": "2025-11-03T14:30:00.123456Z",
|
||||
"version": "3.0.0",
|
||||
"git_revision": "6416978",
|
||||
"timestamp": "2025-07-22T12:45:00+00:00",
|
||||
"version": "3.0.3",
|
||||
"git_revision": "abc1234",
|
||||
"database": "connected",
|
||||
"database_size": "853.03 MB",
|
||||
"database_size_bytes": 894468096
|
||||
}
|
||||
```
|
||||
|
||||
**Response Example (Unhealthy)**
|
||||
Status Code: `503 Service Unavailable`
|
||||
```json
|
||||
{
|
||||
"status": "unhealthy",
|
||||
"timestamp": "2025-11-03T14:30:00.123456Z",
|
||||
"version": "2.0.8",
|
||||
"git_revision": "6416978",
|
||||
"database": "disconnected"
|
||||
"database_size": "12.34 MB",
|
||||
"database_size_bytes": 12939444
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. Version API
|
||||
## 11. Version API
|
||||
|
||||
### GET `/version`
|
||||
Returns detailed version information including semver, release date, and git revision.
|
||||
Returns version metadata.
|
||||
|
||||
**Response Example**
|
||||
Response Example
|
||||
```json
|
||||
{
|
||||
"version": "2.0.8",
|
||||
"release_date": "2025-10-22",
|
||||
"git_revision": "6416978a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q",
|
||||
"git_revision_short": "6416978"
|
||||
"version": "3.0.3",
|
||||
"release_date": "2026-1-15",
|
||||
"git_revision": "abc1234",
|
||||
"git_revision_short": "abc1234"
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
- All timestamps (`import_time`, `last_seen`) are returned in ISO 8601 format.
|
||||
- `portnum` is an integer representing the packet type.
|
||||
- `payload` is always a UTF-8 decoded string.
|
||||
- Node IDs are integers (e.g., `12345678`).
|
||||
|
||||
37
docs/COVERAGE.md
Normal file
37
docs/COVERAGE.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# Coverage
|
||||
|
||||
## Predicted coverage
|
||||
|
||||
Meshview can display a predicted coverage boundary for a node. This is a **model**
|
||||
estimate, not a guarantee of real-world performance.
|
||||
|
||||
### How it works
|
||||
|
||||
The coverage boundary is computed using the Longley-Rice / ITM **area mode**
|
||||
propagation model. Area mode estimates average path loss over generic terrain
|
||||
and does not use a terrain profile. This means it captures general distance
|
||||
effects, but **does not** account for terrain shadows, buildings, or foliage.
|
||||
|
||||
### What you are seeing
|
||||
|
||||
The UI draws a **perimeter** (not a heatmap) that represents the furthest
|
||||
distance where predicted signal strength is above a threshold (default
|
||||
`-120 dBm`). The model is run radially from the node in multiple directions,
|
||||
and the last point above the threshold forms the outline.
|
||||
|
||||
### Key parameters
|
||||
|
||||
- **Frequency**: default `907 MHz`
|
||||
- **Transmit power**: default `20 dBm`
|
||||
- **Antenna heights**: default `5 m` (TX) and `1.5 m` (RX)
|
||||
- **Reliability**: default `0.5` (median)
|
||||
- **Terrain irregularity**: default `90 m` (average terrain)
|
||||
|
||||
### Limitations
|
||||
|
||||
- No terrain or building data is used (area mode only).
|
||||
- Results are sensitive to power, height, and threshold.
|
||||
- Environmental factors can cause large real-world deviations.
|
||||
|
||||
|
||||
|
||||
1
meshtastic/protobuf/UPSTREAM_REV.txt
Normal file
1
meshtastic/protobuf/UPSTREAM_REV.txt
Normal file
@@ -0,0 +1 @@
|
||||
e1a6b3a868d735da72cd6c94c574d655129d390a
|
||||
@@ -3,8 +3,8 @@
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
__version__ = "3.0.1"
|
||||
__release_date__ = "2025-12-4"
|
||||
__version__ = "3.0.5"
|
||||
__release_date__ = "2026-2-6"
|
||||
|
||||
|
||||
def get_git_revision():
|
||||
|
||||
@@ -6,7 +6,7 @@ parser = argparse.ArgumentParser(description="MeshView Configuration Loader")
|
||||
parser.add_argument(
|
||||
"--config", type=str, default="config.ini", help="Path to config.ini file (default: config.ini)"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
args, _ = parser.parse_known_args()
|
||||
|
||||
# Initialize config parser
|
||||
config_parser = configparser.ConfigParser()
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from sqlalchemy.engine.url import make_url
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from meshview import models
|
||||
@@ -9,10 +10,19 @@ async_session = None
|
||||
def init_database(database_connection_string):
|
||||
global engine, async_session
|
||||
kwargs = {"echo": False}
|
||||
# Ensure SQLite is opened in read-only mode
|
||||
database_connection_string += "?mode=ro"
|
||||
kwargs["connect_args"] = {"uri": True}
|
||||
engine = create_async_engine(database_connection_string, **kwargs)
|
||||
url = make_url(database_connection_string)
|
||||
connect_args = {}
|
||||
|
||||
if url.drivername.startswith("sqlite"):
|
||||
query = dict(url.query)
|
||||
query.setdefault("mode", "ro")
|
||||
url = url.set(query=query)
|
||||
connect_args["uri"] = True
|
||||
|
||||
if connect_args:
|
||||
kwargs["connect_args"] = connect_args
|
||||
|
||||
engine = create_async_engine(url, **kwargs)
|
||||
async_session = async_sessionmaker(
|
||||
bind=engine,
|
||||
class_=AsyncSession,
|
||||
|
||||
13
meshview/deps.py
Normal file
13
meshview/deps.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import logging
|
||||
from importlib.util import find_spec
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_optional_deps() -> None:
|
||||
if find_spec("pyitm") is None:
|
||||
logger.warning(
|
||||
"Optional dependency missing: pyitm. "
|
||||
"Coverage prediction is disabled. "
|
||||
"Run: ./env/bin/pip install -r requirements.txt"
|
||||
)
|
||||
@@ -13,13 +13,40 @@
|
||||
"go to node": "Go to Node",
|
||||
"all": "All",
|
||||
"portnum_options": {
|
||||
"0": "Unknown",
|
||||
"1": "Text Message",
|
||||
"2": "Remote Hardware",
|
||||
"3": "Position",
|
||||
"4": "Node Info",
|
||||
"5": "Routing",
|
||||
"6": "Admin",
|
||||
"7": "Text (Compressed)",
|
||||
"8": "Waypoint",
|
||||
"9": "Audio",
|
||||
"10": "Detection Sensor",
|
||||
"11": "Alert",
|
||||
"12": "Key Verification",
|
||||
"32": "Reply",
|
||||
"33": "IP Tunnel",
|
||||
"34": "Paxcounter",
|
||||
"35": "Store Forward++",
|
||||
"36": "Node Status",
|
||||
"64": "Serial",
|
||||
"65": "Store & Forward",
|
||||
"66": "Range Test",
|
||||
"67": "Telemetry",
|
||||
"68": "ZPS",
|
||||
"69": "Simulator",
|
||||
"70": "Traceroute",
|
||||
"71": "Neighbor Info"
|
||||
}
|
||||
"71": "Neighbor Info",
|
||||
"72": "ATAK",
|
||||
"73": "Map Report",
|
||||
"74": "Power Stress",
|
||||
"76": "Reticulum Tunnel",
|
||||
"77": "Cayenne",
|
||||
"256": "Private App",
|
||||
"257": "ATAK Forwarder"
|
||||
}
|
||||
},
|
||||
"chat": {
|
||||
"chat_title": "Chats:",
|
||||
@@ -53,8 +80,11 @@
|
||||
"last_lat": "Last Latitude",
|
||||
"last_long": "Last Longitude",
|
||||
"channel": "Channel",
|
||||
"mqtt_gateway": "MQTT",
|
||||
"last_seen": "Last Seen",
|
||||
"favorite": "Favorite",
|
||||
"yes": "Yes",
|
||||
"no": "No",
|
||||
|
||||
"time_just_now": "just now",
|
||||
"time_min_ago": "min ago",
|
||||
@@ -69,15 +99,21 @@
|
||||
"view_packet_details": "More details"
|
||||
},
|
||||
|
||||
"map": {
|
||||
"show_routers_only": "Show Routers Only",
|
||||
"share_view": "Share This View",
|
||||
"reset_filters": "Reset Filters To Defaults",
|
||||
"channel_label": "Channel:",
|
||||
"map": {
|
||||
"show_routers_only": "Show Routers Only",
|
||||
"show_mqtt_only": "Show MQTT Gateways Only",
|
||||
"share_view": "Share This View",
|
||||
"reset_filters": "Reset Filters To Defaults",
|
||||
"unmapped_packets_title": "Unmapped Packets",
|
||||
"unmapped_packets_empty": "No recent unmapped packets.",
|
||||
"channel_label": "Channel:",
|
||||
"model_label": "Model:",
|
||||
"role_label": "Role:",
|
||||
"mqtt_gateway": "MQTT Gateway:",
|
||||
"last_seen": "Last seen:",
|
||||
"firmware": "Firmware:",
|
||||
"yes": "Yes",
|
||||
"no": "No",
|
||||
"link_copied": "Link Copied!",
|
||||
"legend_traceroute": "Traceroute (with arrows)",
|
||||
"legend_neighbor": "Neighbor"
|
||||
@@ -88,6 +124,7 @@
|
||||
{
|
||||
"mesh_stats_summary": "Mesh Statistics - Summary (all available in Database)",
|
||||
"total_nodes": "Total Nodes",
|
||||
"total_gateways": "Total Gateways",
|
||||
"total_packets": "Total Packets",
|
||||
"total_packets_seen": "Total Packets Seen",
|
||||
"packets_per_day_all": "Packets per Day - All Ports (Last 14 Days)",
|
||||
@@ -98,6 +135,10 @@
|
||||
"hardware_breakdown": "Hardware Breakdown",
|
||||
"role_breakdown": "Role Breakdown",
|
||||
"channel_breakdown": "Channel Breakdown",
|
||||
"gateway_channel_breakdown": "Gateway Channel Breakdown",
|
||||
"gateway_role_breakdown": "Gateway Role Breakdown",
|
||||
"gateway_firmware_breakdown": "Gateway Firmware Breakdown",
|
||||
"no_gateways": "No gateways found",
|
||||
"expand_chart": "Expand Chart",
|
||||
"export_csv": "Export CSV",
|
||||
"all_channels": "All Channels",
|
||||
@@ -163,9 +204,11 @@
|
||||
"hw_model": "Hardware Model",
|
||||
"firmware": "Firmware",
|
||||
"role": "Role",
|
||||
"mqtt_gateway": "MQTT Gateway",
|
||||
"channel": "Channel",
|
||||
"latitude": "Latitude",
|
||||
"longitude": "Longitude",
|
||||
"first_update": "First Update",
|
||||
"last_update": "Last Update",
|
||||
"battery_voltage": "Battery & Voltage",
|
||||
"air_channel": "Air & Channel Utilization",
|
||||
@@ -183,7 +226,19 @@
|
||||
"statistics": "Statistics",
|
||||
"last_24h": "24h",
|
||||
"packets_sent": "Packets sent",
|
||||
"times_seen": "Times seen"
|
||||
"times_seen": "Times seen",
|
||||
"yes": "Yes",
|
||||
"no": "No",
|
||||
"copy_import_url": "Copy Import URL",
|
||||
"show_qr_code": "Show QR Code",
|
||||
"toggle_coverage": "Predicted Coverage",
|
||||
"location_required": "Location required for coverage",
|
||||
"coverage_help": "Coverage Help",
|
||||
"share_contact_qr": "Share Contact QR",
|
||||
"copy_url": "Copy URL",
|
||||
"copied": "Copied!",
|
||||
"potential_impersonation": "Potential Impersonation Detected",
|
||||
"scan_qr_to_add": "Scan this QR code to add this node as a contact on another device."
|
||||
},
|
||||
"packet": {
|
||||
"loading": "Loading packet information...",
|
||||
@@ -209,4 +264,4 @@
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,12 +13,39 @@
|
||||
"go_to_node": "Ir al nodo",
|
||||
"all": "Todos",
|
||||
"portnum_options": {
|
||||
"0": "Desconocido",
|
||||
"1": "Mensaje de Texto",
|
||||
"2": "Hardware Remoto",
|
||||
"3": "Ubicación",
|
||||
"4": "Información del Nodo",
|
||||
"5": "Enrutamiento",
|
||||
"6": "Administración",
|
||||
"7": "Texto (Comprimido)",
|
||||
"8": "Punto de Referencia",
|
||||
"9": "Audio",
|
||||
"10": "Sensor de Detección",
|
||||
"11": "Alerta",
|
||||
"12": "Verificación de Clave",
|
||||
"32": "Respuesta",
|
||||
"33": "Túnel IP",
|
||||
"34": "Paxcounter",
|
||||
"35": "Store Forward++",
|
||||
"36": "Estado del Nodo",
|
||||
"64": "Serial",
|
||||
"65": "Store & Forward",
|
||||
"66": "Prueba de Alcance",
|
||||
"67": "Telemetría",
|
||||
"68": "ZPS",
|
||||
"69": "Simulador",
|
||||
"70": "Traceroute",
|
||||
"71": "Información de Vecinos"
|
||||
"71": "Información de Vecinos",
|
||||
"72": "ATAK",
|
||||
"73": "Reporte de Mapa",
|
||||
"74": "Prueba de Energía",
|
||||
"76": "Túnel Reticulum",
|
||||
"77": "Cayenne",
|
||||
"256": "App Privada",
|
||||
"257": "ATAK Forwarder"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -51,8 +78,11 @@
|
||||
"last_lat": "Última latitud",
|
||||
"last_long": "Última longitud",
|
||||
"channel": "Canal",
|
||||
"mqtt_gateway": "MQTT",
|
||||
"last_seen": "Última vez visto",
|
||||
"favorite": "Favorito",
|
||||
"yes": "Sí",
|
||||
"no": "No",
|
||||
"time_just_now": "justo ahora",
|
||||
"time_min_ago": "min atrás",
|
||||
"time_hr_ago": "h atrás",
|
||||
@@ -67,14 +97,21 @@
|
||||
},
|
||||
|
||||
"map": {
|
||||
"filter_routers_only": "Mostrar solo enrutadores",
|
||||
"share_view": "Compartir esta vista",
|
||||
"reset_filters": "Restablecer filtros",
|
||||
"channel_label": "Canal:",
|
||||
"filter_routers_only": "Mostrar solo enrutadores",
|
||||
"show_routers_only": "Mostrar solo enrutadores",
|
||||
"show_mqtt_only": "Mostrar solo gateways MQTT",
|
||||
"share_view": "Compartir esta vista",
|
||||
"reset_filters": "Restablecer filtros",
|
||||
"unmapped_packets_title": "Paquetes sin mapa",
|
||||
"unmapped_packets_empty": "No hay paquetes sin mapa recientes.",
|
||||
"channel_label": "Canal:",
|
||||
"model_label": "Modelo:",
|
||||
"role_label": "Rol:",
|
||||
"mqtt_gateway": "Gateway MQTT:",
|
||||
"last_seen": "Visto por última vez:",
|
||||
"firmware": "Firmware:",
|
||||
"yes": "Sí",
|
||||
"no": "No",
|
||||
"link_copied": "¡Enlace copiado!",
|
||||
"legend_traceroute": "Ruta de traceroute (flechas de dirección)",
|
||||
"legend_neighbor": "Vínculo de vecinos"
|
||||
@@ -83,6 +120,7 @@
|
||||
"stats": {
|
||||
"mesh_stats_summary": "Estadísticas de la Malla - Resumen (completas en la base de datos)",
|
||||
"total_nodes": "Nodos Totales",
|
||||
"total_gateways": "Gateways Totales",
|
||||
"total_packets": "Paquetes Totales",
|
||||
"total_packets_seen": "Paquetes Totales Vistos",
|
||||
"packets_per_day_all": "Paquetes por Día - Todos los Puertos (Últimos 14 Días)",
|
||||
@@ -93,6 +131,10 @@
|
||||
"hardware_breakdown": "Distribución de Hardware",
|
||||
"role_breakdown": "Distribución de Roles",
|
||||
"channel_breakdown": "Distribución de Canales",
|
||||
"gateway_channel_breakdown": "Desglose de canales de gateways",
|
||||
"gateway_role_breakdown": "Desglose de roles de gateways",
|
||||
"gateway_firmware_breakdown": "Desglose de firmware de gateways",
|
||||
"no_gateways": "No se encontraron gateways",
|
||||
"expand_chart": "Ampliar Gráfico",
|
||||
"export_csv": "Exportar CSV",
|
||||
"all_channels": "Todos los Canales"
|
||||
@@ -148,9 +190,11 @@
|
||||
"hw_model": "Modelo de Hardware",
|
||||
"firmware": "Firmware",
|
||||
"role": "Rol",
|
||||
"mqtt_gateway": "Gateway MQTT",
|
||||
"channel": "Canal",
|
||||
"latitude": "Latitud",
|
||||
"longitude": "Longitud",
|
||||
"first_update": "Primera Actualización",
|
||||
"last_update": "Última Actualización",
|
||||
"battery_voltage": "Batería y voltaje",
|
||||
"air_channel": "Utilización del aire y del canal",
|
||||
@@ -168,7 +212,19 @@
|
||||
"statistics": "Estadísticas",
|
||||
"last_24h": "24h",
|
||||
"packets_sent": "Paquetes enviados",
|
||||
"times_seen": "Veces visto"
|
||||
"times_seen": "Veces visto",
|
||||
"yes": "Sí",
|
||||
"no": "No",
|
||||
"copy_import_url": "Copiar URL de importación",
|
||||
"show_qr_code": "Mostrar código QR",
|
||||
"toggle_coverage": "Cobertura predicha",
|
||||
"location_required": "Se requiere ubicación para la cobertura",
|
||||
"coverage_help": "Ayuda de cobertura",
|
||||
"share_contact_qr": "Compartir contacto QR",
|
||||
"copy_url": "Copiar URL",
|
||||
"copied": "¡Copiado!",
|
||||
"potential_impersonation": "Posible suplantación detectada",
|
||||
"scan_qr_to_add": "Escanea este código QR para agregar este nodo como contacto en otro dispositivo."
|
||||
},
|
||||
|
||||
"packet": {
|
||||
|
||||
@@ -186,19 +186,24 @@ async def create_migration_status_table(engine: AsyncEngine) -> None:
|
||||
text("""
|
||||
CREATE TABLE IF NOT EXISTS migration_status (
|
||||
id INTEGER PRIMARY KEY CHECK (id = 1),
|
||||
in_progress BOOLEAN NOT NULL DEFAULT 0,
|
||||
in_progress BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
)
|
||||
|
||||
# Insert initial row if not exists
|
||||
await conn.execute(
|
||||
result = await conn.execute(
|
||||
text("""
|
||||
INSERT OR IGNORE INTO migration_status (id, in_progress)
|
||||
VALUES (1, 0)
|
||||
SELECT 1 FROM migration_status WHERE id = 1
|
||||
""")
|
||||
)
|
||||
if result.first() is None:
|
||||
await conn.execute(
|
||||
text("""
|
||||
INSERT INTO migration_status (id, in_progress)
|
||||
VALUES (1, FALSE)
|
||||
""")
|
||||
)
|
||||
|
||||
|
||||
async def set_migration_in_progress(engine: AsyncEngine, in_progress: bool) -> None:
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import BigInteger, ForeignKey, Index, desc
|
||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||
@@ -22,7 +20,7 @@ class Node(Base):
|
||||
last_lat: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
last_long: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
channel: Mapped[str] = mapped_column(nullable=True)
|
||||
last_update: Mapped[datetime] = mapped_column(nullable=True)
|
||||
is_mqtt_gateway: Mapped[bool] = mapped_column(nullable=True)
|
||||
first_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
last_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
|
||||
@@ -33,11 +31,7 @@ class Node(Base):
|
||||
)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
column.name: getattr(self, column.name)
|
||||
for column in self.__table__.columns
|
||||
if column.name != "last_update"
|
||||
}
|
||||
return {column.name: getattr(self, column.name) for column in self.__table__.columns}
|
||||
|
||||
|
||||
class Packet(Base):
|
||||
@@ -55,17 +49,13 @@ class Packet(Base):
|
||||
overlaps="from_node",
|
||||
)
|
||||
payload: Mapped[bytes] = mapped_column(nullable=True)
|
||||
import_time: Mapped[datetime] = mapped_column(nullable=True)
|
||||
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
channel: Mapped[str] = mapped_column(nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index("idx_packet_from_node_id", "from_node_id"),
|
||||
Index("idx_packet_to_node_id", "to_node_id"),
|
||||
Index("idx_packet_import_time", desc("import_time")),
|
||||
Index("idx_packet_import_time_us", desc("import_time_us")),
|
||||
# Composite index for /top endpoint performance - filters by from_node_id AND import_time
|
||||
Index("idx_packet_from_node_time", "from_node_id", desc("import_time")),
|
||||
Index("idx_packet_from_node_time_us", "from_node_id", desc("import_time_us")),
|
||||
)
|
||||
|
||||
@@ -86,7 +76,6 @@ class PacketSeen(Base):
|
||||
rx_snr: Mapped[float] = mapped_column(nullable=True)
|
||||
rx_rssi: Mapped[int] = mapped_column(nullable=True)
|
||||
topic: Mapped[str] = mapped_column(nullable=True)
|
||||
import_time: Mapped[datetime] = mapped_column(nullable=True)
|
||||
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
@@ -108,11 +97,25 @@ class Traceroute(Base):
|
||||
gateway_node_id: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
done: Mapped[bool] = mapped_column(nullable=True)
|
||||
route: Mapped[bytes] = mapped_column(nullable=True)
|
||||
import_time: Mapped[datetime] = mapped_column(nullable=True)
|
||||
route_return: Mapped[bytes] = mapped_column(nullable=True)
|
||||
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index("idx_traceroute_import_time", "import_time"),
|
||||
Index("idx_traceroute_packet_id", "packet_id"),
|
||||
Index("idx_traceroute_import_time_us", "import_time_us"),
|
||||
)
|
||||
|
||||
|
||||
class NodePublicKey(Base):
|
||||
__tablename__ = "node_public_key"
|
||||
|
||||
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
|
||||
node_id: Mapped[int] = mapped_column(BigInteger, nullable=False)
|
||||
public_key: Mapped[str] = mapped_column(nullable=False)
|
||||
first_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
last_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index("idx_node_public_key_node_id", "node_id"),
|
||||
Index("idx_node_public_key_public_key", "public_key"),
|
||||
)
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.engine.url import make_url
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
|
||||
|
||||
from meshview import models
|
||||
@@ -5,9 +7,26 @@ from meshview import models
|
||||
|
||||
def init_database(database_connection_string):
|
||||
global engine, async_session
|
||||
engine = create_async_engine(
|
||||
database_connection_string, echo=False, connect_args={"timeout": 900}
|
||||
)
|
||||
|
||||
url = make_url(database_connection_string)
|
||||
kwargs = {"echo": False}
|
||||
|
||||
if url.drivername.startswith("sqlite"):
|
||||
kwargs["connect_args"] = {"timeout": 900} # seconds
|
||||
|
||||
engine = create_async_engine(url, **kwargs)
|
||||
|
||||
# Enforce SQLite pragmas on every new DB connection
|
||||
if url.drivername.startswith("sqlite"):
|
||||
|
||||
@event.listens_for(engine.sync_engine, "connect")
|
||||
def _set_sqlite_pragmas(dbapi_conn, _):
|
||||
cursor = dbapi_conn.cursor()
|
||||
cursor.execute("PRAGMA journal_mode=WAL;")
|
||||
cursor.execute("PRAGMA busy_timeout=900000;") # ms
|
||||
cursor.execute("PRAGMA synchronous=NORMAL;")
|
||||
cursor.close()
|
||||
|
||||
async_session = async_sessionmaker(engine, expire_on_commit=False)
|
||||
|
||||
|
||||
|
||||
@@ -8,9 +8,11 @@ import aiomqtt
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from google.protobuf.message import DecodeError
|
||||
|
||||
from meshtastic.protobuf.mesh_pb2 import Data
|
||||
from meshtastic.protobuf.mqtt_pb2 import ServiceEnvelope
|
||||
from meshview.config import CONFIG
|
||||
|
||||
KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
|
||||
PRIMARY_KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
@@ -21,24 +23,94 @@ logging.basicConfig(
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def decrypt(packet):
|
||||
def _parse_skip_node_ids():
|
||||
mqtt_config = CONFIG.get("mqtt", {})
|
||||
raw_value = mqtt_config.get("skip_node_ids", "")
|
||||
if not raw_value:
|
||||
return set()
|
||||
|
||||
if isinstance(raw_value, str):
|
||||
raw_value = raw_value.strip()
|
||||
if not raw_value:
|
||||
return set()
|
||||
values = [v.strip() for v in raw_value.split(",") if v.strip()]
|
||||
else:
|
||||
values = [raw_value]
|
||||
|
||||
skip_ids = set()
|
||||
for value in values:
|
||||
try:
|
||||
skip_ids.add(int(value, 0))
|
||||
except (TypeError, ValueError):
|
||||
logger.warning("Invalid node id in mqtt.skip_node_ids: %s", value)
|
||||
return skip_ids
|
||||
|
||||
|
||||
def _strip_quotes(value):
|
||||
if len(value) >= 2 and value[0] == value[-1] and value[0] in ("'", '"'):
|
||||
return value[1:-1]
|
||||
return value
|
||||
|
||||
|
||||
def _parse_secondary_keys():
|
||||
mqtt_config = CONFIG.get("mqtt", {})
|
||||
raw_value = mqtt_config.get("secondary_keys", "")
|
||||
if not raw_value:
|
||||
return []
|
||||
|
||||
if isinstance(raw_value, str):
|
||||
raw_value = raw_value.strip()
|
||||
if not raw_value:
|
||||
return []
|
||||
values = [v.strip() for v in raw_value.split(",") if v.strip()]
|
||||
else:
|
||||
values = [raw_value]
|
||||
|
||||
keys = []
|
||||
for value in values:
|
||||
try:
|
||||
cleaned = _strip_quotes(str(value).strip())
|
||||
if cleaned:
|
||||
keys.append(base64.b64decode(cleaned))
|
||||
except (TypeError, ValueError):
|
||||
logger.warning("Invalid base64 key in mqtt.secondary_keys: %s", value)
|
||||
return keys
|
||||
|
||||
|
||||
SKIP_NODE_IDS = _parse_skip_node_ids()
|
||||
SECONDARY_KEYS = _parse_secondary_keys()
|
||||
|
||||
logger.info("Primary key: %s", PRIMARY_KEY)
|
||||
if SECONDARY_KEYS:
|
||||
logger.info("Secondary keys: %s", SECONDARY_KEYS)
|
||||
else:
|
||||
logger.info("Secondary keys: []")
|
||||
|
||||
|
||||
# Thank you to "Robert Grizzell" for the decryption code!
|
||||
# https://github.com/rgrizzell
|
||||
def decrypt(packet, key):
|
||||
if packet.HasField("decoded"):
|
||||
return
|
||||
return True
|
||||
packet_id = packet.id.to_bytes(8, "little")
|
||||
from_node_id = getattr(packet, "from").to_bytes(8, "little")
|
||||
nonce = packet_id + from_node_id
|
||||
|
||||
cipher = Cipher(algorithms.AES(KEY), modes.CTR(nonce))
|
||||
cipher = Cipher(algorithms.AES(key), modes.CTR(nonce))
|
||||
decryptor = cipher.decryptor()
|
||||
raw_proto = decryptor.update(packet.encrypted) + decryptor.finalize()
|
||||
try:
|
||||
packet.decoded.ParseFromString(raw_proto)
|
||||
data = Data()
|
||||
data.ParseFromString(raw_proto)
|
||||
packet.decoded.CopyFrom(data)
|
||||
except DecodeError:
|
||||
pass
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_passwd):
|
||||
identifier = str(random.getrandbits(16))
|
||||
keyring = [PRIMARY_KEY, *SECONDARY_KEYS]
|
||||
msg_count = 0
|
||||
start_time = None
|
||||
while True:
|
||||
@@ -65,14 +137,14 @@ async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_pa
|
||||
except DecodeError:
|
||||
continue
|
||||
|
||||
decrypt(envelope.packet)
|
||||
# print(envelope.packet.decoded)
|
||||
for key in keyring:
|
||||
if decrypt(envelope.packet, key):
|
||||
break
|
||||
if not envelope.packet.decoded:
|
||||
continue
|
||||
|
||||
# Skip packets from specific node
|
||||
# FIXME: make this configurable as a list of node IDs to skip
|
||||
if getattr(envelope.packet, "from", None) == 2144342101:
|
||||
# Skip packets from configured node IDs
|
||||
if getattr(envelope.packet, "from", None) in SKIP_NODE_IDS:
|
||||
continue
|
||||
|
||||
msg_count += 1
|
||||
|
||||
@@ -1,14 +1,21 @@
|
||||
import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from meshtastic.protobuf.config_pb2 import Config
|
||||
from meshtastic.protobuf.mesh_pb2 import HardwareModel
|
||||
from meshtastic.protobuf.portnums_pb2 import PortNum
|
||||
from meshview import decode_payload, mqtt_database
|
||||
from meshview.models import Node, Packet, PacketSeen, Traceroute
|
||||
from meshview.models import Node, NodePublicKey, Packet, PacketSeen, Traceroute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MQTT_GATEWAY_CACHE: set[int] = set()
|
||||
|
||||
|
||||
async def process_envelope(topic, env):
|
||||
@@ -37,8 +44,7 @@ async def process_envelope(topic, env):
|
||||
await session.execute(select(Node).where(Node.node_id == node_id))
|
||||
).scalar_one_or_none()
|
||||
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
|
||||
if node:
|
||||
node.node_id = node_id
|
||||
@@ -50,7 +56,6 @@ async def process_envelope(topic, env):
|
||||
node.last_lat = map_report.latitude_i
|
||||
node.last_long = map_report.longitude_i
|
||||
node.firmware = map_report.firmware_version
|
||||
node.last_update = now
|
||||
node.last_seen_us = now_us
|
||||
if node.first_seen_us is None:
|
||||
node.first_seen_us = now_us
|
||||
@@ -66,7 +71,6 @@ async def process_envelope(topic, env):
|
||||
firmware=map_report.firmware_version,
|
||||
last_lat=map_report.latitude_i,
|
||||
last_long=map_report.longitude_i,
|
||||
last_update=now,
|
||||
first_seen_us=now_us,
|
||||
last_seen_us=now_us,
|
||||
)
|
||||
@@ -84,24 +88,41 @@ async def process_envelope(topic, env):
|
||||
result = await session.execute(select(Packet).where(Packet.id == env.packet.id))
|
||||
packet = result.scalar_one_or_none()
|
||||
if not packet:
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
packet_values = {
|
||||
"id": env.packet.id,
|
||||
"portnum": env.packet.decoded.portnum,
|
||||
"from_node_id": getattr(env.packet, "from"),
|
||||
"to_node_id": env.packet.to,
|
||||
"payload": env.packet.SerializeToString(),
|
||||
"import_time_us": now_us,
|
||||
"channel": env.channel_id,
|
||||
}
|
||||
dialect = session.get_bind().dialect.name
|
||||
stmt = None
|
||||
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
stmt = (
|
||||
sqlite_insert(Packet)
|
||||
.values(
|
||||
id=env.packet.id,
|
||||
portnum=env.packet.decoded.portnum,
|
||||
from_node_id=getattr(env.packet, "from"),
|
||||
to_node_id=env.packet.to,
|
||||
payload=env.packet.SerializeToString(),
|
||||
import_time=now,
|
||||
import_time_us=now_us,
|
||||
channel=env.channel_id,
|
||||
if dialect == "sqlite":
|
||||
stmt = (
|
||||
sqlite_insert(Packet)
|
||||
.values(**packet_values)
|
||||
.on_conflict_do_nothing(index_elements=["id"])
|
||||
)
|
||||
.on_conflict_do_nothing(index_elements=["id"])
|
||||
)
|
||||
await session.execute(stmt)
|
||||
elif dialect == "postgresql":
|
||||
stmt = (
|
||||
pg_insert(Packet)
|
||||
.values(**packet_values)
|
||||
.on_conflict_do_nothing(index_elements=["id"])
|
||||
)
|
||||
|
||||
if stmt is not None:
|
||||
await session.execute(stmt)
|
||||
else:
|
||||
try:
|
||||
async with session.begin_nested():
|
||||
session.add(Packet(**packet_values))
|
||||
await session.flush()
|
||||
except IntegrityError:
|
||||
pass
|
||||
|
||||
# --- PacketSeen (no conflict handling here, normal insert)
|
||||
|
||||
@@ -112,6 +133,12 @@ async def process_envelope(topic, env):
|
||||
else:
|
||||
node_id = int(env.gateway_id[1:], 16)
|
||||
|
||||
if node_id not in MQTT_GATEWAY_CACHE:
|
||||
MQTT_GATEWAY_CACHE.add(node_id)
|
||||
await session.execute(
|
||||
update(Node).where(Node.node_id == node_id).values(is_mqtt_gateway=True)
|
||||
)
|
||||
|
||||
result = await session.execute(
|
||||
select(PacketSeen).where(
|
||||
PacketSeen.packet_id == env.packet.id,
|
||||
@@ -120,8 +147,7 @@ async def process_envelope(topic, env):
|
||||
)
|
||||
)
|
||||
if not result.scalar_one_or_none():
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
seen = PacketSeen(
|
||||
packet_id=env.packet.id,
|
||||
node_id=int(env.gateway_id[1:], 16),
|
||||
@@ -132,7 +158,6 @@ async def process_envelope(topic, env):
|
||||
hop_limit=env.packet.hop_limit,
|
||||
hop_start=env.packet.hop_start,
|
||||
topic=topic,
|
||||
import_time=now,
|
||||
import_time_us=now_us,
|
||||
)
|
||||
session.add(seen)
|
||||
@@ -164,8 +189,7 @@ async def process_envelope(topic, env):
|
||||
await session.execute(select(Node).where(Node.id == user.id))
|
||||
).scalar_one_or_none()
|
||||
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
|
||||
if node:
|
||||
node.node_id = node_id
|
||||
@@ -174,7 +198,6 @@ async def process_envelope(topic, env):
|
||||
node.hw_model = hw_model
|
||||
node.role = role
|
||||
node.channel = env.channel_id
|
||||
node.last_update = now
|
||||
node.last_seen_us = now_us
|
||||
if node.first_seen_us is None:
|
||||
node.first_seen_us = now_us
|
||||
@@ -187,11 +210,32 @@ async def process_envelope(topic, env):
|
||||
hw_model=hw_model,
|
||||
role=role,
|
||||
channel=env.channel_id,
|
||||
last_update=now,
|
||||
first_seen_us=now_us,
|
||||
last_seen_us=now_us,
|
||||
)
|
||||
session.add(node)
|
||||
|
||||
if user.public_key:
|
||||
public_key_hex = user.public_key.hex()
|
||||
existing_key = (
|
||||
await session.execute(
|
||||
select(NodePublicKey).where(
|
||||
NodePublicKey.node_id == node_id,
|
||||
NodePublicKey.public_key == public_key_hex,
|
||||
)
|
||||
)
|
||||
).scalar_one_or_none()
|
||||
|
||||
if existing_key:
|
||||
existing_key.last_seen_us = now_us
|
||||
else:
|
||||
new_key = NodePublicKey(
|
||||
node_id=node_id,
|
||||
public_key=public_key_hex,
|
||||
first_seen_us=now_us,
|
||||
last_seen_us=now_us,
|
||||
)
|
||||
session.add(new_key)
|
||||
except Exception as e:
|
||||
print(f"Error processing NODEINFO_APP: {e}")
|
||||
|
||||
@@ -206,11 +250,9 @@ async def process_envelope(topic, env):
|
||||
await session.execute(select(Node).where(Node.node_id == from_node_id))
|
||||
).scalar_one_or_none()
|
||||
if node:
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
node.last_lat = position.latitude_i
|
||||
node.last_long = position.longitude_i
|
||||
node.last_update = now
|
||||
node.last_seen_us = now_us
|
||||
if node.first_seen_us is None:
|
||||
node.first_seen_us = now_us
|
||||
@@ -220,18 +262,23 @@ async def process_envelope(topic, env):
|
||||
if env.packet.decoded.portnum == PortNum.TRACEROUTE_APP:
|
||||
packet_id = env.packet.id
|
||||
if packet_id is not None:
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
now_us = int(now.timestamp() * 1_000_000)
|
||||
now_us = int(time.time() * 1_000_000)
|
||||
session.add(
|
||||
Traceroute(
|
||||
packet_id=packet_id,
|
||||
route=env.packet.decoded.payload,
|
||||
done=not env.packet.decoded.want_response,
|
||||
gateway_node_id=int(env.gateway_id[1:], 16),
|
||||
import_time=now,
|
||||
import_time_us=now_us,
|
||||
)
|
||||
)
|
||||
|
||||
await session.commit()
|
||||
|
||||
|
||||
async def load_gateway_cache():
|
||||
async with mqtt_database.async_session() as session:
|
||||
result = await session.execute(
|
||||
select(Node.node_id).where(Node.is_mqtt_gateway == True) # noqa: E712
|
||||
)
|
||||
MQTT_GATEWAY_CACHE.update(result.scalars().all())
|
||||
|
||||
146
meshview/radio/coverage.py
Normal file
146
meshview/radio/coverage.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import math
|
||||
from functools import lru_cache
|
||||
|
||||
try:
|
||||
from pyitm import itm
|
||||
|
||||
ITM_AVAILABLE = True
|
||||
except Exception:
|
||||
itm = None
|
||||
ITM_AVAILABLE = False
|
||||
|
||||
DEFAULT_CLIMATE = 5 # Continental temperate
|
||||
DEFAULT_GROUND = 0.005 # Average ground conductivity
|
||||
DEFAULT_EPS_DIELECT = 15.0
|
||||
DEFAULT_DELTA_H = 90.0
|
||||
DEFAULT_RELIABILITY = 0.5
|
||||
DEFAULT_MIN_DBM = -130.0
|
||||
DEFAULT_MAX_DBM = -80.0
|
||||
DEFAULT_THRESHOLD_DBM = -120.0
|
||||
EARTH_RADIUS_KM = 6371.0
|
||||
BEARING_STEP_DEG = 5
|
||||
|
||||
|
||||
def destination_point(
|
||||
lat: float, lon: float, bearing_deg: float, distance_km: float
|
||||
) -> tuple[float, float]:
|
||||
lat1 = math.radians(lat)
|
||||
lon1 = math.radians(lon)
|
||||
bearing = math.radians(bearing_deg)
|
||||
|
||||
d = distance_km / EARTH_RADIUS_KM
|
||||
|
||||
lat2 = math.asin(
|
||||
math.sin(lat1) * math.cos(d) + math.cos(lat1) * math.sin(d) * math.cos(bearing)
|
||||
)
|
||||
|
||||
lon2 = lon1 + math.atan2(
|
||||
math.sin(bearing) * math.sin(d) * math.cos(lat1),
|
||||
math.cos(d) - math.sin(lat1) * math.sin(lat2),
|
||||
)
|
||||
|
||||
return math.degrees(lat2), math.degrees(lon2)
|
||||
|
||||
|
||||
@lru_cache(maxsize=512)
|
||||
def compute_coverage(
|
||||
lat: float,
|
||||
lon: float,
|
||||
freq_mhz: float,
|
||||
tx_dbm: float,
|
||||
tx_height_m: float,
|
||||
rx_height_m: float,
|
||||
radius_km: float,
|
||||
step_km: float,
|
||||
reliability: float,
|
||||
) -> list[tuple[float, float, float]]:
|
||||
if not ITM_AVAILABLE:
|
||||
return []
|
||||
|
||||
points = []
|
||||
distance = max(step_km, 1.0)
|
||||
while distance <= radius_km:
|
||||
for bearing in range(0, 360, BEARING_STEP_DEG):
|
||||
rx_lat, rx_lon = destination_point(lat, lon, bearing, distance)
|
||||
try:
|
||||
loss_db, _ = itm.area(
|
||||
ModVar=2,
|
||||
deltaH=DEFAULT_DELTA_H,
|
||||
tht_m=tx_height_m,
|
||||
rht_m=rx_height_m,
|
||||
dist_km=distance,
|
||||
TSiteCriteria=0,
|
||||
RSiteCriteria=0,
|
||||
eps_dielect=DEFAULT_EPS_DIELECT,
|
||||
sgm_conductivity=DEFAULT_GROUND,
|
||||
eno_ns_surfref=301,
|
||||
frq_mhz=freq_mhz,
|
||||
radio_climate=DEFAULT_CLIMATE,
|
||||
pol=1,
|
||||
pctTime=reliability,
|
||||
pctLoc=0.5,
|
||||
pctConf=0.5,
|
||||
)
|
||||
except itm.InputError:
|
||||
continue
|
||||
rx_dbm = tx_dbm - loss_db
|
||||
points.append((rx_lat, rx_lon, rx_dbm))
|
||||
distance += step_km
|
||||
|
||||
return points
|
||||
|
||||
|
||||
@lru_cache(maxsize=512)
|
||||
def compute_perimeter(
|
||||
lat: float,
|
||||
lon: float,
|
||||
freq_mhz: float,
|
||||
tx_dbm: float,
|
||||
tx_height_m: float,
|
||||
rx_height_m: float,
|
||||
radius_km: float,
|
||||
step_km: float,
|
||||
reliability: float,
|
||||
threshold_dbm: float,
|
||||
) -> list[tuple[float, float]]:
|
||||
if not ITM_AVAILABLE:
|
||||
return []
|
||||
|
||||
perimeter = []
|
||||
distance = max(step_km, 1.0)
|
||||
for bearing in range(0, 360, BEARING_STEP_DEG):
|
||||
last_point = None
|
||||
dist = distance
|
||||
while dist <= radius_km:
|
||||
try:
|
||||
loss_db, _ = itm.area(
|
||||
ModVar=2,
|
||||
deltaH=DEFAULT_DELTA_H,
|
||||
tht_m=tx_height_m,
|
||||
rht_m=rx_height_m,
|
||||
dist_km=dist,
|
||||
TSiteCriteria=0,
|
||||
RSiteCriteria=0,
|
||||
eps_dielect=DEFAULT_EPS_DIELECT,
|
||||
sgm_conductivity=DEFAULT_GROUND,
|
||||
eno_ns_surfref=301,
|
||||
frq_mhz=freq_mhz,
|
||||
radio_climate=DEFAULT_CLIMATE,
|
||||
pol=1,
|
||||
pctTime=reliability,
|
||||
pctLoc=0.5,
|
||||
pctConf=0.5,
|
||||
)
|
||||
except itm.InputError:
|
||||
dist += step_km
|
||||
continue
|
||||
|
||||
rx_dbm = tx_dbm - loss_db
|
||||
if rx_dbm >= threshold_dbm:
|
||||
last_point = destination_point(lat, lon, bearing, dist)
|
||||
dist += step_km
|
||||
|
||||
if last_point:
|
||||
perimeter.append(last_point)
|
||||
|
||||
return perimeter
|
||||
@@ -44,6 +44,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
|
||||
|
||||
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js" crossorigin></script>
|
||||
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js" crossorigin></script>
|
||||
<script src="/static/portmaps.js"></script>
|
||||
|
||||
<script>
|
||||
(async function(){
|
||||
@@ -75,8 +76,8 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
|
||||
return color;
|
||||
}
|
||||
|
||||
function timeAgo(dateStr){
|
||||
const diff = Date.now() - new Date(dateStr);
|
||||
function timeAgoFromUs(us){
|
||||
const diff = Date.now() - (us / 1000);
|
||||
const s=Math.floor(diff/1000), m=Math.floor(s/60), h=Math.floor(m/60), d=Math.floor(h/24);
|
||||
if(d>0) return d+'d'; if(h>0) return h+'h'; if(m>0) return m+'m'; return s+'s';
|
||||
}
|
||||
@@ -97,7 +98,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
|
||||
const channels = new Set();
|
||||
const activeBlinks = new Map();
|
||||
|
||||
const portMap = {1:"Text",67:"Telemetry",3:"Position",70:"Traceroute",4:"Node Info",71:"Neighbour Info",73:"Map Report"};
|
||||
const portMap = window.PORT_LABEL_MAP;
|
||||
|
||||
nodes.forEach(node=>{
|
||||
if(isInvalidCoord(node)) return;
|
||||
@@ -118,7 +119,7 @@ body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
|
||||
<b>Channel:</b> ${node.channel}<br>
|
||||
<b>Model:</b> ${node.hw_model}<br>
|
||||
<b>Role:</b> ${node.role}<br>`;
|
||||
if(node.last_update) popupContent+=`<b>Last seen:</b> ${timeAgo(node.last_update)}<br>`;
|
||||
if(node.last_seen_us) popupContent+=`<b>Last seen:</b> ${timeAgoFromUs(node.last_seen_us)}<br>`;
|
||||
if(node.firmware) popupContent+=`<b>Firmware:</b> ${node.firmware}<br>`;
|
||||
|
||||
marker.on('click', e=>{
|
||||
|
||||
77
meshview/static/portmaps.js
Normal file
77
meshview/static/portmaps.js
Normal file
@@ -0,0 +1,77 @@
|
||||
// Shared port label/color definitions for UI pages.
|
||||
// Port numbers defined in: https://github.com/meshtastic/protobufs/blob/master/meshtastic/portnums.proto
|
||||
window.PORT_LABEL_MAP = {
|
||||
0: "Unknown",
|
||||
1: "Text",
|
||||
2: "Remote Hardware",
|
||||
3: "Position",
|
||||
4: "Node Info",
|
||||
5: "Routing",
|
||||
6: "Admin",
|
||||
7: "Text (Compressed)",
|
||||
8: "Waypoint",
|
||||
9: "Audio",
|
||||
10: "Detection Sensor",
|
||||
11: "Alert",
|
||||
12: "Key Verification",
|
||||
32: "Reply",
|
||||
33: "IP Tunnel",
|
||||
34: "Paxcounter",
|
||||
35: "Store Forward++",
|
||||
36: "Node Status",
|
||||
64: "Serial",
|
||||
65: "Store & Forward",
|
||||
66: "Range Test",
|
||||
67: "Telemetry",
|
||||
68: "ZPS",
|
||||
69: "Simulator",
|
||||
70: "Traceroute",
|
||||
71: "Neighbor",
|
||||
72: "ATAK",
|
||||
73: "Map Report",
|
||||
74: "Power Stress",
|
||||
76: "Reticulum Tunnel",
|
||||
77: "Cayenne",
|
||||
256: "Private App",
|
||||
257: "ATAK Forwarder",
|
||||
};
|
||||
|
||||
window.PORT_COLOR_MAP = {
|
||||
0: "#6c757d", // gray - Unknown
|
||||
1: "#1f77b4", // blue - Text
|
||||
2: "#795548", // brown - Remote Hardware
|
||||
3: "#2ca02c", // green - Position
|
||||
4: "#ffbf00", // yellow - Node Info
|
||||
5: "#ff7f0e", // orange - Routing
|
||||
6: "#20c997", // teal - Admin
|
||||
7: "#6a51a3", // purple - Text (Compressed)
|
||||
8: "#fd7e14", // orange - Waypoint
|
||||
9: "#e91e63", // pink - Audio
|
||||
10: "#ff9800", // amber - Detection Sensor
|
||||
11: "#f44336", // bright red - Alert
|
||||
12: "#9c27b0", // purple - Key Verification
|
||||
32: "#00bcd4", // cyan - Reply
|
||||
33: "#607d8b", // blue-gray - IP Tunnel
|
||||
34: "#8d6e63", // brown-gray - Paxcounter
|
||||
35: "#8bc34a", // light green - Store Forward++
|
||||
36: "#4caf50", // green - Node Status
|
||||
64: "#9e9e9e", // gray - Serial
|
||||
65: "#6610f2", // indigo - Store & Forward
|
||||
66: "#cddc39", // lime - Range Test
|
||||
67: "#17a2b8", // info blue - Telemetry
|
||||
68: "#3f51b5", // indigo - ZPS
|
||||
69: "#673ab7", // deep purple - Simulator
|
||||
70: "#f44336", // bright red - Traceroute
|
||||
71: "#e377c2", // pink - Neighbor
|
||||
72: "#2196f3", // blue - ATAK
|
||||
73: "#9999ff", // light purple - Map Report
|
||||
74: "#ff5722", // deep orange - Power Stress
|
||||
76: "#009688", // teal - Reticulum Tunnel
|
||||
77: "#4db6ac", // teal accent - Cayenne
|
||||
256: "#757575", // dark gray - Private App
|
||||
257: "#1976d2", // blue - ATAK Forwarder
|
||||
};
|
||||
|
||||
// Aliases for pages that expect different names.
|
||||
window.PORT_MAP = window.PORT_LABEL_MAP;
|
||||
window.PORT_COLORS = window.PORT_COLOR_MAP;
|
||||
@@ -1,10 +1,14 @@
|
||||
from datetime import datetime, timedelta
|
||||
from sqlalchemy import select, and_, or_, func, cast, Text
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from sqlalchemy import Text, and_, cast, func, or_, select
|
||||
from sqlalchemy.orm import lazyload
|
||||
|
||||
from meshview import database, models
|
||||
from meshview.models import Node, Packet, PacketSeen, Traceroute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_node(node_id):
|
||||
async with database.async_session() as session:
|
||||
@@ -91,8 +95,10 @@ async def get_packets_from(node_id=None, portnum=None, since=None, limit=500):
|
||||
if portnum:
|
||||
q = q.where(Packet.portnum == portnum)
|
||||
if since:
|
||||
q = q.where(Packet.import_time > (datetime.now() - since))
|
||||
result = await session.execute(q.limit(limit).order_by(Packet.import_time.desc()))
|
||||
now_us = int(datetime.now().timestamp() * 1_000_000)
|
||||
start_us = now_us - int(since.total_seconds() * 1_000_000)
|
||||
q = q.where(Packet.import_time_us > start_us)
|
||||
result = await session.execute(q.limit(limit).order_by(Packet.import_time_us.desc()))
|
||||
return result.scalars()
|
||||
|
||||
|
||||
@@ -108,7 +114,7 @@ async def get_packets_seen(packet_id):
|
||||
result = await session.execute(
|
||||
select(PacketSeen)
|
||||
.where(PacketSeen.packet_id == packet_id)
|
||||
.order_by(PacketSeen.import_time.desc())
|
||||
.order_by(PacketSeen.import_time_us.desc())
|
||||
)
|
||||
return result.scalars()
|
||||
|
||||
@@ -129,18 +135,21 @@ async def get_traceroute(packet_id):
|
||||
result = await session.execute(
|
||||
select(Traceroute)
|
||||
.where(Traceroute.packet_id == packet_id)
|
||||
.order_by(Traceroute.import_time)
|
||||
.order_by(Traceroute.import_time_us)
|
||||
)
|
||||
return result.scalars()
|
||||
|
||||
|
||||
async def get_traceroutes(since):
|
||||
if isinstance(since, datetime):
|
||||
since_us = int(since.timestamp() * 1_000_000)
|
||||
else:
|
||||
since_us = int(since)
|
||||
async with database.async_session() as session:
|
||||
stmt = (
|
||||
select(Traceroute)
|
||||
.join(Packet)
|
||||
.where(Traceroute.import_time > since)
|
||||
.order_by(Traceroute.import_time)
|
||||
.where(Traceroute.import_time_us > since_us)
|
||||
.order_by(Traceroute.import_time_us)
|
||||
)
|
||||
stream = await session.stream_scalars(stmt)
|
||||
async for tr in stream:
|
||||
@@ -148,6 +157,8 @@ async def get_traceroutes(since):
|
||||
|
||||
|
||||
async def get_mqtt_neighbors(since):
|
||||
now_us = int(datetime.now().timestamp() * 1_000_000)
|
||||
start_us = now_us - int(since.total_seconds() * 1_000_000)
|
||||
async with database.async_session() as session:
|
||||
result = await session.execute(
|
||||
select(PacketSeen, Packet)
|
||||
@@ -155,7 +166,7 @@ async def get_mqtt_neighbors(since):
|
||||
.where(
|
||||
(PacketSeen.hop_limit == PacketSeen.hop_start)
|
||||
& (PacketSeen.hop_start != 0)
|
||||
& (PacketSeen.import_time > (datetime.now() - since))
|
||||
& (PacketSeen.import_time_us > start_us)
|
||||
)
|
||||
.options(
|
||||
lazyload(Packet.from_node),
|
||||
@@ -168,9 +179,9 @@ async def get_mqtt_neighbors(since):
|
||||
async def get_total_node_count(channel: str = None) -> int:
|
||||
try:
|
||||
async with database.async_session() as session:
|
||||
q = select(func.count(Node.id)).where(
|
||||
Node.last_update > datetime.now() - timedelta(days=1)
|
||||
)
|
||||
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
|
||||
cutoff_us = now_us - 86400 * 1_000_000
|
||||
q = select(func.count(Node.id)).where(Node.last_seen_us > cutoff_us)
|
||||
|
||||
if channel:
|
||||
q = q.where(Node.channel == channel)
|
||||
@@ -185,26 +196,32 @@ async def get_total_node_count(channel: str = None) -> int:
|
||||
async def get_top_traffic_nodes():
|
||||
try:
|
||||
async with database.async_session() as session:
|
||||
result = await session.execute(
|
||||
text("""
|
||||
SELECT
|
||||
n.node_id,
|
||||
n.long_name,
|
||||
n.short_name,
|
||||
n.channel,
|
||||
COUNT(DISTINCT p.id) AS total_packets_sent,
|
||||
COUNT(ps.packet_id) AS total_times_seen
|
||||
FROM node n
|
||||
LEFT JOIN packet p ON n.node_id = p.from_node_id
|
||||
AND p.import_time >= DATETIME('now', 'localtime', '-24 hours')
|
||||
LEFT JOIN packet_seen ps ON p.id = ps.packet_id
|
||||
GROUP BY n.node_id, n.long_name, n.short_name
|
||||
HAVING total_packets_sent > 0
|
||||
ORDER BY total_times_seen DESC;
|
||||
""")
|
||||
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
|
||||
cutoff_us = now_us - 86400 * 1_000_000
|
||||
total_packets_sent = func.count(func.distinct(Packet.id)).label("total_packets_sent")
|
||||
total_times_seen = func.count(PacketSeen.packet_id).label("total_times_seen")
|
||||
|
||||
stmt = (
|
||||
select(
|
||||
Node.node_id,
|
||||
Node.long_name,
|
||||
Node.short_name,
|
||||
Node.channel,
|
||||
total_packets_sent,
|
||||
total_times_seen,
|
||||
)
|
||||
.select_from(Node)
|
||||
.outerjoin(
|
||||
Packet,
|
||||
(Packet.from_node_id == Node.node_id) & (Packet.import_time_us >= cutoff_us),
|
||||
)
|
||||
.outerjoin(PacketSeen, PacketSeen.packet_id == Packet.id)
|
||||
.group_by(Node.node_id, Node.long_name, Node.short_name, Node.channel)
|
||||
.having(total_packets_sent > 0)
|
||||
.order_by(total_times_seen.desc())
|
||||
)
|
||||
|
||||
rows = result.fetchall()
|
||||
rows = (await session.execute(stmt)).all()
|
||||
|
||||
nodes = [
|
||||
{
|
||||
@@ -227,33 +244,30 @@ async def get_top_traffic_nodes():
|
||||
async def get_node_traffic(node_id: int):
|
||||
try:
|
||||
async with database.async_session() as session:
|
||||
result = await session.execute(
|
||||
text("""
|
||||
SELECT
|
||||
node.long_name, packet.portnum,
|
||||
COUNT(*) AS packet_count
|
||||
FROM packet
|
||||
JOIN node ON packet.from_node_id = node.node_id
|
||||
WHERE node.node_id = :node_id
|
||||
AND packet.import_time >= DATETIME('now', 'localtime', '-24 hours')
|
||||
GROUP BY packet.portnum
|
||||
ORDER BY packet_count DESC;
|
||||
"""),
|
||||
{"node_id": node_id},
|
||||
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
|
||||
cutoff_us = now_us - 86400 * 1_000_000
|
||||
packet_count = func.count().label("packet_count")
|
||||
|
||||
stmt = (
|
||||
select(Node.long_name, Packet.portnum, packet_count)
|
||||
.select_from(Packet)
|
||||
.join(Node, Packet.from_node_id == Node.node_id)
|
||||
.where(Node.node_id == node_id)
|
||||
.where(Packet.import_time_us >= cutoff_us)
|
||||
.group_by(Node.long_name, Packet.portnum)
|
||||
.order_by(packet_count.desc())
|
||||
)
|
||||
|
||||
# Map the result to include node.long_name and packet data
|
||||
traffic_data = [
|
||||
result = await session.execute(stmt)
|
||||
return [
|
||||
{
|
||||
"long_name": row[0], # node.long_name
|
||||
"portnum": row[1], # packet.portnum
|
||||
"packet_count": row[2], # COUNT(*) as packet_count
|
||||
"long_name": row.long_name,
|
||||
"portnum": row.portnum,
|
||||
"packet_count": row.packet_count,
|
||||
}
|
||||
for row in result.all()
|
||||
]
|
||||
|
||||
return traffic_data
|
||||
|
||||
except Exception as e:
|
||||
# Log the error or handle it as needed
|
||||
print(f"Error fetching node traffic: {str(e)}")
|
||||
@@ -282,7 +296,11 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
|
||||
|
||||
# Apply filters based on provided parameters
|
||||
if node_id is not None:
|
||||
query = query.where(Node.node_id == node_id)
|
||||
try:
|
||||
node_id_int = int(node_id)
|
||||
except (TypeError, ValueError):
|
||||
node_id_int = node_id
|
||||
query = query.where(Node.node_id == node_id_int)
|
||||
if role is not None:
|
||||
query = query.where(Node.role == role.upper()) # Ensure role is uppercase
|
||||
if channel is not None:
|
||||
@@ -291,10 +309,12 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
|
||||
query = query.where(Node.hw_model == hw_model)
|
||||
|
||||
if days_active is not None:
|
||||
query = query.where(Node.last_update > datetime.now() - timedelta(days_active))
|
||||
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000) # noqa: UP017
|
||||
cutoff_us = now_us - int(timedelta(days_active).total_seconds() * 1_000_000)
|
||||
query = query.where(Node.last_seen_us > cutoff_us)
|
||||
|
||||
# Exclude nodes where last_update is an empty string
|
||||
query = query.where(Node.last_update != "")
|
||||
# Exclude nodes with missing last_seen_us
|
||||
query = query.where(Node.last_seen_us.is_not(None))
|
||||
|
||||
# Order results by long_name in ascending order
|
||||
query = query.order_by(Node.short_name.asc())
|
||||
@@ -305,7 +325,7 @@ async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_a
|
||||
return nodes # Return the list of nodes
|
||||
|
||||
except Exception:
|
||||
print("error reading DB") # Consider using logging instead of print
|
||||
logger.exception("error reading DB")
|
||||
return [] # Return an empty list in case of failure
|
||||
|
||||
|
||||
@@ -317,22 +337,36 @@ async def get_packet_stats(
|
||||
to_node: int | None = None,
|
||||
from_node: int | None = None,
|
||||
):
|
||||
now = datetime.now()
|
||||
now = datetime.now(timezone.utc) # noqa: UP017
|
||||
|
||||
if period_type == "hour":
|
||||
start_time = now - timedelta(hours=length)
|
||||
time_format = '%Y-%m-%d %H:00'
|
||||
time_format_sqlite = "%Y-%m-%d %H:00"
|
||||
time_format_pg = "YYYY-MM-DD HH24:00"
|
||||
elif period_type == "day":
|
||||
start_time = now - timedelta(days=length)
|
||||
time_format = '%Y-%m-%d'
|
||||
time_format_sqlite = "%Y-%m-%d"
|
||||
time_format_pg = "YYYY-MM-DD"
|
||||
else:
|
||||
raise ValueError("period_type must be 'hour' or 'day'")
|
||||
|
||||
async with database.async_session() as session:
|
||||
dialect = session.get_bind().dialect.name
|
||||
if dialect == "postgresql":
|
||||
period_expr = func.to_char(
|
||||
func.to_timestamp(Packet.import_time_us / 1_000_000.0),
|
||||
time_format_pg,
|
||||
)
|
||||
else:
|
||||
period_expr = func.strftime(
|
||||
time_format_sqlite,
|
||||
func.datetime(Packet.import_time_us / 1_000_000, "unixepoch"),
|
||||
)
|
||||
|
||||
q = select(
|
||||
func.strftime(time_format, Packet.import_time).label('period'),
|
||||
func.count().label('count'),
|
||||
).where(Packet.import_time >= start_time)
|
||||
period_expr.label("period"),
|
||||
func.count().label("count"),
|
||||
).where(Packet.import_time_us >= int(start_time.timestamp() * 1_000_000))
|
||||
|
||||
# Filters
|
||||
if channel:
|
||||
|
||||
@@ -115,6 +115,7 @@
|
||||
|
||||
</div>
|
||||
|
||||
<script src="/static/portmaps.js"></script>
|
||||
<script>
|
||||
/* ======================================================
|
||||
FIREHOSE TRANSLATION SYSTEM (isolated from base)
|
||||
@@ -177,41 +178,8 @@ function nodeName(id) {
|
||||
/* ======================================================
|
||||
PORT COLORS & NAMES
|
||||
====================================================== */
|
||||
const PORT_MAP = {
|
||||
0: "UNKNOWN APP",
|
||||
1: "Text Message",
|
||||
3: "Position",
|
||||
4: "Node Info",
|
||||
5: "Routing",
|
||||
6: "Administration",
|
||||
8: "Waypoint",
|
||||
65: "Store Forward",
|
||||
67: "Telemetry",
|
||||
70: "Trace Route",
|
||||
71: "Neighbor Info"
|
||||
};
|
||||
|
||||
const PORT_COLORS = {
|
||||
0: "#6c757d",
|
||||
1: "#007bff",
|
||||
3: "#28a745",
|
||||
4: "#ffc107",
|
||||
5: "#dc3545",
|
||||
6: "#20c997",
|
||||
65: "#6610f2",
|
||||
67: "#17a2b8",
|
||||
68: "#fd7e14",
|
||||
69: "#6f42c1",
|
||||
70: "#ff4444",
|
||||
71: "#ff66cc",
|
||||
72: "#00cc99",
|
||||
73: "#9999ff",
|
||||
74: "#cc00cc",
|
||||
75: "#ffbb33",
|
||||
76: "#00bcd4",
|
||||
77: "#8bc34a",
|
||||
78: "#795548"
|
||||
};
|
||||
const PORT_MAP = window.PORT_MAP || {};
|
||||
const PORT_COLORS = window.PORT_COLORS || {};
|
||||
|
||||
function portLabel(portnum, payload, linksHtml) {
|
||||
const name = PORT_MAP[portnum] || "Unknown";
|
||||
@@ -233,13 +201,37 @@ function portLabel(portnum, payload, linksHtml) {
|
||||
/* ======================================================
|
||||
TIME FORMAT
|
||||
====================================================== */
|
||||
function formatLocalTime(importTimeUs) {
|
||||
const ms = importTimeUs / 1000;
|
||||
return new Date(ms).toLocaleTimeString([], {
|
||||
function formatTimes(importTimeUs) {
|
||||
const ms = Number(importTimeUs) / 1000;
|
||||
if (!Number.isFinite(ms)) {
|
||||
return { local: "—", utc: "—", epoch: "—" };
|
||||
}
|
||||
const date = new Date(ms);
|
||||
const local = date.toLocaleTimeString([], {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit"
|
||||
second: "2-digit",
|
||||
timeZoneName: "short"
|
||||
});
|
||||
const utc = date.toLocaleTimeString([], {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit",
|
||||
timeZone: "UTC",
|
||||
timeZoneName: "short"
|
||||
});
|
||||
return { local, utc, epoch: String(importTimeUs) };
|
||||
}
|
||||
|
||||
function logPacketTimes(packet) {
|
||||
const times = formatTimes(packet.import_time_us);
|
||||
console.log(
|
||||
"[firehose] packet time",
|
||||
"id=" + packet.id,
|
||||
"epoch_us=" + times.epoch,
|
||||
"local=" + times.local,
|
||||
"utc=" + times.utc
|
||||
);
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
@@ -261,7 +253,7 @@ async function fetchUpdates() {
|
||||
if (updatesPaused) return;
|
||||
|
||||
const url = new URL("/api/packets", window.location.origin);
|
||||
url.searchParams.set("limit", 50);
|
||||
url.searchParams.set("limit", 100);
|
||||
|
||||
if (lastImportTimeUs)
|
||||
url.searchParams.set("since", lastImportTimeUs);
|
||||
@@ -277,6 +269,7 @@ async function fetchUpdates() {
|
||||
const list = document.getElementById("packet_list");
|
||||
|
||||
for (const pkt of packets.reverse()) {
|
||||
logPacketTimes(pkt);
|
||||
|
||||
/* FROM — includes translation */
|
||||
const from =
|
||||
@@ -336,7 +329,9 @@ async function fetchUpdates() {
|
||||
const html = `
|
||||
<tr class="packet-row">
|
||||
|
||||
<td>${formatLocalTime(pkt.import_time_us)}</td>
|
||||
<td>
|
||||
${formatTimes(pkt.import_time_us).local}<br>
|
||||
</td>
|
||||
|
||||
<td>
|
||||
<span class="toggle-btn">▶</span>
|
||||
|
||||
@@ -24,12 +24,70 @@
|
||||
#reset-filters-button:active { background-color:#c41e0d; }
|
||||
|
||||
.blinking-tooltip { background:white;color:black;border:1px solid black;border-radius:4px;padding:2px 5px; }
|
||||
|
||||
#map-wrapper {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: calc(100vh - 270px);
|
||||
}
|
||||
#map {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#unmapped-packets {
|
||||
position: absolute;
|
||||
bottom: 30px;
|
||||
right: 15px;
|
||||
z-index: 600;
|
||||
width: 220px;
|
||||
padding: 6px 8px;
|
||||
background: rgba(255, 255, 255, 0.95);
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
text-align: left;
|
||||
box-shadow: 0 0 10px rgba(0,0,0,0.2);
|
||||
pointer-events: auto;
|
||||
}
|
||||
#unmapped-packets h3 {
|
||||
margin: 0 0 6px;
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
color: #000;
|
||||
}
|
||||
#unmapped-list {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
max-height: 120px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
#unmapped-list li {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
padding: 3px 0;
|
||||
border-bottom: 1px dotted #e0e0e0;
|
||||
}
|
||||
#unmapped-list li:last-child { border-bottom: none; }
|
||||
.unmapped-node { font-weight: 400; color: #000; }
|
||||
.unmapped-empty { color: #666; font-style: italic; }
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
|
||||
<div id="map" style="width:100%; height:calc(100vh - 270px)"></div>
|
||||
<div id="map-wrapper">
|
||||
<div id="map"></div>
|
||||
|
||||
<div id="unmapped-packets">
|
||||
<h3 data-translate-lang="unmapped_packets_title">Unmapped Packets</h3>
|
||||
<ul id="unmapped-list">
|
||||
<li class="unmapped-empty" data-translate-lang="unmapped_packets_empty">
|
||||
No recent unmapped packets.
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="map-legend"
|
||||
class="legend"
|
||||
@@ -52,6 +110,8 @@
|
||||
<div id="filter-container">
|
||||
<input type="checkbox" class="filter-checkbox" id="filter-routers-only">
|
||||
<span data-translate-lang="show_routers_only">Show Routers Only</span>
|
||||
<input type="checkbox" class="filter-checkbox" id="filter-mqtt-only">
|
||||
<span data-translate-lang="show_mqtt_only">Show MQTT Gateways Only</span>
|
||||
</div>
|
||||
|
||||
<div style="text-align:center;margin-top:5px;">
|
||||
@@ -70,6 +130,7 @@
|
||||
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js"
|
||||
integrity="sha384-FhPn/2P/fJGhQLeNWDn9B/2Gml2bPOrKJwFqJXgR3xOPYxWg5mYQ5XZdhUSugZT0"
|
||||
crossorigin></script>
|
||||
<script src="/static/portmaps.js"></script>
|
||||
|
||||
<script>
|
||||
/* ======================================================
|
||||
@@ -117,16 +178,11 @@ var nodes = [], markers = {}, markerById = {}, nodeMap = new Map();
|
||||
var edgeLayer = L.layerGroup().addTo(map), selectedNodeId = null;
|
||||
var activeBlinks = new Map(), lastImportTime = null;
|
||||
var mapInterval = 0;
|
||||
var unmappedPackets = [];
|
||||
const UNMAPPED_LIMIT = 50;
|
||||
const UNMAPPED_TTL_MS = 5000;
|
||||
|
||||
const portMap = {
|
||||
1:"Text",
|
||||
67:"Telemetry",
|
||||
3:"Position",
|
||||
70:"Traceroute",
|
||||
4:"Node Info",
|
||||
71:"Neighbour Info",
|
||||
73:"Map Report"
|
||||
};
|
||||
const portMap = window.PORT_LABEL_MAP;
|
||||
|
||||
const palette = ["#e6194b","#4363d8","#f58231","#911eb4","#46f0f0","#f032e6","#bcf60c","#fabebe",
|
||||
"#008080","#e6beff","#9a6324","#fffac8","#800000","#aaffc3","#808000","#ffd8b1",
|
||||
@@ -140,8 +196,8 @@ map.on("popupopen", function (e) {
|
||||
if (popupEl) applyTranslationsMap(popupEl);
|
||||
});
|
||||
|
||||
function timeAgo(date){
|
||||
const diff = Date.now() - new Date(date);
|
||||
function timeAgoFromUs(us){
|
||||
const diff = Date.now() - (us / 1000);
|
||||
const s = Math.floor(diff/1000), m = Math.floor(s/60),
|
||||
h = Math.floor(m/60), d = Math.floor(h/24);
|
||||
return d>0?d+"d":h>0?h+"h":m>0?m+"m":s+"s";
|
||||
@@ -154,11 +210,37 @@ function hashToColor(str){
|
||||
return c;
|
||||
}
|
||||
|
||||
function hashToUnit(str){
|
||||
let h = 2166136261;
|
||||
for(let i=0;i<str.length;i++){
|
||||
h ^= str.charCodeAt(i);
|
||||
h = Math.imul(h, 16777619);
|
||||
}
|
||||
return (h >>> 0) / 0xffffffff;
|
||||
}
|
||||
|
||||
function jitterLatLng(lat, lon, key){
|
||||
const meters = 15; // small, visually separates overlaps
|
||||
const angle = hashToUnit(String(key)) * Math.PI * 2;
|
||||
const r = meters * (0.3 + 0.7 * hashToUnit(`r:${key}`));
|
||||
const dLat = (r * Math.cos(angle)) / 111320;
|
||||
const dLon = (r * Math.sin(angle)) / (111320 * Math.cos(lat * Math.PI / 180));
|
||||
return [lat + dLat, lon + dLon];
|
||||
}
|
||||
|
||||
function isInvalidCoord(n){
|
||||
return !n || !n.lat || !n.long || n.lat === 0 || n.long === 0 ||
|
||||
Number.isNaN(n.lat) || Number.isNaN(n.long);
|
||||
}
|
||||
|
||||
function getNodeLatLng(n){
|
||||
const marker = markerById[n.key];
|
||||
if(marker){
|
||||
return marker.getLatLng();
|
||||
}
|
||||
return { lat: n.lat, lng: n.long };
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
PACKET FETCHING (unchanged)
|
||||
====================================================== */
|
||||
@@ -191,7 +273,11 @@ function fetchNewPackets(){
|
||||
|
||||
const marker = markerById[pkt.from_node_id];
|
||||
const nodeData = nodeMap.get(pkt.from_node_id);
|
||||
if(marker && nodeData) blinkNode(marker,nodeData.long_name,pkt.portnum);
|
||||
if(marker && nodeData) {
|
||||
blinkNode(marker,nodeData.long_name,pkt.portnum);
|
||||
} else {
|
||||
addUnmappedPacket(pkt, nodeData);
|
||||
}
|
||||
});
|
||||
|
||||
lastImportTime = latest;
|
||||
@@ -289,7 +375,8 @@ fetch('/api/nodes?days_active=3')
|
||||
hw_model: n.hw_model || "",
|
||||
role: n.role || "",
|
||||
firmware: n.firmware || "",
|
||||
last_update: n.last_update || "",
|
||||
last_seen_us: n.last_seen_us || null,
|
||||
is_mqtt_gateway: n.is_mqtt_gateway === true,
|
||||
isRouter: (n.role||"").toLowerCase().includes("router")
|
||||
}));
|
||||
|
||||
@@ -313,7 +400,8 @@ function renderNodesOnMap(){
|
||||
|
||||
const color = hashToColor(node.channel);
|
||||
|
||||
const marker = L.circleMarker([node.lat,node.long], {
|
||||
const [jLat, jLon] = jitterLatLng(node.lat, node.long, node.key);
|
||||
const marker = L.circleMarker([jLat,jLon], {
|
||||
radius: node.isRouter ? 9 : 7,
|
||||
color: "white",
|
||||
fillColor: color,
|
||||
@@ -331,10 +419,13 @@ function renderNodesOnMap(){
|
||||
<b data-translate-lang="channel_label"></b> ${node.channel}<br>
|
||||
<b data-translate-lang="model_label"></b> ${node.hw_model}<br>
|
||||
<b data-translate-lang="role_label"></b> ${node.role}<br>
|
||||
<b data-translate-lang="mqtt_gateway"></b> ${
|
||||
node.is_mqtt_gateway ? (mapTranslations.yes || "Yes") : (mapTranslations.no || "No")
|
||||
}<br>
|
||||
|
||||
${
|
||||
node.last_update
|
||||
? `<b data-translate-lang="last_seen"></b> ${timeAgo(node.last_update)}<br>`
|
||||
node.last_seen_us
|
||||
? `<b data-translate-lang="last_seen"></b> ${timeAgoFromUs(node.last_seen_us)}<br>`
|
||||
: ""
|
||||
}
|
||||
|
||||
@@ -354,6 +445,70 @@ function renderNodesOnMap(){
|
||||
setTimeout(() => applyTranslationsMap(), 50);
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
UNMAPPED PACKETS LIST
|
||||
====================================================== */
|
||||
|
||||
function addUnmappedPacket(pkt, nodeData){
|
||||
if(nodeData && !isInvalidCoord(nodeData)) return;
|
||||
|
||||
const now = Date.now();
|
||||
const entry = {
|
||||
id: pkt.id,
|
||||
key: `${pkt.id ?? "x"}-${pkt.import_time_us ?? now}-${Math.random().toString(16).slice(2)}`,
|
||||
import_time_us: pkt.import_time_us || 0,
|
||||
from_node_id: pkt.from_node_id,
|
||||
long_name: pkt.long_name || (nodeData?.long_name || ""),
|
||||
portnum: pkt.portnum,
|
||||
payload: (pkt.payload || "").trim(),
|
||||
expires_at: now + UNMAPPED_TTL_MS
|
||||
};
|
||||
|
||||
unmappedPackets.unshift(entry);
|
||||
pruneUnmappedPackets(now);
|
||||
if(unmappedPackets.length > UNMAPPED_LIMIT){
|
||||
unmappedPackets = unmappedPackets.slice(0, UNMAPPED_LIMIT);
|
||||
}
|
||||
renderUnmappedPackets();
|
||||
|
||||
setTimeout(() => {
|
||||
pruneUnmappedPackets(Date.now());
|
||||
renderUnmappedPackets();
|
||||
}, UNMAPPED_TTL_MS + 50);
|
||||
}
|
||||
|
||||
function pruneUnmappedPackets(now){
|
||||
unmappedPackets = unmappedPackets.filter(p => p.expires_at > now);
|
||||
}
|
||||
|
||||
function renderUnmappedPackets(){
|
||||
pruneUnmappedPackets(Date.now());
|
||||
const list = document.getElementById("unmapped-list");
|
||||
list.innerHTML = "";
|
||||
|
||||
if(unmappedPackets.length === 0){
|
||||
const empty = document.createElement("li");
|
||||
empty.className = "unmapped-empty";
|
||||
empty.dataset.translateLang = "unmapped_packets_empty";
|
||||
empty.textContent = "No recent unmapped packets.";
|
||||
list.appendChild(empty);
|
||||
return;
|
||||
}
|
||||
|
||||
unmappedPackets.forEach(p=>{
|
||||
const li = document.createElement("li");
|
||||
|
||||
const node = document.createElement("span");
|
||||
node.className = "unmapped-node";
|
||||
const type = portMap[p.portnum] || `Port ${p.portnum ?? "?"}`;
|
||||
const name = p.long_name || `Node ${p.from_node_id ?? "?"}`;
|
||||
node.textContent = `${name} (${type})`;
|
||||
|
||||
li.appendChild(node);
|
||||
list.appendChild(li);
|
||||
});
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
⭐ NEW: DYNAMIC EDGE LOADING
|
||||
====================================================== */
|
||||
@@ -374,7 +529,9 @@ async function onNodeClick(node){
|
||||
if(!f || !t || isInvalidCoord(f) || isInvalidCoord(t)) return;
|
||||
|
||||
const color = edge.type === "neighbor" ? "gray" : "orange";
|
||||
const line = L.polyline([[f.lat, f.long], [t.lat, t.long]], {
|
||||
const fLatLng = getNodeLatLng(f);
|
||||
const tLatLng = getNodeLatLng(t);
|
||||
const line = L.polyline([[fLatLng.lat, fLatLng.lng], [tLatLng.lat, tLatLng.lng]], {
|
||||
color, weight: 3
|
||||
}).addTo(edgeLayer);
|
||||
|
||||
@@ -482,10 +639,14 @@ function createChannelFilters(){
|
||||
});
|
||||
|
||||
const routerOnly=document.getElementById("filter-routers-only");
|
||||
const mqttOnly=document.getElementById("filter-mqtt-only");
|
||||
routerOnly.checked = saved["routersOnly"] || false;
|
||||
mqttOnly.checked = saved["mqttOnly"] || false;
|
||||
|
||||
routerOnly.addEventListener("change", saveFiltersToLocalStorage);
|
||||
routerOnly.addEventListener("change", updateNodeVisibility);
|
||||
mqttOnly.addEventListener("change", saveFiltersToLocalStorage);
|
||||
mqttOnly.addEventListener("change", updateNodeVisibility);
|
||||
|
||||
updateNodeVisibility();
|
||||
}
|
||||
@@ -496,12 +657,14 @@ function saveFiltersToLocalStorage(){
|
||||
state[ch] = document.getElementById(`filter-channel-${ch}`).checked;
|
||||
});
|
||||
state["routersOnly"] = document.getElementById("filter-routers-only").checked;
|
||||
state["mqttOnly"] = document.getElementById("filter-mqtt-only").checked;
|
||||
|
||||
localStorage.setItem("mapFilters", JSON.stringify(state));
|
||||
}
|
||||
|
||||
function updateNodeVisibility(){
|
||||
const routerOnly = document.getElementById("filter-routers-only").checked;
|
||||
const mqttOnly = document.getElementById("filter-mqtt-only").checked;
|
||||
const activeChannels = [...channelSet].filter(ch =>
|
||||
document.getElementById(`filter-channel-${ch}`).checked
|
||||
);
|
||||
@@ -511,6 +674,7 @@ function updateNodeVisibility(){
|
||||
if(marker){
|
||||
const visible =
|
||||
(!routerOnly || n.isRouter) &&
|
||||
(!mqttOnly || n.is_mqtt_gateway) &&
|
||||
activeChannels.includes(n.channel);
|
||||
|
||||
visible ? map.addLayer(marker) : map.removeLayer(marker);
|
||||
@@ -541,6 +705,7 @@ function shareCurrentView() {
|
||||
|
||||
function resetFiltersToDefaults(){
|
||||
document.getElementById("filter-routers-only").checked = false;
|
||||
document.getElementById("filter-mqtt-only").checked = false;
|
||||
channelSet.forEach(ch => {
|
||||
document.getElementById(`filter-channel-${ch}`).checked = true;
|
||||
});
|
||||
|
||||
@@ -141,7 +141,7 @@ document.addEventListener("DOMContentLoaded", async () => {
|
||||
</span>
|
||||
|
||||
<span class="col-3 nodename">
|
||||
<a href="/packet_list/${packet.from_node_id}">
|
||||
<a href="/node/${packet.from_node_id}">
|
||||
${escapeHtml(fromName)}
|
||||
</a>
|
||||
</span>
|
||||
@@ -178,7 +178,7 @@ document.addEventListener("DOMContentLoaded", async () => {
|
||||
const sinceUs = Math.floor(sixDaysAgoMs * 1000);
|
||||
|
||||
const url =
|
||||
`/api/packets?portnum=1&contains=${encodeURIComponent(tag)}&since=${sinceUs}`;
|
||||
`/api/packets?portnum=1&contains=${encodeURIComponent(tag)}&since=${sinceUs}&limit=1000`;
|
||||
|
||||
const resp = await fetch(url);
|
||||
const data = await resp.json();
|
||||
|
||||
@@ -131,6 +131,195 @@
|
||||
color: #9fd4ff;
|
||||
}
|
||||
.inline-link:hover { color: #c7e6ff; }
|
||||
|
||||
/* --- QR Code & Import --- */
|
||||
.node-actions {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
margin-bottom: 14px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.node-actions {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
margin-bottom: 16px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.node-actions button {
|
||||
background: linear-gradient(135deg, #2d3748 0%, #1a202c 100%);
|
||||
border: 1px solid #4a5568;
|
||||
border-radius: 8px;
|
||||
color: #e4e9ee;
|
||||
padding: 8px 16px;
|
||||
font-size: 0.9rem;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
font-weight: 500;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
.node-actions button:hover {
|
||||
background: linear-gradient(135deg, #3d4758 0%, #2a303c 100%);
|
||||
border-color: #6a7788;
|
||||
transform: translateY(-1px);
|
||||
box-shadow: 0 4px 12px rgba(0,0,0,0.3);
|
||||
}
|
||||
.node-actions button.copied {
|
||||
background: linear-gradient(135deg, #276749 0%, #22543d 100%);
|
||||
border-color: #48bb78;
|
||||
color: #fff;
|
||||
}
|
||||
.copy-success {
|
||||
color: #4ade80 !important;
|
||||
transition: opacity 0.3s;
|
||||
}
|
||||
|
||||
/* --- QR Modal --- */
|
||||
#qrModal {
|
||||
display:none;
|
||||
position:fixed;
|
||||
top:0; left:0; width:100%; height:100%;
|
||||
background:rgba(0,0,0,0.95);
|
||||
z-index:10000;
|
||||
align-items:center;
|
||||
justify-content:center;
|
||||
backdrop-filter:blur(4px);
|
||||
}
|
||||
#qrModal > div {
|
||||
background:linear-gradient(145deg, #1e2228, #16191d);
|
||||
border:1px solid #3a4450;
|
||||
border-radius:16px;
|
||||
padding:28px;
|
||||
max-width:380px;
|
||||
text-align:center;
|
||||
color:#e4e9ee;
|
||||
box-shadow:0 25px 80px rgba(0,0,0,0.6);
|
||||
}
|
||||
#qrModal .qr-header {
|
||||
display:flex;
|
||||
justify-content:space-between;
|
||||
align-items:center;
|
||||
margin-bottom:16px;
|
||||
}
|
||||
#qrModal .qr-title {
|
||||
font-size:1.3rem;
|
||||
font-weight:600;
|
||||
margin:0;
|
||||
color:#9fd4ff;
|
||||
}
|
||||
#qrModal .qr-close {
|
||||
background:rgba(255,255,255,0.05);
|
||||
border:1px solid #4a5568;
|
||||
color:#9ca3af;
|
||||
width:32px;
|
||||
height:32px;
|
||||
border-radius:8px;
|
||||
cursor:pointer;
|
||||
font-size:1.2rem;
|
||||
display:flex;
|
||||
align-items:center;
|
||||
justify-content:center;
|
||||
transition:all 0.2s;
|
||||
}
|
||||
#qrModal .qr-close:hover {
|
||||
background:rgba(255,255,255,0.1);
|
||||
color:#fff;
|
||||
border-color:#6a7788;
|
||||
}
|
||||
#qrModal .qr-node-name {
|
||||
font-size:1.15rem;
|
||||
color:#fff;
|
||||
margin:12px 0 20px;
|
||||
font-weight:500;
|
||||
}
|
||||
#qrModal .qr-image {
|
||||
background:#fff;
|
||||
padding:16px;
|
||||
border-radius:12px;
|
||||
display:inline-block;
|
||||
margin-bottom:16px;
|
||||
box-shadow:0 8px 30px rgba(0,0,0,0.4);
|
||||
}
|
||||
#qrModal .qr-image img {
|
||||
display:block;
|
||||
border-radius:4px;
|
||||
}
|
||||
#qrModal .qr-url-container {
|
||||
background:rgba(0,0,0,0.4);
|
||||
border-radius:8px;
|
||||
padding:12px;
|
||||
margin-bottom:18px;
|
||||
}
|
||||
#qrModal .qr-url {
|
||||
font-size:0.65rem;
|
||||
color:#9ca3af;
|
||||
word-break:break-all;
|
||||
font-family:'Monaco', 'Menlo', monospace;
|
||||
line-height:1.4;
|
||||
max-height:48px;
|
||||
overflow-y:auto;
|
||||
display:block;
|
||||
}
|
||||
#qrModal .qr-actions {
|
||||
display:flex;
|
||||
gap:12px;
|
||||
justify-content:center;
|
||||
}
|
||||
#qrModal .qr-btn {
|
||||
background:linear-gradient(135deg, #2d3748 0%, #1a202c 100%);
|
||||
border:1px solid #4a5568;
|
||||
color:#e4e9ee;
|
||||
padding:12px 24px;
|
||||
border-radius:10px;
|
||||
cursor:pointer;
|
||||
font-size:0.9rem;
|
||||
font-weight:500;
|
||||
transition:all 0.2s;
|
||||
display:flex;
|
||||
align-items:center;
|
||||
gap:8px;
|
||||
min-width:140px;
|
||||
justify-content:center;
|
||||
}
|
||||
#qrModal .qr-btn:hover {
|
||||
background:linear-gradient(135deg, #3d4758 0%, #2a303c 100%);
|
||||
border-color:#6a7788;
|
||||
transform:translateY(-2px);
|
||||
box-shadow:0 4px 12px rgba(0,0,0,0.3);
|
||||
}
|
||||
#qrModal .qr-btn.copied {
|
||||
background:linear-gradient(135deg, #276749 0%, #22543d 100%);
|
||||
border-color:#48bb78;
|
||||
color:#fff;
|
||||
}
|
||||
|
||||
/* --- Impersonation Warning --- */
|
||||
.impersonation-warning {
|
||||
background: rgba(239, 68, 68, 0.15);
|
||||
border: 1px solid rgba(239, 68, 68, 0.4);
|
||||
border-radius: 8px;
|
||||
padding: 12px 16px;
|
||||
margin-bottom: 14px;
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 10px;
|
||||
}
|
||||
.impersonation-warning .warning-icon {
|
||||
font-size: 1.2rem;
|
||||
}
|
||||
.impersonation-warning .warning-content {
|
||||
flex: 1;
|
||||
}
|
||||
.impersonation-warning .warning-title {
|
||||
color: #f87171;
|
||||
font-weight: bold;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
.impersonation-warning .warning-text {
|
||||
font-size: 0.85rem;
|
||||
color: #ccc;
|
||||
}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
@@ -141,6 +330,31 @@
|
||||
<span id="nodeLabel"></span>
|
||||
</h5>
|
||||
|
||||
<!-- Node Actions -->
|
||||
<div class="node-actions" id="nodeActions" style="display:none;">
|
||||
<button onclick="copyImportUrl()" id="copyUrlBtn">
|
||||
<span>📋</span> <span data-translate-lang="copy_import_url">Copy Import URL</span>
|
||||
</button>
|
||||
<button onclick="showQrCode()" id="showQrBtn">
|
||||
<span>🔳</span> <span data-translate-lang="show_qr_code">Show QR Code</span>
|
||||
</button>
|
||||
<button onclick="toggleCoverage()" id="toggleCoverageBtn" disabled title="Location required for coverage">
|
||||
<span>📡</span> <span data-translate-lang="toggle_coverage">Predicted Coverage</span>
|
||||
</button>
|
||||
<a class="inline-link" id="coverageHelpLink" href="/docs/COVERAGE.md" target="_blank" rel="noopener" data-translate-lang="coverage_help">
|
||||
Coverage Help
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<!-- Impersonation Warning -->
|
||||
<div id="impersonationWarning" class="impersonation-warning" style="display:none;">
|
||||
<span class="warning-icon">⚠️</span>
|
||||
<div class="warning-content">
|
||||
<div class="warning-title" data-translate-lang="potential_impersonation">Potential Impersonation Detected</div>
|
||||
<div class="warning-text" id="impersonationText"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Node Info -->
|
||||
<div id="node-info" class="node-info">
|
||||
<div><strong data-translate-lang="node_id">Node ID</strong><strong>: </strong><span id="info-node-id">—</span></div>
|
||||
@@ -152,10 +366,12 @@
|
||||
<div><strong data-translate-lang="firmware">Firmware</strong><strong>: </strong> <span id="info-firmware">—</span></div>
|
||||
<div><strong data-translate-lang="role">Role</strong><strong>: </strong> <span id="info-role">—</span></div>
|
||||
|
||||
<div><strong data-translate-lang="mqtt_gateway">MQTT Gateway</strong><strong>: </strong> <span id="info-mqtt-gateway">—</span></div>
|
||||
<div><strong data-translate-lang="channel">Channel</strong><strong>: </strong> <span id="info-channel">—</span></div>
|
||||
<div><strong data-translate-lang="latitude">Latitude</strong><strong>: </strong> <span id="info-lat">—</span></div>
|
||||
<div><strong data-translate-lang="longitude">Longitude</strong><strong>: </strong> <span id="info-lon">—</span></div>
|
||||
|
||||
<div><strong data-translate-lang="first_update">First Update</strong><strong>: </strong> <span id="info-first-update">—</span></div>
|
||||
<div><strong data-translate-lang="last_update">Last Update</strong><strong>: </strong> <span id="info-last-update">—</span></div>
|
||||
<div>
|
||||
<strong data-translate-lang="statistics">Statistics</strong><strong>: </strong>
|
||||
@@ -284,35 +500,37 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- QR Code Modal -->
|
||||
<div id="qrModal">
|
||||
<div>
|
||||
<div class="qr-header">
|
||||
<h3 class="qr-title" data-translate-lang="share_contact_qr">Share Contact QR</h3>
|
||||
<button class="qr-close" onclick="closeQrModal()">✕</button>
|
||||
</div>
|
||||
<div class="qr-node-name" id="qrNodeName">Loading...</div>
|
||||
<div class="qr-image">
|
||||
<div id="qrCodeContainer"></div>
|
||||
</div>
|
||||
<div class="qr-url-container">
|
||||
<span class="qr-url" id="qrUrl">Generating...</span>
|
||||
</div>
|
||||
<div class="qr-actions">
|
||||
<button class="qr-btn" onclick="copyQrUrl()" id="copyQrBtn">
|
||||
<span>📋</span> <span data-translate-lang="copy_url">Copy URL</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/qrcodejs/1.0.0/qrcode.min.js"></script>
|
||||
<script src="https://unpkg.com/leaflet.heat/dist/leaflet-heat.js"></script>
|
||||
<script src="/static/portmaps.js"></script>
|
||||
|
||||
<script>
|
||||
|
||||
const PORT_COLOR_MAP = {
|
||||
0: "#6c757d",
|
||||
1: "#007bff",
|
||||
3: "#28a745",
|
||||
4: "#ffc107",
|
||||
5: "#dc3545",
|
||||
6: "#20c997",
|
||||
65: "#6610f2",
|
||||
67: "#17a2b8",
|
||||
70: "#ff9800",
|
||||
71: "#ff66cc",
|
||||
};
|
||||
|
||||
const PORT_LABEL_MAP = {
|
||||
0: "UNKNOWN",
|
||||
1: "Text",
|
||||
3: "Position",
|
||||
4: "Node Info",
|
||||
5: "Routing",
|
||||
6: "Admin",
|
||||
65: "Store & Forward",
|
||||
67: "Telemetry",
|
||||
70: "Traceroute",
|
||||
71: "Neighbor"
|
||||
};
|
||||
const PORT_COLOR_MAP = window.PORT_COLOR_MAP || {};
|
||||
const PORT_LABEL_MAP = window.PORT_LABEL_MAP || {};
|
||||
|
||||
/* ======================================================
|
||||
NODE PAGE TRANSLATION (isolated from base)
|
||||
@@ -386,6 +604,10 @@ function makeNodePopup(node) {
|
||||
<b><span data-translate-lang="last_update">
|
||||
${nodeTranslations.last_update || "Last Update"}:
|
||||
</span></b> ${formatLastSeen(node.last_seen_us)}
|
||||
<br>
|
||||
<b><span data-translate-lang="first_update">
|
||||
${nodeTranslations.first_update || "First Update"}:
|
||||
</span></b> ${formatLastSeen(node.first_seen_us)}
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
@@ -421,6 +643,7 @@ let currentNode = null;
|
||||
let currentPacketRows = [];
|
||||
|
||||
let map, markers = {};
|
||||
let coverageLayer = null;
|
||||
let chartData = {}, neighborData = { ids:[], names:[], snrs:[] };
|
||||
|
||||
let fromNodeId = new URLSearchParams(window.location.search).get("from_node_id");
|
||||
@@ -488,17 +711,38 @@ async function loadNodeInfo(){
|
||||
document.getElementById("info-hw-model").textContent = node.hw_model ?? "—";
|
||||
document.getElementById("info-firmware").textContent = node.firmware ?? "—";
|
||||
document.getElementById("info-role").textContent = node.role ?? "—";
|
||||
document.getElementById("info-mqtt-gateway").textContent =
|
||||
node.is_mqtt_gateway ? (nodeTranslations.yes || "Yes") : (nodeTranslations.no || "No");
|
||||
document.getElementById("info-channel").textContent = node.channel ?? "—";
|
||||
|
||||
document.getElementById("info-lat").textContent =
|
||||
node.last_lat ? (node.last_lat / 1e7).toFixed(6) : "—";
|
||||
document.getElementById("info-lon").textContent =
|
||||
node.last_long ? (node.last_long / 1e7).toFixed(6) : "—";
|
||||
const coverageBtn = document.getElementById("toggleCoverageBtn");
|
||||
const coverageHelp = document.getElementById("coverageHelpLink");
|
||||
if (coverageBtn) {
|
||||
const hasLocation = Boolean(node.last_lat && node.last_long);
|
||||
coverageBtn.disabled = !hasLocation;
|
||||
coverageBtn.title = hasLocation
|
||||
? ""
|
||||
: (nodeTranslations.location_required || "Location required for coverage");
|
||||
coverageBtn.style.display = hasLocation ? "" : "none";
|
||||
}
|
||||
if (coverageHelp) {
|
||||
const hasLocation = Boolean(node.last_lat && node.last_long);
|
||||
coverageHelp.style.display = hasLocation ? "" : "none";
|
||||
}
|
||||
|
||||
let lastSeen = "—";
|
||||
if (node.last_seen_us) {
|
||||
lastSeen = formatLastSeen(node.last_seen_us);
|
||||
}
|
||||
let firstSeen = "—";
|
||||
if (node.first_seen_us) {
|
||||
firstSeen = formatLastSeen(node.first_seen_us);
|
||||
}
|
||||
document.getElementById("info-first-update").textContent = firstSeen;
|
||||
document.getElementById("info-last-update").textContent = lastSeen;
|
||||
loadNodeStats(node.node_id);
|
||||
} catch (err) {
|
||||
@@ -580,6 +824,44 @@ function initMap(){
|
||||
}).addTo(map);
|
||||
}
|
||||
|
||||
async function toggleCoverage() {
|
||||
if (!map) initMap();
|
||||
|
||||
if (coverageLayer) {
|
||||
map.removeLayer(coverageLayer);
|
||||
coverageLayer = null;
|
||||
return;
|
||||
}
|
||||
|
||||
const nodeId = currentNode?.node_id || fromNodeId;
|
||||
if (!nodeId) return;
|
||||
|
||||
try {
|
||||
const res = await fetch(`/api/coverage/${encodeURIComponent(nodeId)}?mode=perimeter`);
|
||||
if (!res.ok) {
|
||||
console.error("Coverage request failed", res.status);
|
||||
return;
|
||||
}
|
||||
const data = await res.json();
|
||||
if (!data.perimeter || data.perimeter.length < 3) {
|
||||
console.warn("Coverage perimeter missing or too small");
|
||||
return;
|
||||
}
|
||||
coverageLayer = L.polygon(data.perimeter, {
|
||||
color: "#6f42c1",
|
||||
weight: 2,
|
||||
opacity: 0.7,
|
||||
fillColor: "#000000",
|
||||
fillOpacity: 0.10
|
||||
}).addTo(map);
|
||||
map.fitBounds(coverageLayer.getBounds(), { padding: [20, 20] });
|
||||
map.invalidateSize();
|
||||
} catch (err) {
|
||||
console.error("Coverage request failed", err);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function hideMap(){
|
||||
const mapDiv = document.getElementById("map");
|
||||
if (mapDiv) {
|
||||
@@ -1331,6 +1613,9 @@ document.addEventListener("DOMContentLoaded", async () => {
|
||||
requestAnimationFrame(async () => {
|
||||
await loadNodeInfo();
|
||||
|
||||
// Load QR code URL and impersonation check
|
||||
await loadNodeQrAndImpersonation();
|
||||
|
||||
// ✅ MAP MUST EXIST FIRST
|
||||
if (!map) initMap();
|
||||
|
||||
@@ -1452,12 +1737,126 @@ async function loadNodeStats(nodeId) {
|
||||
const csv = rows.map(r => r.join(",")).join("\n");
|
||||
const blob = new Blob([csv], { type: "text/csv" });
|
||||
|
||||
const link = document.createElement("a");
|
||||
const link = document.createElement("a");
|
||||
link.href = URL.createObjectURL(blob);
|
||||
link.download = `packets_${fromNodeId}_${Date.now()}.csv`;
|
||||
link.click();
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
QR CODE & IMPORT URL
|
||||
====================================================== */
|
||||
|
||||
let currentMeshtasticUrl = "";
|
||||
|
||||
async function loadNodeQrAndImpersonation() {
|
||||
const actionsDiv = document.getElementById("nodeActions");
|
||||
const warningDiv = document.getElementById("impersonationWarning");
|
||||
|
||||
try {
|
||||
const [qrRes, impRes] = await Promise.all([
|
||||
fetch(`/api/node/${fromNodeId}/qr`),
|
||||
fetch(`/api/node/${fromNodeId}/impersonation-check`)
|
||||
]);
|
||||
|
||||
const qrData = await qrRes.json();
|
||||
if (qrRes.ok && qrData.meshtastic_url) {
|
||||
currentMeshtasticUrl = qrData.meshtastic_url;
|
||||
actionsDiv.style.display = "flex";
|
||||
} else {
|
||||
actionsDiv.style.display = "none";
|
||||
}
|
||||
|
||||
const impData = await impRes.json();
|
||||
if (impRes.ok && impData.potential_impersonation) {
|
||||
warningDiv.style.display = "flex";
|
||||
document.getElementById("impersonationText").textContent =
|
||||
impData.warning || `This node has sent ${impData.unique_public_key_count} different public keys. This could indicate impersonation.`;
|
||||
} else {
|
||||
warningDiv.style.display = "none";
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to load QR/impersonation data:", err);
|
||||
actionsDiv.style.display = "none";
|
||||
warningDiv.style.display = "none";
|
||||
}
|
||||
}
|
||||
|
||||
function copyImportUrl() {
|
||||
if (!currentMeshtasticUrl) return;
|
||||
|
||||
navigator.clipboard.writeText(currentMeshtasticUrl).then(() => {
|
||||
const btn = document.getElementById("copyUrlBtn");
|
||||
const originalText = btn.innerHTML;
|
||||
btn.innerHTML = '<span>✅</span> <span data-translate-lang="copied">Copied!</span>';
|
||||
btn.classList.add("copy-success");
|
||||
setTimeout(() => {
|
||||
btn.innerHTML = originalText;
|
||||
btn.classList.remove("copy-success");
|
||||
}, 2000);
|
||||
}).catch(err => {
|
||||
console.error("Failed to copy:", err);
|
||||
alert("Failed to copy URL to clipboard");
|
||||
});
|
||||
}
|
||||
|
||||
function showQrCode() {
|
||||
if (!currentMeshtasticUrl) return;
|
||||
|
||||
const node = currentNode;
|
||||
document.getElementById("qrNodeName").textContent =
|
||||
node && node.long_name ? node.long_name : `Node ${fromNodeId}`;
|
||||
document.getElementById("qrUrl").textContent = currentMeshtasticUrl;
|
||||
|
||||
generateQrCode(currentMeshtasticUrl);
|
||||
|
||||
document.getElementById("qrModal").style.display = "flex";
|
||||
}
|
||||
|
||||
function closeQrModal() {
|
||||
document.getElementById("qrModal").style.display = "none";
|
||||
}
|
||||
|
||||
function copyQrUrl() {
|
||||
navigator.clipboard.writeText(currentMeshtasticUrl).then(() => {
|
||||
const btn = document.getElementById("copyQrBtn");
|
||||
const originalHTML = btn.innerHTML;
|
||||
btn.innerHTML = '<span>✅</span> <span data-translate-lang="copied">Copied!</span>';
|
||||
btn.classList.add("copied");
|
||||
setTimeout(() => {
|
||||
btn.innerHTML = originalHTML;
|
||||
btn.classList.remove("copied");
|
||||
}, 2000);
|
||||
}).catch(err => {
|
||||
console.error("Failed to copy:", err);
|
||||
});
|
||||
}
|
||||
|
||||
function generateQrCode(text) {
|
||||
const container = document.getElementById("qrCodeContainer");
|
||||
if (!container) return;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
try {
|
||||
new QRCode(container, {
|
||||
text: text,
|
||||
width: 200,
|
||||
height: 200,
|
||||
colorDark: "#000000",
|
||||
colorLight: "#ffffff",
|
||||
correctLevel: QRCode.CorrectLevel.M
|
||||
});
|
||||
} catch (e) {
|
||||
console.error("QR Code generation error:", e);
|
||||
container.innerHTML = '<div style="padding:20px;color:#f87171;">Failed to generate QR code</div>';
|
||||
}
|
||||
}
|
||||
|
||||
/* ======================================================
|
||||
END QR CODE & IMPORT URL
|
||||
====================================================== */
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
@@ -266,13 +266,14 @@ select, .export-btn, .search-box, .clear-btn {
|
||||
<th data-translate-lang="last_lat">Last Latitude <span class="sort-icon"></span></th>
|
||||
<th data-translate-lang="last_long">Last Longitude <span class="sort-icon"></span></th>
|
||||
<th data-translate-lang="channel">Channel <span class="sort-icon"></span></th>
|
||||
<th data-translate-lang="mqtt_gateway">MQTT</th>
|
||||
<th data-translate-lang="last_seen">Last Seen <span class="sort-icon"></span></th>
|
||||
<th data-translate-lang="favorite"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="node-table-body">
|
||||
<tr>
|
||||
<td colspan="10" style="text-align:center; color:white;" data-translate-lang="loading_nodes">
|
||||
<td colspan="11" style="text-align:center; color:white;" data-translate-lang="loading_nodes">
|
||||
Loading nodes...
|
||||
</td>
|
||||
</tr>
|
||||
@@ -448,7 +449,7 @@ document.addEventListener("DOMContentLoaded", async function() {
|
||||
setStatus("");
|
||||
} catch (err) {
|
||||
tbody.innerHTML = `<tr>
|
||||
<td colspan="10" style="text-align:center; color:red;">
|
||||
<td colspan="11" style="text-align:center; color:red;">
|
||||
${nodelistTranslations.error_loading_nodes || "Error loading nodes"}
|
||||
</td></tr>`;
|
||||
setStatus("");
|
||||
@@ -583,7 +584,7 @@ document.addEventListener("DOMContentLoaded", async function() {
|
||||
if (!nodes.length) {
|
||||
if (shouldRenderTable) {
|
||||
tbody.innerHTML = `<tr>
|
||||
<td colspan="10" style="text-align:center; color:white;">
|
||||
<td colspan="11" style="text-align:center; color:white;">
|
||||
${nodelistTranslations.no_nodes_found || "No nodes found"}
|
||||
</td>
|
||||
</tr>`;
|
||||
@@ -613,6 +614,7 @@ document.addEventListener("DOMContentLoaded", async function() {
|
||||
<td>${node.last_lat ? (node.last_lat / 1e7).toFixed(7) : "N/A"}</td>
|
||||
<td>${node.last_long ? (node.last_long / 1e7).toFixed(7) : "N/A"}</td>
|
||||
<td>${node.channel || "N/A"}</td>
|
||||
<td>${node.is_mqtt_gateway ? (nodelistTranslations.yes || "Yes") : (nodelistTranslations.no || "No")}</td>
|
||||
<td>${timeAgoFromMs(node.last_seen_ms)}</td>
|
||||
<td style="text-align:center;">
|
||||
<span class="favorite-star ${fav ? "active" : ""}" data-node-id="${node.node_id}">
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
|
||||
{% block title %}Packet Details{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="/static/portmaps.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block css %}
|
||||
{{ super() }}
|
||||
<style>
|
||||
@@ -178,17 +182,7 @@ document.addEventListener("DOMContentLoaded", async () => {
|
||||
const packetId = match[1];
|
||||
|
||||
/* PORT LABELS (NOT TRANSLATED) */
|
||||
const PORT_NAMES = {
|
||||
0:"UNKNOWN APP",
|
||||
1:"Text",
|
||||
3:"Position",
|
||||
4:"Node Info",
|
||||
5:"Routing",
|
||||
6:"Admin",
|
||||
67:"Telemetry",
|
||||
70:"Traceroute",
|
||||
71:"Neighbor"
|
||||
};
|
||||
const PORT_NAMES = window.PORT_LABEL_MAP;
|
||||
|
||||
/* ---------------------------------------------
|
||||
Fetch packet
|
||||
|
||||
@@ -89,6 +89,7 @@
|
||||
|
||||
{% block head %}
|
||||
<script src="https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js"></script>
|
||||
<script src="/static/portmaps.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
@@ -111,6 +112,10 @@
|
||||
<p data-translate-lang="total_packets_seen">Total Packets Seen</p>
|
||||
<div class="summary-count" id="summary_seen">0</div>
|
||||
</div>
|
||||
<div class="summary-card" style="flex:1;">
|
||||
<p data-translate-lang="total_gateways">Total Gateways</p>
|
||||
<div class="summary-count" id="summary_gateways">0</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Daily Charts -->
|
||||
@@ -189,6 +194,28 @@
|
||||
<button class="export-btn" data-chart="chart_channel" data-translate-lang="export_csv">Export CSV</button>
|
||||
<div id="chart_channel" class="chart"></div>
|
||||
</div>
|
||||
|
||||
<!-- Gateway breakdown charts -->
|
||||
<div class="card-section">
|
||||
<p class="section-header" data-translate-lang="gateway_channel_breakdown">Gateway Channel Breakdown</p>
|
||||
<button class="expand-btn" data-chart="chart_gateway_channel" data-translate-lang="expand_chart">Expand Chart</button>
|
||||
<button class="export-btn" data-chart="chart_gateway_channel" data-translate-lang="export_csv">Export CSV</button>
|
||||
<div id="chart_gateway_channel" class="chart"></div>
|
||||
</div>
|
||||
|
||||
<div class="card-section">
|
||||
<p class="section-header" data-translate-lang="gateway_role_breakdown">Gateway Role Breakdown</p>
|
||||
<button class="expand-btn" data-chart="chart_gateway_role" data-translate-lang="expand_chart">Expand Chart</button>
|
||||
<button class="export-btn" data-chart="chart_gateway_role" data-translate-lang="export_csv">Export CSV</button>
|
||||
<div id="chart_gateway_role" class="chart"></div>
|
||||
</div>
|
||||
|
||||
<div class="card-section">
|
||||
<p class="section-header" data-translate-lang="gateway_firmware_breakdown">Gateway Firmware Breakdown</p>
|
||||
<button class="expand-btn" data-chart="chart_gateway_firmware" data-translate-lang="expand_chart">Expand Chart</button>
|
||||
<button class="export-btn" data-chart="chart_gateway_firmware" data-translate-lang="export_csv">Export CSV</button>
|
||||
<div id="chart_gateway_firmware" class="chart"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Modal for expanded charts -->
|
||||
@@ -205,14 +232,7 @@
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const PORTNUM_LABELS = {
|
||||
1: "Text Messages",
|
||||
3: "Position",
|
||||
4: "Node Info",
|
||||
67: "Telemetry",
|
||||
70: "Traceroute",
|
||||
71: "Neighbor Info"
|
||||
};
|
||||
const PORTNUM_LABELS = window.PORT_LABEL_MAP;
|
||||
|
||||
// --- Fetch & Processing ---
|
||||
async function fetchStats(period_type,length,portnum=null,channel=null){
|
||||
@@ -345,6 +365,7 @@ function renderPieChart(elId,data,name){
|
||||
return chart;
|
||||
}
|
||||
|
||||
|
||||
// --- Packet Type Pie Chart ---
|
||||
async function fetchPacketTypeBreakdown(channel=null) {
|
||||
const portnums = [1,3,4,67,70,71];
|
||||
@@ -368,6 +389,7 @@ async function fetchPacketTypeBreakdown(channel=null) {
|
||||
let chartHourlyAll, chartPortnum1, chartPortnum3, chartPortnum4, chartPortnum67, chartPortnum70, chartPortnum71;
|
||||
let chartDailyAll, chartDailyPortnum1;
|
||||
let chartHwModel, chartRole, chartChannel;
|
||||
let chartGatewayChannel, chartGatewayRole, chartGatewayFirmware;
|
||||
let chartPacketTypes;
|
||||
|
||||
async function init(){
|
||||
@@ -414,10 +436,31 @@ async function init(){
|
||||
chartRole=renderPieChart("chart_role",processCountField(nodes,"role"),"Role");
|
||||
chartChannel=renderPieChart("chart_channel",processCountField(nodes,"channel"),"Channel");
|
||||
|
||||
const gateways = nodes.filter(n => n.is_mqtt_gateway);
|
||||
chartGatewayChannel = renderPieChart(
|
||||
"chart_gateway_channel",
|
||||
processCountField(gateways, "channel"),
|
||||
"Gateway Channel"
|
||||
);
|
||||
chartGatewayRole = renderPieChart(
|
||||
"chart_gateway_role",
|
||||
processCountField(gateways, "role"),
|
||||
"Gateway Role"
|
||||
);
|
||||
chartGatewayFirmware = renderPieChart(
|
||||
"chart_gateway_firmware",
|
||||
processCountField(gateways, "firmware"),
|
||||
"Gateway Firmware"
|
||||
);
|
||||
|
||||
const summaryNodesEl = document.getElementById("summary_nodes");
|
||||
if (summaryNodesEl) {
|
||||
summaryNodesEl.textContent = nodes.length.toLocaleString();
|
||||
}
|
||||
const summaryGatewaysEl = document.getElementById("summary_gateways");
|
||||
if (summaryGatewaysEl) {
|
||||
summaryGatewaysEl.textContent = gateways.length.toLocaleString();
|
||||
}
|
||||
|
||||
// Packet types pie
|
||||
const packetTypesData = await fetchPacketTypeBreakdown();
|
||||
@@ -464,6 +507,9 @@ window.addEventListener('resize',()=>{
|
||||
chartHwModel,
|
||||
chartRole,
|
||||
chartChannel,
|
||||
chartGatewayChannel,
|
||||
chartGatewayRole,
|
||||
chartGatewayFirmware,
|
||||
chartPacketTypes
|
||||
].forEach(c=>c?.resize());
|
||||
});
|
||||
|
||||
138
meshview/templates/traceroute.html
Normal file
138
meshview/templates/traceroute.html
Normal file
@@ -0,0 +1,138 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<script src="https://cdn.jsdelivr.net/npm/echarts/dist/echarts.min.js"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block css %}
|
||||
#traceroute-graph {
|
||||
width: 100%;
|
||||
height: 85vh;
|
||||
border: 1px solid #2a2f36;
|
||||
background: linear-gradient(135deg, #0f1216 0%, #171b22 100%);
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
#traceroute-meta {
|
||||
padding: 12px 16px;
|
||||
color: #c8d0da;
|
||||
}
|
||||
|
||||
#traceroute-error {
|
||||
color: #ff6b6b;
|
||||
}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<div id="traceroute-meta">
|
||||
<div><b>Traceroute</b> <span id="traceroute-title"></span></div>
|
||||
<div id="traceroute-error"></div>
|
||||
</div>
|
||||
<div id="traceroute-graph"></div>
|
||||
|
||||
<script>
|
||||
const el = document.getElementById("traceroute-graph");
|
||||
const chart = echarts.init(el);
|
||||
|
||||
function packetIdFromPath() {
|
||||
const parts = window.location.pathname.split("/").filter(Boolean);
|
||||
return parts[parts.length - 1];
|
||||
}
|
||||
|
||||
function addPathEdges(path, edges, style) {
|
||||
for (let i = 0; i < path.length - 1; i++) {
|
||||
edges.push({
|
||||
source: String(path[i]),
|
||||
target: String(path[i + 1]),
|
||||
lineStyle: style
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function loadTraceroute() {
|
||||
const packetId = packetIdFromPath();
|
||||
document.getElementById("traceroute-title").textContent = `#${packetId}`;
|
||||
|
||||
const [res, nodesRes] = await Promise.all([
|
||||
fetch(`/api/traceroute/${packetId}`),
|
||||
fetch("/api/nodes"),
|
||||
]);
|
||||
if (!res.ok) {
|
||||
document.getElementById("traceroute-error").textContent = "Traceroute not found.";
|
||||
return;
|
||||
}
|
||||
|
||||
const data = await res.json();
|
||||
const nodesData = nodesRes.ok ? await nodesRes.json() : { nodes: [] };
|
||||
const nodeShortNameById = new Map(
|
||||
(nodesData.nodes || []).map(n => [String(n.node_id), n.short_name || n.long_name || String(n.node_id)])
|
||||
);
|
||||
const nodeLongNameById = new Map(
|
||||
(nodesData.nodes || []).map(n => [String(n.node_id), n.long_name || n.short_name || String(n.node_id)])
|
||||
);
|
||||
const nodes = new Map();
|
||||
const edges = [];
|
||||
|
||||
const forwardPaths = data?.winning_paths?.forward || [];
|
||||
const reversePaths = data?.winning_paths?.reverse || [];
|
||||
const originId = data?.packet?.from != null ? String(data.packet.from) : null;
|
||||
const targetId = data?.packet?.to != null ? String(data.packet.to) : null;
|
||||
|
||||
forwardPaths.forEach(path => {
|
||||
path.forEach(id => nodes.set(String(id), { name: String(id) }));
|
||||
addPathEdges(path, edges, { color: "#ff5733", width: 3 });
|
||||
});
|
||||
|
||||
reversePaths.forEach(path => {
|
||||
path.forEach(id => nodes.set(String(id), { name: String(id) }));
|
||||
addPathEdges(path, edges, { color: "#00c3ff", width: 2, type: "dashed" });
|
||||
});
|
||||
|
||||
const graphNodes = Array.from(nodes.values()).map(n => {
|
||||
const isOrigin = originId && n.name === originId;
|
||||
const isTarget = targetId && n.name === targetId;
|
||||
const color = isOrigin ? "#ff3b30" : isTarget ? "#34c759" : "#8aa4c8";
|
||||
const size = isOrigin || isTarget ? 44 : 36;
|
||||
return {
|
||||
id: n.name,
|
||||
name: nodeShortNameById.get(n.name) || n.name,
|
||||
symbolSize: size,
|
||||
itemStyle: { color },
|
||||
label: {
|
||||
show: true,
|
||||
color: "#e7eef7",
|
||||
fontWeight: "bold"
|
||||
},
|
||||
tooltip: {
|
||||
formatter: () => nodeLongNameById.get(n.name) || n.name
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
const option = {
|
||||
backgroundColor: "transparent",
|
||||
tooltip: { trigger: "item" },
|
||||
series: [
|
||||
{
|
||||
type: "graph",
|
||||
layout: "force",
|
||||
roam: true,
|
||||
zoom: 1.2,
|
||||
draggable: true,
|
||||
force: { repulsion: 200, edgeLength: 80 },
|
||||
data: graphNodes,
|
||||
edges: edges,
|
||||
lineStyle: { opacity: 0.8, curveness: 0.1 },
|
||||
edgeSymbol: ["none", "arrow"],
|
||||
edgeSymbolSize: 10
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
chart.setOption(option);
|
||||
}
|
||||
|
||||
loadTraceroute();
|
||||
window.addEventListener("resize", () => chart.resize());
|
||||
</script>
|
||||
{% endblock %}
|
||||
@@ -1,7 +1,10 @@
|
||||
"""Main web server routes and page rendering for Meshview."""
|
||||
|
||||
import asyncio
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import ssl
|
||||
from dataclasses import dataclass
|
||||
@@ -12,12 +15,13 @@ from google.protobuf import text_format
|
||||
from google.protobuf.message import Message
|
||||
from jinja2 import Environment, PackageLoader, Undefined, select_autoescape
|
||||
from markupsafe import Markup
|
||||
import pathlib
|
||||
|
||||
from meshtastic.protobuf.portnums_pb2 import PortNum
|
||||
from meshview import config, database, decode_payload, migrations, models, store
|
||||
from meshview.__version__ import (
|
||||
__version_string__,
|
||||
)
|
||||
from meshview.deps import check_optional_deps
|
||||
from meshview.web_api import api
|
||||
|
||||
logging.basicConfig(
|
||||
@@ -35,6 +39,7 @@ env = Environment(loader=PackageLoader("meshview"), autoescape=select_autoescape
|
||||
|
||||
# Start Database
|
||||
database.init_database(CONFIG["database"]["connection_string"])
|
||||
check_optional_deps()
|
||||
|
||||
BASE_DIR = os.path.dirname(__file__)
|
||||
LANG_DIR = os.path.join(BASE_DIR, "lang")
|
||||
@@ -45,22 +50,25 @@ with open(os.path.join(os.path.dirname(__file__), '1x1.png'), 'rb') as png:
|
||||
|
||||
@dataclass
|
||||
class Packet:
|
||||
"""UI-friendly packet wrapper for templates and API payloads."""
|
||||
|
||||
id: int
|
||||
from_node_id: int
|
||||
from_node: models.Node
|
||||
to_node_id: int
|
||||
to_node: models.Node
|
||||
channel: str
|
||||
portnum: int
|
||||
data: str
|
||||
raw_mesh_packet: object
|
||||
raw_payload: object
|
||||
payload: str
|
||||
pretty_payload: Markup
|
||||
import_time: datetime.datetime
|
||||
import_time_us: int
|
||||
|
||||
@classmethod
|
||||
def from_model(cls, packet):
|
||||
"""Convert a Packet ORM model into a presentation-friendly Packet."""
|
||||
mesh_packet, payload = decode_payload.decode(packet)
|
||||
pretty_payload = None
|
||||
|
||||
@@ -97,11 +105,11 @@ class Packet:
|
||||
from_node_id=packet.from_node_id,
|
||||
to_node=packet.to_node,
|
||||
to_node_id=packet.to_node_id,
|
||||
channel=packet.channel,
|
||||
portnum=packet.portnum,
|
||||
data=text_mesh_packet,
|
||||
payload=text_payload, # now always a string
|
||||
pretty_payload=pretty_payload,
|
||||
import_time=packet.import_time,
|
||||
import_time_us=packet.import_time_us, # <-- include microseconds
|
||||
raw_mesh_packet=mesh_packet,
|
||||
raw_payload=payload,
|
||||
@@ -109,6 +117,7 @@ class Packet:
|
||||
|
||||
|
||||
async def build_trace(node_id):
|
||||
"""Build a recent GPS trace list for a node using position packets."""
|
||||
trace = []
|
||||
for raw_p in await store.get_packets_from(
|
||||
node_id, PortNum.POSITION_APP, since=datetime.timedelta(hours=24)
|
||||
@@ -130,6 +139,7 @@ async def build_trace(node_id):
|
||||
|
||||
|
||||
async def build_neighbors(node_id):
|
||||
"""Return neighbor node metadata for the given node ID."""
|
||||
packets = await store.get_packets_from(node_id, PortNum.NEIGHBORINFO_APP, limit=1)
|
||||
packet = packets.first()
|
||||
|
||||
@@ -159,6 +169,7 @@ async def build_neighbors(node_id):
|
||||
|
||||
|
||||
def node_id_to_hex(node_id):
|
||||
"""Format a node_id in Meshtastic hex notation."""
|
||||
if node_id is None or isinstance(node_id, Undefined):
|
||||
return "Invalid node_id" # i... have no clue
|
||||
if node_id == 4294967295:
|
||||
@@ -168,6 +179,7 @@ def node_id_to_hex(node_id):
|
||||
|
||||
|
||||
def format_timestamp(timestamp):
|
||||
"""Normalize timestamps to ISO 8601 strings."""
|
||||
if isinstance(timestamp, int):
|
||||
timestamp = datetime.datetime.fromtimestamp(timestamp, datetime.UTC)
|
||||
return timestamp.isoformat(timespec="milliseconds")
|
||||
@@ -200,9 +212,11 @@ async def redirect_packet_list(request):
|
||||
packet_id = request.match_info["packet_id"]
|
||||
raise web.HTTPFound(location=f"/node/{packet_id}")
|
||||
|
||||
|
||||
# Generic static HTML route
|
||||
@routes.get("/{page}")
|
||||
async def serve_page(request):
|
||||
"""Serve static HTML pages from meshview/static."""
|
||||
page = request.match_info["page"]
|
||||
|
||||
# default to index.html if no extension
|
||||
@@ -217,6 +231,19 @@ async def serve_page(request):
|
||||
return web.Response(text=content, content_type="text/html")
|
||||
|
||||
|
||||
@routes.get("/docs/{doc}")
|
||||
async def serve_doc(request):
|
||||
"""Serve documentation files from docs/ (markdown)."""
|
||||
doc = request.match_info["doc"]
|
||||
docs_root = pathlib.Path(__file__).parent.parent / "docs"
|
||||
doc_path = (docs_root / doc).resolve()
|
||||
|
||||
if not doc_path.is_file() or docs_root not in doc_path.parents:
|
||||
raise web.HTTPNotFound(text="Document not found")
|
||||
|
||||
content = doc_path.read_text(encoding="utf-8")
|
||||
return web.Response(text=content, content_type="text/markdown")
|
||||
|
||||
|
||||
@routes.get("/net")
|
||||
async def net(request):
|
||||
@@ -303,6 +330,15 @@ async def stats(request):
|
||||
)
|
||||
|
||||
|
||||
@routes.get("/traceroute/{packet_id}")
|
||||
async def traceroute_page(request):
|
||||
template = env.get_template("traceroute.html")
|
||||
return web.Response(
|
||||
text=template.render(),
|
||||
content_type="text/html",
|
||||
)
|
||||
|
||||
|
||||
# Keep !!
|
||||
@routes.get("/graph/traceroute/{packet_id}")
|
||||
async def graph_traceroute(request):
|
||||
@@ -352,8 +388,8 @@ async def graph_traceroute(request):
|
||||
# It seems some nodes add them self to the list before uplinking
|
||||
path.append(tr.gateway_node_id)
|
||||
|
||||
if not tr.done and tr.gateway_node_id not in node_seen_time and tr.import_time:
|
||||
node_seen_time[path[-1]] = tr.import_time
|
||||
if not tr.done and tr.gateway_node_id not in node_seen_time and tr.import_time_us:
|
||||
node_seen_time[path[-1]] = tr.import_time_us
|
||||
|
||||
mqtt_nodes.add(tr.gateway_node_id)
|
||||
node_color[path[-1]] = '#' + hex(hash(tuple(path)))[3:9]
|
||||
@@ -363,7 +399,7 @@ async def graph_traceroute(request):
|
||||
for path in paths:
|
||||
used_nodes.update(path)
|
||||
|
||||
import_times = [tr.import_time for tr in traceroutes if tr.import_time]
|
||||
import_times = [tr.import_time_us for tr in traceroutes if tr.import_time_us]
|
||||
if import_times:
|
||||
first_time = min(import_times)
|
||||
else:
|
||||
@@ -378,7 +414,7 @@ async def graph_traceroute(request):
|
||||
f'[{node.short_name}] {node.long_name}\n{node_id_to_hex(node_id)}\n{node.role}'
|
||||
)
|
||||
if node_id in node_seen_time:
|
||||
ms = (node_seen_time[node_id] - first_time).total_seconds() * 1000
|
||||
ms = (node_seen_time[node_id] - first_time) / 1000
|
||||
node_name += f'\n {ms:.2f}ms'
|
||||
style = 'dashed'
|
||||
if node_id == dest:
|
||||
@@ -396,7 +432,7 @@ async def graph_traceroute(request):
|
||||
shape='box',
|
||||
color=node_color.get(node_id, 'black'),
|
||||
style=style,
|
||||
href=f"/packet_list/{node_id}",
|
||||
href=f"/node/{node_id}",
|
||||
)
|
||||
)
|
||||
|
||||
@@ -412,6 +448,7 @@ async def graph_traceroute(request):
|
||||
|
||||
|
||||
async def run_server():
|
||||
"""Start the aiohttp web server after migrations are complete."""
|
||||
# Wait for database migrations to complete before starting web server
|
||||
logger.info("Checking database schema status...")
|
||||
database_url = CONFIG["database"]["connection_string"]
|
||||
@@ -428,6 +465,7 @@ async def run_server():
|
||||
logger.info("Database schema verified - starting web server")
|
||||
|
||||
app = web.Application()
|
||||
app.router.add_static("/static/", pathlib.Path(__file__).parent / "static")
|
||||
app.add_routes(api.routes) # Add API routes
|
||||
app.add_routes(routes) # Add main web routes
|
||||
|
||||
|
||||
@@ -3,15 +3,28 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
|
||||
from aiohttp import web
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from meshtastic.protobuf.portnums_pb2 import PortNum
|
||||
from meshview import database, decode_payload, store
|
||||
from meshview.__version__ import __version__, _git_revision_short, get_version_info
|
||||
from meshview.config import CONFIG
|
||||
from meshview.models import Node, NodePublicKey
|
||||
from meshview.models import Packet as PacketModel
|
||||
from meshview.models import PacketSeen as PacketSeenModel
|
||||
from meshview.radio.coverage import (
|
||||
DEFAULT_MAX_DBM,
|
||||
DEFAULT_MIN_DBM,
|
||||
DEFAULT_RELIABILITY,
|
||||
DEFAULT_THRESHOLD_DBM,
|
||||
ITM_AVAILABLE,
|
||||
compute_coverage,
|
||||
compute_perimeter,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -19,11 +32,35 @@ logger = logging.getLogger(__name__)
|
||||
Packet = None
|
||||
SEQ_REGEX = None
|
||||
LANG_DIR = None
|
||||
_LANG_CACHE = {}
|
||||
|
||||
# Create dedicated route table for API endpoints
|
||||
routes = web.RouteTableDef()
|
||||
|
||||
|
||||
def _haversine_km(lat1, lon1, lat2, lon2):
|
||||
r = 6371.0
|
||||
phi1 = math.radians(lat1)
|
||||
phi2 = math.radians(lat2)
|
||||
dphi = math.radians(lat2 - lat1)
|
||||
dlambda = math.radians(lon2 - lon1)
|
||||
a = math.sin(dphi / 2.0) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlambda / 2.0) ** 2
|
||||
return 2 * r * math.asin(math.sqrt(a))
|
||||
|
||||
|
||||
def _bearing_deg(lat1, lon1, lat2, lon2):
|
||||
phi1 = math.radians(lat1)
|
||||
phi2 = math.radians(lat2)
|
||||
dlambda = math.radians(lon2 - lon1)
|
||||
y = math.sin(dlambda) * math.cos(phi2)
|
||||
x = math.cos(phi1) * math.sin(phi2) - math.sin(phi1) * math.cos(phi2) * math.cos(dlambda)
|
||||
bearing = math.degrees(math.atan2(y, x))
|
||||
return (bearing + 360.0) % 360.0
|
||||
|
||||
|
||||
OBSERVED_MAX_DISTANCE_KM = 50.0
|
||||
|
||||
|
||||
def init_api_module(packet_class, seq_regex, lang_dir):
|
||||
"""Initialize API module with dependencies from main web module."""
|
||||
global Packet, SEQ_REGEX, LANG_DIR
|
||||
@@ -80,7 +117,9 @@ async def api_nodes(request):
|
||||
"last_lat": getattr(n, "last_lat", None),
|
||||
"last_long": getattr(n, "last_long", None),
|
||||
"channel": n.channel,
|
||||
"is_mqtt_gateway": getattr(n, "is_mqtt_gateway", None),
|
||||
# "last_update": n.last_update.isoformat(),
|
||||
"first_seen_us": n.first_seen_us,
|
||||
"last_seen_us": n.last_seen_us,
|
||||
}
|
||||
)
|
||||
@@ -126,8 +165,7 @@ async def api_packets(request):
|
||||
"portnum": int(p.portnum) if p.portnum is not None else None,
|
||||
"payload": (p.payload or "").strip(),
|
||||
"import_time_us": p.import_time_us,
|
||||
"import_time": p.import_time.isoformat() if p.import_time else None,
|
||||
"channel": getattr(p.from_node, "channel", ""),
|
||||
"channel": p.channel,
|
||||
"long_name": getattr(p.from_node, "long_name", ""),
|
||||
}
|
||||
return web.json_response({"packets": [data]})
|
||||
@@ -178,13 +216,17 @@ async def api_packets(request):
|
||||
logger.warning(f"Invalid node_id: {node_id_str}")
|
||||
|
||||
# --- Fetch packets using explicit filters ---
|
||||
contains_for_query = contains
|
||||
if portnum == PortNum.TEXT_MESSAGE_APP and contains:
|
||||
contains_for_query = None
|
||||
|
||||
packets = await store.get_packets(
|
||||
from_node_id=from_node_id,
|
||||
to_node_id=to_node_id,
|
||||
node_id=node_id,
|
||||
portnum=portnum,
|
||||
after=since,
|
||||
contains=contains,
|
||||
contains=contains_for_query,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
@@ -208,8 +250,7 @@ async def api_packets(request):
|
||||
packet_dict = {
|
||||
"id": p.id,
|
||||
"import_time_us": p.import_time_us,
|
||||
"import_time": p.import_time.isoformat() if p.import_time else None,
|
||||
"channel": getattr(p.from_node, "channel", ""),
|
||||
"channel": p.channel,
|
||||
"from_node_id": p.from_node_id,
|
||||
"to_node_id": p.to_node_id,
|
||||
"portnum": int(p.portnum),
|
||||
@@ -228,20 +269,12 @@ async def api_packets(request):
|
||||
|
||||
packets_data.append(packet_dict)
|
||||
|
||||
# --- Latest import_time for incremental fetch ---
|
||||
# --- Latest import_time_us for incremental fetch ---
|
||||
latest_import_time = None
|
||||
if packets_data:
|
||||
for p in packets_data:
|
||||
if p.get("import_time_us") and p["import_time_us"] > 0:
|
||||
latest_import_time = max(latest_import_time or 0, p["import_time_us"])
|
||||
elif p.get("import_time") and latest_import_time is None:
|
||||
try:
|
||||
dt = datetime.datetime.fromisoformat(
|
||||
p["import_time"].replace("Z", "+00:00")
|
||||
)
|
||||
latest_import_time = int(dt.timestamp() * 1_000_000)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
response = {"packets": packets_data}
|
||||
if latest_import_time is not None:
|
||||
@@ -421,7 +454,7 @@ async def api_stats_count(request):
|
||||
|
||||
@routes.get("/api/edges")
|
||||
async def api_edges(request):
|
||||
since = datetime.datetime.now() - datetime.timedelta(hours=48)
|
||||
since = datetime.datetime.now() - datetime.timedelta(hours=12)
|
||||
filter_type = request.query.get("type")
|
||||
|
||||
# NEW → optional single-node filter
|
||||
@@ -431,14 +464,10 @@ async def api_edges(request):
|
||||
try:
|
||||
node_filter = int(node_filter_str)
|
||||
except ValueError:
|
||||
return web.json_response(
|
||||
{"error": "node_id must be integer"},
|
||||
status=400
|
||||
)
|
||||
return web.json_response({"error": "node_id must be integer"}, status=400)
|
||||
|
||||
edges = {}
|
||||
traceroute_count = 0
|
||||
neighbor_packet_count = 0
|
||||
edges_added_tr = 0
|
||||
edges_added_neighbor = 0
|
||||
|
||||
@@ -463,8 +492,6 @@ async def api_edges(request):
|
||||
# --- Neighbor edges ---
|
||||
if filter_type in (None, "neighbor"):
|
||||
packets = await store.get_packets(portnum=71)
|
||||
neighbor_packet_count = len(packets)
|
||||
|
||||
for packet in packets:
|
||||
try:
|
||||
_, neighbor_info = decode_payload.decode(packet)
|
||||
@@ -479,21 +506,16 @@ async def api_edges(request):
|
||||
|
||||
# Convert to list
|
||||
edges_list = [
|
||||
{"from": frm, "to": to, "type": edge_type}
|
||||
for (frm, to), edge_type in edges.items()
|
||||
{"from": frm, "to": to, "type": edge_type} for (frm, to), edge_type in edges.items()
|
||||
]
|
||||
|
||||
# NEW → apply node_id filtering
|
||||
if node_filter is not None:
|
||||
edges_list = [
|
||||
e for e in edges_list
|
||||
if e["from"] == node_filter or e["to"] == node_filter
|
||||
]
|
||||
edges_list = [e for e in edges_list if e["from"] == node_filter or e["to"] == node_filter]
|
||||
|
||||
return web.json_response({"edges": edges_list})
|
||||
|
||||
|
||||
|
||||
@routes.get("/api/config")
|
||||
async def api_config(request):
|
||||
try:
|
||||
@@ -607,9 +629,20 @@ async def api_lang(request):
|
||||
if not os.path.exists(lang_file):
|
||||
lang_file = os.path.join(LANG_DIR, "en.json")
|
||||
|
||||
# Load JSON translations
|
||||
with open(lang_file, encoding="utf-8") as f:
|
||||
translations = json.load(f)
|
||||
# Cache by file + mtime to avoid re-reading on every request
|
||||
try:
|
||||
mtime = os.path.getmtime(lang_file)
|
||||
except OSError:
|
||||
mtime = None
|
||||
|
||||
cache_key = lang_file
|
||||
cached = _LANG_CACHE.get(cache_key)
|
||||
if cached and cached.get("mtime") == mtime:
|
||||
translations = cached["translations"]
|
||||
else:
|
||||
with open(lang_file, encoding="utf-8") as f:
|
||||
translations = json.load(f)
|
||||
_LANG_CACHE[cache_key] = {"mtime": mtime, "translations": translations}
|
||||
|
||||
if section:
|
||||
section = section.lower()
|
||||
@@ -637,8 +670,14 @@ async def health_check(request):
|
||||
# Check database connectivity
|
||||
try:
|
||||
async with database.async_session() as session:
|
||||
await session.execute(text("SELECT 1"))
|
||||
result = await session.execute(select(func.max(PacketModel.import_time_us)))
|
||||
last_import_time_us = result.scalar()
|
||||
health_status["database"] = "connected"
|
||||
if last_import_time_us is not None:
|
||||
now_us = int(datetime.datetime.now(datetime.UTC).timestamp() * 1_000_000)
|
||||
health_status["seconds_since_last_message"] = round(
|
||||
(now_us - last_import_time_us) / 1_000_000, 1
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Database health check failed: {e}")
|
||||
health_status["database"] = "disconnected"
|
||||
@@ -711,7 +750,6 @@ async def api_packets_seen(request):
|
||||
"rx_snr": row.rx_snr,
|
||||
"rx_rssi": row.rx_rssi,
|
||||
"topic": row.topic,
|
||||
"import_time": (row.import_time.isoformat() if row.import_time else None),
|
||||
"import_time_us": row.import_time_us,
|
||||
}
|
||||
)
|
||||
@@ -725,6 +763,7 @@ async def api_packets_seen(request):
|
||||
status=500,
|
||||
)
|
||||
|
||||
|
||||
@routes.get("/api/traceroute/{packet_id}")
|
||||
async def api_traceroute(request):
|
||||
packet_id = int(request.match_info['packet_id'])
|
||||
@@ -746,14 +785,15 @@ async def api_traceroute(request):
|
||||
forward_list = list(route.route)
|
||||
reverse_list = list(route.route_back)
|
||||
|
||||
tr_groups.append({
|
||||
"index": idx,
|
||||
"import_time": tr.import_time.isoformat() if tr.import_time else None,
|
||||
"gateway_node_id": tr.gateway_node_id,
|
||||
"done": tr.done,
|
||||
"forward_hops": forward_list,
|
||||
"reverse_hops": reverse_list,
|
||||
})
|
||||
tr_groups.append(
|
||||
{
|
||||
"index": idx,
|
||||
"gateway_node_id": tr.gateway_node_id,
|
||||
"done": tr.done,
|
||||
"forward_hops": forward_list,
|
||||
"reverse_hops": reverse_list,
|
||||
}
|
||||
)
|
||||
|
||||
# --------------------------------------------
|
||||
# Compute UNIQUE paths + counts + winning path
|
||||
@@ -762,7 +802,8 @@ async def api_traceroute(request):
|
||||
|
||||
forward_paths = []
|
||||
reverse_paths = []
|
||||
winning_paths = []
|
||||
winning_forward_paths = []
|
||||
winning_reverse_paths = []
|
||||
|
||||
for tr in tr_groups:
|
||||
f = tuple(tr["forward_hops"])
|
||||
@@ -775,7 +816,10 @@ async def api_traceroute(request):
|
||||
reverse_paths.append(r)
|
||||
|
||||
if tr["done"]:
|
||||
winning_paths.append(f)
|
||||
if tr["forward_hops"]:
|
||||
winning_forward_paths.append(f)
|
||||
if tr["reverse_hops"]:
|
||||
winning_reverse_paths.append(r)
|
||||
|
||||
# Deduplicate
|
||||
unique_forward_paths = sorted(set(forward_paths))
|
||||
@@ -791,23 +835,48 @@ async def api_traceroute(request):
|
||||
|
||||
unique_reverse_paths_json = [list(p) for p in unique_reverse_paths]
|
||||
|
||||
winning_paths_json = [list(p) for p in set(winning_paths)]
|
||||
from_node_id = packet.from_node_id
|
||||
to_node_id = packet.to_node_id
|
||||
winning_forward_with_endpoints = []
|
||||
for path in set(winning_forward_paths):
|
||||
full_path = list(path)
|
||||
if from_node_id is not None and (not full_path or full_path[0] != from_node_id):
|
||||
full_path = [from_node_id, *full_path]
|
||||
if to_node_id is not None and (not full_path or full_path[-1] != to_node_id):
|
||||
full_path = [*full_path, to_node_id]
|
||||
winning_forward_with_endpoints.append(full_path)
|
||||
|
||||
winning_reverse_with_endpoints = []
|
||||
for path in set(winning_reverse_paths):
|
||||
full_path = list(path)
|
||||
if to_node_id is not None and (not full_path or full_path[0] != to_node_id):
|
||||
full_path = [to_node_id, *full_path]
|
||||
if from_node_id is not None and (not full_path or full_path[-1] != from_node_id):
|
||||
full_path = [*full_path, from_node_id]
|
||||
winning_reverse_with_endpoints.append(full_path)
|
||||
|
||||
winning_paths_json = {
|
||||
"forward": winning_forward_with_endpoints,
|
||||
"reverse": winning_reverse_with_endpoints,
|
||||
}
|
||||
|
||||
# --------------------------------------------
|
||||
# Final API output
|
||||
# --------------------------------------------
|
||||
return web.json_response({
|
||||
"packet": {
|
||||
"id": packet.id,
|
||||
"from": packet.from_node_id,
|
||||
"to": packet.to_node_id,
|
||||
"channel": packet.channel,
|
||||
},
|
||||
"traceroute_packets": tr_groups,
|
||||
"unique_forward_paths": unique_forward_paths_json,
|
||||
"unique_reverse_paths": unique_reverse_paths_json,
|
||||
"winning_paths": winning_paths_json,
|
||||
})
|
||||
return web.json_response(
|
||||
{
|
||||
"packet": {
|
||||
"id": packet.id,
|
||||
"from": packet.from_node_id,
|
||||
"to": packet.to_node_id,
|
||||
"channel": packet.channel,
|
||||
},
|
||||
"traceroute_packets": tr_groups,
|
||||
"unique_forward_paths": unique_forward_paths_json,
|
||||
"unique_reverse_paths": unique_reverse_paths_json,
|
||||
"winning_paths": winning_paths_json,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@routes.get("/api/stats/top")
|
||||
@@ -823,90 +892,264 @@ async def api_stats_top(request):
|
||||
limit = min(int(request.query.get("limit", 20)), 100)
|
||||
offset = int(request.query.get("offset", 0))
|
||||
|
||||
params = {
|
||||
"period_type": period_type,
|
||||
"length": length,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
}
|
||||
multiplier = 3600 if period_type == "hour" else 86400
|
||||
window_us = length * multiplier * 1_000_000
|
||||
|
||||
channel_filter = ""
|
||||
if channel:
|
||||
channel_filter = "AND n.channel = :channel"
|
||||
params["channel"] = channel
|
||||
max_packet_import = select(func.max(PacketModel.import_time_us)).scalar_subquery()
|
||||
max_seen_import = select(func.max(PacketSeenModel.import_time_us)).scalar_subquery()
|
||||
|
||||
sql = f"""
|
||||
WITH sent AS (
|
||||
SELECT
|
||||
p.from_node_id AS node_id,
|
||||
COUNT(*) AS sent
|
||||
FROM packet p
|
||||
WHERE p.import_time_us >= (
|
||||
SELECT MAX(import_time_us) FROM packet
|
||||
) - (
|
||||
CASE
|
||||
WHEN :period_type = 'hour' THEN :length * 3600 * 1000000
|
||||
ELSE :length * 86400 * 1000000
|
||||
END
|
||||
)
|
||||
GROUP BY p.from_node_id
|
||||
),
|
||||
seen AS (
|
||||
SELECT
|
||||
p.from_node_id AS node_id,
|
||||
COUNT(*) AS seen
|
||||
FROM packet_seen ps
|
||||
JOIN packet p ON p.id = ps.packet_id
|
||||
WHERE ps.import_time_us >= (
|
||||
SELECT MAX(import_time_us) FROM packet_seen
|
||||
) - (
|
||||
CASE
|
||||
WHEN :period_type = 'hour' THEN :length * 3600 * 1000000
|
||||
ELSE :length * 86400 * 1000000
|
||||
END
|
||||
)
|
||||
GROUP BY p.from_node_id
|
||||
sent_cte = (
|
||||
select(PacketModel.from_node_id.label("node_id"), func.count().label("sent"))
|
||||
.where(PacketModel.import_time_us >= max_packet_import - window_us)
|
||||
.group_by(PacketModel.from_node_id)
|
||||
.cte("sent")
|
||||
)
|
||||
SELECT
|
||||
n.node_id,
|
||||
n.long_name,
|
||||
n.short_name,
|
||||
n.channel,
|
||||
COALESCE(s.sent, 0) AS sent,
|
||||
COALESCE(se.seen, 0) AS seen
|
||||
FROM node n
|
||||
LEFT JOIN sent s ON s.node_id = n.node_id
|
||||
LEFT JOIN seen se ON se.node_id = n.node_id
|
||||
WHERE 1=1
|
||||
{channel_filter}
|
||||
ORDER BY seen DESC
|
||||
LIMIT :limit OFFSET :offset
|
||||
"""
|
||||
|
||||
count_sql = f"""
|
||||
SELECT COUNT(*) FROM node n WHERE 1=1 {channel_filter}
|
||||
"""
|
||||
seen_cte = (
|
||||
select(PacketModel.from_node_id.label("node_id"), func.count().label("seen"))
|
||||
.select_from(PacketSeenModel)
|
||||
.join(PacketModel, PacketModel.id == PacketSeenModel.packet_id)
|
||||
.where(PacketSeenModel.import_time_us >= max_seen_import - window_us)
|
||||
.group_by(PacketModel.from_node_id)
|
||||
.cte("seen")
|
||||
)
|
||||
|
||||
query = (
|
||||
select(
|
||||
Node.node_id,
|
||||
Node.long_name,
|
||||
Node.short_name,
|
||||
Node.channel,
|
||||
func.coalesce(sent_cte.c.sent, 0).label("sent"),
|
||||
func.coalesce(seen_cte.c.seen, 0).label("seen"),
|
||||
)
|
||||
.select_from(Node)
|
||||
.outerjoin(sent_cte, sent_cte.c.node_id == Node.node_id)
|
||||
.outerjoin(seen_cte, seen_cte.c.node_id == Node.node_id)
|
||||
.order_by(func.coalesce(seen_cte.c.seen, 0).desc())
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
)
|
||||
|
||||
count_query = select(func.count()).select_from(Node)
|
||||
|
||||
if channel:
|
||||
query = query.where(Node.channel == channel)
|
||||
count_query = count_query.where(Node.channel == channel)
|
||||
|
||||
async with database.async_session() as session:
|
||||
rows = (await session.execute(text(sql), params)).all()
|
||||
total = (await session.execute(text(count_sql), params)).scalar() or 0
|
||||
rows = (await session.execute(query)).all()
|
||||
total = (await session.execute(count_query)).scalar() or 0
|
||||
|
||||
nodes = []
|
||||
for r in rows:
|
||||
avg = r.seen / max(r.sent, 1)
|
||||
nodes.append({
|
||||
"node_id": r.node_id,
|
||||
"long_name": r.long_name,
|
||||
"short_name": r.short_name,
|
||||
"channel": r.channel,
|
||||
"sent": r.sent,
|
||||
"seen": r.seen,
|
||||
"avg": round(avg, 2),
|
||||
})
|
||||
nodes.append(
|
||||
{
|
||||
"node_id": r.node_id,
|
||||
"long_name": r.long_name,
|
||||
"short_name": r.short_name,
|
||||
"channel": r.channel,
|
||||
"sent": r.sent,
|
||||
"seen": r.seen,
|
||||
"avg": round(avg, 2),
|
||||
}
|
||||
)
|
||||
|
||||
return web.json_response({
|
||||
"total": total,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
"nodes": nodes,
|
||||
})
|
||||
return web.json_response(
|
||||
{
|
||||
"total": total,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
"nodes": nodes,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@routes.get("/api/node/{node_id}/qr")
|
||||
async def api_node_qr(request):
|
||||
"""
|
||||
Generate a Meshtastic URL for importing the node as a contact.
|
||||
Returns the URL that can be used to generate a QR code.
|
||||
"""
|
||||
try:
|
||||
node_id_str = request.match_info["node_id"]
|
||||
node_id = int(node_id_str, 0)
|
||||
except (KeyError, ValueError):
|
||||
return web.json_response({"error": "Invalid node_id"}, status=400)
|
||||
|
||||
node = await store.get_node(node_id)
|
||||
if not node:
|
||||
return web.json_response({"error": "Node not found"}, status=404)
|
||||
|
||||
try:
|
||||
from meshtastic.protobuf.admin_pb2 import SharedContact
|
||||
from meshtastic.protobuf.mesh_pb2 import User
|
||||
|
||||
user = User()
|
||||
user.id = f"!{node_id:08x}"
|
||||
if node.long_name:
|
||||
user.long_name = node.long_name
|
||||
if node.short_name:
|
||||
user.short_name = node.short_name
|
||||
if node.hw_model:
|
||||
try:
|
||||
from meshtastic.protobuf.mesh_pb2 import HardwareModel
|
||||
|
||||
hw_model_value = getattr(HardwareModel, node.hw_model.upper(), None)
|
||||
if hw_model_value is not None:
|
||||
user.hw_model = hw_model_value
|
||||
except (AttributeError, TypeError):
|
||||
pass
|
||||
|
||||
contact = SharedContact()
|
||||
contact.node_num = node_id
|
||||
contact.user.CopyFrom(user)
|
||||
contact.manually_verified = False
|
||||
|
||||
contact_bytes = contact.SerializeToString()
|
||||
import base64
|
||||
|
||||
contact_b64 = base64.b64encode(contact_bytes).decode("ascii")
|
||||
contact_b64url = contact_b64.replace("+", "-").replace("/", "_").rstrip("=")
|
||||
|
||||
meshtastic_url = f"https://meshtastic.org/v/#{contact_b64url}"
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
"node_id": node_id,
|
||||
"long_name": node.long_name,
|
||||
"short_name": node.short_name,
|
||||
"meshtastic_url": meshtastic_url,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error(f"Error generating QR URL for node {node_id}: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
return web.json_response({"error": f"Failed to generate URL: {str(e)}"}, status=500)
|
||||
|
||||
|
||||
@routes.get("/api/node/{node_id}/impersonation-check")
|
||||
async def api_node_impersonation_check(request):
|
||||
"""
|
||||
Check if a node has multiple different public keys, which could indicate impersonation.
|
||||
"""
|
||||
try:
|
||||
node_id_str = request.match_info["node_id"]
|
||||
node_id = int(node_id_str, 0)
|
||||
except (KeyError, ValueError):
|
||||
return web.json_response({"error": "Invalid node_id"}, status=400)
|
||||
|
||||
try:
|
||||
async with database.async_session() as session:
|
||||
result = await session.execute(
|
||||
select(NodePublicKey.public_key).where(NodePublicKey.node_id == node_id).distinct()
|
||||
)
|
||||
public_keys = result.scalars().all()
|
||||
|
||||
unique_key_count = len(public_keys)
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
"node_id": node_id,
|
||||
"unique_public_key_count": unique_key_count,
|
||||
"potential_impersonation": unique_key_count > 1,
|
||||
"public_keys": public_keys
|
||||
if unique_key_count <= 3
|
||||
else public_keys[:3] + ["..."],
|
||||
"warning": "Multiple different public keys detected. This node may be getting impersonated."
|
||||
if unique_key_count > 1
|
||||
else None,
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking impersonation for node {node_id}: {e}")
|
||||
return web.json_response({"error": "Failed to check impersonation"}, status=500)
|
||||
|
||||
|
||||
@routes.get("/api/coverage/{node_id}")
|
||||
async def api_coverage(request):
|
||||
try:
|
||||
node_id = int(request.match_info["node_id"], 0)
|
||||
except (KeyError, ValueError):
|
||||
return web.json_response({"error": "Invalid node_id"}, status=400)
|
||||
|
||||
if not ITM_AVAILABLE:
|
||||
return web.json_response(
|
||||
{"error": "Coverage requires pyitm. Run: pip install -r requirements.txt"},
|
||||
status=503,
|
||||
)
|
||||
|
||||
def parse_float(name, default):
|
||||
value = request.query.get(name)
|
||||
if value is None:
|
||||
return default
|
||||
try:
|
||||
return float(value)
|
||||
except ValueError as exc:
|
||||
raise web.HTTPBadRequest(
|
||||
text=json.dumps({"error": f"{name} must be a number"}),
|
||||
content_type="application/json",
|
||||
) from exc
|
||||
|
||||
try:
|
||||
freq_mhz = parse_float("freq_mhz", 907.0)
|
||||
tx_dbm = parse_float("tx_dbm", 20.0)
|
||||
tx_height_m = parse_float("tx_height_m", 5.0)
|
||||
rx_height_m = parse_float("rx_height_m", 1.5)
|
||||
radius_km = parse_float("radius_km", 40.0)
|
||||
step_km = parse_float("step_km", 0.25)
|
||||
reliability = parse_float("reliability", DEFAULT_RELIABILITY)
|
||||
threshold_dbm = parse_float("threshold_dbm", DEFAULT_THRESHOLD_DBM)
|
||||
except web.HTTPBadRequest as exc:
|
||||
raise exc
|
||||
|
||||
node = await store.get_node(node_id)
|
||||
if not node or not node.last_lat or not node.last_long:
|
||||
return web.json_response({"error": "Node not found or missing location"}, status=404)
|
||||
|
||||
lat = node.last_lat * 1e-7
|
||||
lon = node.last_long * 1e-7
|
||||
|
||||
mode = request.query.get("mode", "perimeter")
|
||||
if mode == "perimeter":
|
||||
perimeter = compute_perimeter(
|
||||
lat=round(lat, 7),
|
||||
lon=round(lon, 7),
|
||||
freq_mhz=round(freq_mhz, 3),
|
||||
tx_dbm=round(tx_dbm, 2),
|
||||
tx_height_m=round(tx_height_m, 2),
|
||||
rx_height_m=round(rx_height_m, 2),
|
||||
radius_km=round(radius_km, 2),
|
||||
step_km=round(step_km, 3),
|
||||
reliability=round(reliability, 3),
|
||||
threshold_dbm=round(threshold_dbm, 1),
|
||||
)
|
||||
return web.json_response(
|
||||
{"mode": "perimeter", "threshold_dbm": threshold_dbm, "perimeter": perimeter}
|
||||
)
|
||||
|
||||
points = compute_coverage(
|
||||
lat=round(lat, 7),
|
||||
lon=round(lon, 7),
|
||||
freq_mhz=round(freq_mhz, 3),
|
||||
tx_dbm=round(tx_dbm, 2),
|
||||
tx_height_m=round(tx_height_m, 2),
|
||||
rx_height_m=round(rx_height_m, 2),
|
||||
radius_km=round(radius_km, 2),
|
||||
step_km=round(step_km, 3),
|
||||
reliability=round(reliability, 3),
|
||||
)
|
||||
|
||||
min_dbm = DEFAULT_MIN_DBM
|
||||
max_dbm = DEFAULT_MAX_DBM
|
||||
if points:
|
||||
vals = [p[2] for p in points]
|
||||
min_dbm = min(min_dbm, min(vals))
|
||||
max_dbm = max(max_dbm, max(vals))
|
||||
|
||||
return web.json_response(
|
||||
{"mode": "heatmap", "min_dbm": min_dbm, "max_dbm": max_dbm, "points": points}
|
||||
)
|
||||
|
||||
@@ -48,7 +48,7 @@ dev = [
|
||||
# Linting
|
||||
target-version = "py313"
|
||||
line-length = 100
|
||||
extend-exclude = ["build", "dist", ".venv"]
|
||||
extend-exclude = ["build", "dist", ".venv", "meshtastic/protobuf", "nanopb_pb2.py"]
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F", "I", "UP", "B"] # pick your rulesets
|
||||
@@ -56,4 +56,4 @@ ignore = ["E501"] # example; let formatter handle line len
|
||||
|
||||
[tool.ruff.format]
|
||||
quote-style = "preserve"
|
||||
indent-style = "space"
|
||||
indent-style = "space"
|
||||
|
||||
@@ -24,6 +24,7 @@ MarkupSafe~=3.0.2
|
||||
|
||||
# Graphs / diagrams
|
||||
pydot~=3.0.4
|
||||
pyitm~=0.3
|
||||
|
||||
|
||||
#############################
|
||||
@@ -47,4 +48,4 @@ objgraph~=3.6.2
|
||||
# Testing
|
||||
pytest~=8.3.4
|
||||
pytest-aiohttp~=1.0.5
|
||||
pytest-asyncio~=0.24.0
|
||||
pytest-asyncio~=0.24.0
|
||||
|
||||
@@ -76,12 +76,22 @@ port = 1883
|
||||
username = meshdev
|
||||
password = large4cats
|
||||
|
||||
# Optional list of node IDs to ignore. Comma-separated.
|
||||
skip_node_ids =
|
||||
|
||||
# Optional list of secondary AES keys (base64), comma-separated.
|
||||
secondary_keys =
|
||||
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Database Configuration
|
||||
# -------------------------
|
||||
[database]
|
||||
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
|
||||
# SQLAlchemy async connection string.
|
||||
# Examples:
|
||||
# sqlite+aiosqlite:///packets.db
|
||||
# postgresql+asyncpg://user:pass@host:5432/meshview
|
||||
connection_string = sqlite+aiosqlite:///packets.db
|
||||
|
||||
|
||||
|
||||
126
scripts/update_meshtastic_protobufs.py
Normal file
126
scripts/update_meshtastic_protobufs.py
Normal file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def run(cmd, cwd=None):
|
||||
subprocess.run(cmd, cwd=cwd, check=True)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Update Meshtastic protobufs")
|
||||
parser.add_argument(
|
||||
"--repo",
|
||||
default="https://github.com/meshtastic/protobufs.git",
|
||||
help="Meshtastic protobufs repo URL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ref",
|
||||
default="master",
|
||||
help="Git ref to fetch (branch, tag, or commit)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--check",
|
||||
action="store_true",
|
||||
help="Only check if protobufs are up to date for the given ref",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
repo_root = Path(__file__).resolve().parents[1]
|
||||
out_root = repo_root
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="meshtastic-protobufs-") as tmp:
|
||||
tmp_path = Path(tmp)
|
||||
print(f"Cloning {args.repo} ({args.ref}) into {tmp_path}...")
|
||||
run(["git", "clone", "--depth", "1", "--branch", args.ref, args.repo, str(tmp_path)])
|
||||
upstream_rev = (
|
||||
subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=tmp_path).decode().strip()
|
||||
)
|
||||
|
||||
rev_file = out_root / "meshtastic" / "protobuf" / "UPSTREAM_REV.txt"
|
||||
current_rev = None
|
||||
if rev_file.exists():
|
||||
current_rev = rev_file.read_text(encoding="utf-8").strip()
|
||||
|
||||
if args.check:
|
||||
if current_rev == upstream_rev:
|
||||
print(f"Up to date: {current_rev}")
|
||||
return 0
|
||||
print(f"Out of date. Local: {current_rev or 'unknown'} / Upstream: {upstream_rev}")
|
||||
return 1
|
||||
|
||||
proto_root = None
|
||||
# Common locations in the meshtastic/protobufs repo
|
||||
candidates = [
|
||||
tmp_path / "meshtastic" / "protobuf",
|
||||
tmp_path / "protobufs",
|
||||
tmp_path / "protobuf",
|
||||
tmp_path / "proto",
|
||||
]
|
||||
for candidate in candidates:
|
||||
if candidate.exists() and list(candidate.glob("*.proto")):
|
||||
proto_root = candidate
|
||||
break
|
||||
|
||||
if proto_root is None:
|
||||
# Fallback: search for any directory containing .proto files
|
||||
for candidate in tmp_path.rglob("*.proto"):
|
||||
proto_root = candidate.parent
|
||||
break
|
||||
|
||||
if proto_root is None:
|
||||
print("Proto root not found in cloned repo.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
protos = sorted(proto_root.glob("*.proto"))
|
||||
if not protos:
|
||||
print(f"No .proto files found in {proto_root}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
rel_protos = [str(p.relative_to(tmp_path)) for p in protos]
|
||||
|
||||
protoc = shutil.which("protoc")
|
||||
if protoc:
|
||||
cmd = [
|
||||
protoc,
|
||||
f"-I{tmp_path}",
|
||||
f"--python_out={out_root}",
|
||||
*rel_protos,
|
||||
]
|
||||
print("Running protoc...")
|
||||
run(cmd, cwd=tmp_path)
|
||||
else:
|
||||
try:
|
||||
import grpc_tools.protoc # noqa: F401
|
||||
except Exception:
|
||||
print(
|
||||
"protoc not found. Install it with your package manager, "
|
||||
"or install grpcio-tools and re-run.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return 1
|
||||
cmd = [
|
||||
sys.executable,
|
||||
"-m",
|
||||
"grpc_tools.protoc",
|
||||
f"-I{tmp_path}",
|
||||
f"--python_out={out_root}",
|
||||
*rel_protos,
|
||||
]
|
||||
print("Running grpc_tools.protoc...")
|
||||
run(cmd, cwd=tmp_path)
|
||||
|
||||
rev_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
rev_file.write_text(upstream_rev + "\n", encoding="utf-8")
|
||||
|
||||
print("Protobufs updated in meshtastic/protobuf/.")
|
||||
print("Review changes and commit them if desired.")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
42
startdb.py
42
startdb.py
@@ -7,9 +7,11 @@ import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from sqlalchemy import delete
|
||||
from sqlalchemy.engine.url import make_url
|
||||
|
||||
from meshview import migrations, models, mqtt_database, mqtt_reader, mqtt_store
|
||||
from meshview.config import CONFIG
|
||||
from meshview.deps import check_optional_deps
|
||||
|
||||
# -------------------------
|
||||
# Basic logging configuration
|
||||
@@ -65,18 +67,16 @@ async def backup_database(database_url: str, backup_dir: str = ".") -> None:
|
||||
backup_dir: Directory to store backups (default: current directory)
|
||||
"""
|
||||
try:
|
||||
# Extract database file path from connection string
|
||||
# Format: sqlite+aiosqlite:///path/to/db.db
|
||||
if not database_url.startswith("sqlite"):
|
||||
url = make_url(database_url)
|
||||
if not url.drivername.startswith("sqlite"):
|
||||
cleanup_logger.warning("Backup only supported for SQLite databases")
|
||||
return
|
||||
|
||||
db_path = database_url.split("///", 1)[1] if "///" in database_url else None
|
||||
if not db_path:
|
||||
if not url.database or url.database == ":memory:":
|
||||
cleanup_logger.error("Could not extract database path from connection string")
|
||||
return
|
||||
|
||||
db_file = Path(db_path)
|
||||
db_file = Path(url.database)
|
||||
if not db_file.exists():
|
||||
cleanup_logger.error(f"Database file not found: {db_file}")
|
||||
return
|
||||
@@ -153,11 +153,11 @@ async def daily_cleanup_at(
|
||||
cleanup_logger.info("Waiting 60 seconds for backup to complete...")
|
||||
await asyncio.sleep(60)
|
||||
|
||||
# Local-time cutoff as string for SQLite DATETIME comparison
|
||||
cutoff = (datetime.datetime.now() - datetime.timedelta(days=days_to_keep)).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
)
|
||||
cleanup_logger.info(f"Running cleanup for records older than {cutoff}...")
|
||||
cutoff_dt = (
|
||||
datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_to_keep)
|
||||
).replace(tzinfo=None)
|
||||
cutoff_us = int(cutoff_dt.timestamp() * 1_000_000)
|
||||
cleanup_logger.info(f"Running cleanup for records older than {cutoff_dt.isoformat()}...")
|
||||
|
||||
try:
|
||||
async with db_lock: # Pause ingestion
|
||||
@@ -168,7 +168,7 @@ async def daily_cleanup_at(
|
||||
# Packet
|
||||
# -------------------------
|
||||
result = await session.execute(
|
||||
delete(models.Packet).where(models.Packet.import_time < cutoff)
|
||||
delete(models.Packet).where(models.Packet.import_time_us < cutoff_us)
|
||||
)
|
||||
cleanup_logger.info(f"Deleted {result.rowcount} rows from Packet")
|
||||
|
||||
@@ -176,7 +176,9 @@ async def daily_cleanup_at(
|
||||
# PacketSeen
|
||||
# -------------------------
|
||||
result = await session.execute(
|
||||
delete(models.PacketSeen).where(models.PacketSeen.import_time < cutoff)
|
||||
delete(models.PacketSeen).where(
|
||||
models.PacketSeen.import_time_us < cutoff_us
|
||||
)
|
||||
)
|
||||
cleanup_logger.info(f"Deleted {result.rowcount} rows from PacketSeen")
|
||||
|
||||
@@ -184,7 +186,9 @@ async def daily_cleanup_at(
|
||||
# Traceroute
|
||||
# -------------------------
|
||||
result = await session.execute(
|
||||
delete(models.Traceroute).where(models.Traceroute.import_time < cutoff)
|
||||
delete(models.Traceroute).where(
|
||||
models.Traceroute.import_time_us < cutoff_us
|
||||
)
|
||||
)
|
||||
cleanup_logger.info(f"Deleted {result.rowcount} rows from Traceroute")
|
||||
|
||||
@@ -192,17 +196,19 @@ async def daily_cleanup_at(
|
||||
# Node
|
||||
# -------------------------
|
||||
result = await session.execute(
|
||||
delete(models.Node).where(models.Node.last_update < cutoff)
|
||||
delete(models.Node).where(models.Node.last_seen_us < cutoff_us)
|
||||
)
|
||||
cleanup_logger.info(f"Deleted {result.rowcount} rows from Node")
|
||||
|
||||
await session.commit()
|
||||
|
||||
if vacuum_db:
|
||||
if vacuum_db and mqtt_database.engine.dialect.name == "sqlite":
|
||||
cleanup_logger.info("Running VACUUM...")
|
||||
async with mqtt_database.engine.begin() as conn:
|
||||
await conn.exec_driver_sql("VACUUM;")
|
||||
cleanup_logger.info("VACUUM completed.")
|
||||
elif vacuum_db:
|
||||
cleanup_logger.info("VACUUM skipped (not supported for this database).")
|
||||
|
||||
cleanup_logger.info("Cleanup completed successfully.")
|
||||
cleanup_logger.info("Ingestion resumed after cleanup.")
|
||||
@@ -232,6 +238,7 @@ async def load_database_from_mqtt(
|
||||
# Main function
|
||||
# -------------------------
|
||||
async def main():
|
||||
check_optional_deps()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize database
|
||||
@@ -260,6 +267,9 @@ async def main():
|
||||
await mqtt_database.create_tables()
|
||||
logger.info("Database tables created")
|
||||
|
||||
# Load MQTT gateway cache after DB init/migrations
|
||||
await mqtt_store.load_gateway_cache()
|
||||
|
||||
finally:
|
||||
# Clear migration in progress flag
|
||||
logger.info("Clearing migration status...")
|
||||
|
||||
Reference in New Issue
Block a user