Compare commits

...

6 Commits

Author SHA1 Message Date
l5y 546e009867 address missing id field ingestor bug (#469)
* address missing id field ingestor bug

* cover missing unit test vectors

* cover missing unit test vectors
2025-11-19 08:22:24 +01:00
l5y be46963744 merge secondary channels by name (#468)
* merge secondary channels by name

* cover missing unit test vectors
2025-11-18 18:33:02 +01:00
l5y 8f7adba65a rate limit host device telemetry (#467)
* rate limit host device telemetry

* Spec: add more unit tests
2025-11-18 18:04:40 +01:00
l5y e8b38ed65a add traceroutes to frontend (#466)
* add traceroutes to frontend

* Spec: add more unit tests
2025-11-18 13:12:14 +01:00
l5y 700fcef33f feat: implement traceroute app packet handling across the stack (#463)
* feat: implement traceroute app packet handling across the stack

* run linter

* tests: fix

* Spec: add more unit tests
2025-11-18 11:23:46 +01:00
l5y b23d864f1d Bump version and update changelog (#462)
* chore: bump version to 0.5.6 everywhere

* docs: update changelog

* chore: bump version to 0.5.6 everywhere
2025-11-16 17:38:41 +01:00
27 changed files with 2024 additions and 36 deletions
+28
View File
@@ -1,5 +1,33 @@
# CHANGELOG
## v0.5.5
* Added comprehensive helper unit tests by @l5yth in <https://github.com/l5yth/potato-mesh/pull/457>
* Added reaction-aware handling by @l5yth in <https://github.com/l5yth/potato-mesh/pull/455>
* Env: add map zoom by @l5yth in <https://github.com/l5yth/potato-mesh/pull/454>
* Charts: render aggregated telemetry charts for all nodes by @l5yth in <https://github.com/l5yth/potato-mesh/pull/453>
* Nodes: render charts detail pages as overlay by @l5yth in <https://github.com/l5yth/potato-mesh/pull/452>
* Fix telemetry parsing for charts by @l5yth in <https://github.com/l5yth/potato-mesh/pull/451>
* Nodes: improve charts on detail pages by @l5yth in <https://github.com/l5yth/potato-mesh/pull/450>
* Nodes: add charts to detail pages by @l5yth in <https://github.com/l5yth/potato-mesh/pull/449>
* Aggregate frontend snapshots across views by @l5yth in <https://github.com/l5yth/potato-mesh/pull/447>
* Remove added 1 if reply with emoji by @Alexkurd in <https://github.com/l5yth/potato-mesh/pull/443>
* Refine node detail view layout by @l5yth in <https://github.com/l5yth/potato-mesh/pull/442>
* Enable map centering from node table coordinates by @l5yth in <https://github.com/l5yth/potato-mesh/pull/439>
* Add node detail route and page by @l5yth in <https://github.com/l5yth/potato-mesh/pull/441>
* Ensure Meshtastic nodeinfo patch runs before importing interfaces by @l5yth in <https://github.com/l5yth/potato-mesh/pull/440>
* Filter zero-valued fields from API responses by @l5yth in <https://github.com/l5yth/potato-mesh/pull/438>
* Add debug payload tracing and ignored packet logging by @l5yth in <https://github.com/l5yth/potato-mesh/pull/437>
* Tighten map auto-fit behaviour by @l5yth in <https://github.com/l5yth/potato-mesh/pull/435>
* Fetch encrypted chat log entries for log tab by @l5yth in <https://github.com/l5yth/potato-mesh/pull/434>
* Add encrypted filter to messages API by @l5yth in <https://github.com/l5yth/potato-mesh/pull/432>
* Guard NodeInfo handler against missing IDs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/431>
* Add standalone full-screen map, chat, and nodes views by @l5yth in <https://github.com/l5yth/potato-mesh/pull/429>
* Ensure chat history fetches full message limit by @l5yth in <https://github.com/l5yth/potato-mesh/pull/428>
* Fix ingestion of nodeinfo packets missing ids (#426) by @l5yth in <https://github.com/l5yth/potato-mesh/pull/427>
* Chore: update license headers by @l5yth in <https://github.com/l5yth/potato-mesh/pull/424>
* Chore: bump version to 0.5.5 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/423>
## v0.5.4
* Handle naming when primary channel has a name by @l5yth in <https://github.com/l5yth/potato-mesh/pull/422>
+1 -1
View File
@@ -18,7 +18,7 @@ The ``data.mesh`` module exposes helpers for reading Meshtastic node and
message information before forwarding it to the accompanying web application.
"""
VERSION = "0.5.5"
VERSION = "0.5.6"
"""Semantic version identifier shared with the dashboard and front-end."""
__version__ = VERSION
+3
View File
@@ -284,6 +284,9 @@ def main(existing_interface=None) -> None:
active_candidate = resolved_target
interfaces._ensure_radio_metadata(iface)
interfaces._ensure_channel_metadata(iface)
handlers.register_host_node_id(
interfaces._extract_host_node_id(iface)
)
retry_delay = max(0.0, config._RECONNECT_INITIAL_DELAY_SECS)
initial_snapshot_sent = False
if not announced_target and resolved_target:
+229
View File
@@ -20,6 +20,7 @@ import base64
import contextlib
import importlib
import json
import math
import sys
import threading
import time
@@ -35,6 +36,15 @@ _IGNORED_PACKET_LOG_PATH = Path(__file__).resolve().parents[2] / "ignored.txt"
_IGNORED_PACKET_LOCK = threading.Lock()
"""Lock guarding writes to :data:`_IGNORED_PACKET_LOG_PATH`."""
_HOST_TELEMETRY_INTERVAL_SECS = 60 * 60
"""Minimum interval between accepted host telemetry packets."""
_host_node_id: str | None = None
"""Canonical ``!xxxxxxxx`` identifier for the connected host device."""
_host_telemetry_last_rx: int | None = None
"""Receive timestamp of the last accepted host telemetry packet."""
def _ignored_packet_default(value: object) -> object:
"""Return a JSON-serialisable representation for ignored packet data."""
@@ -90,6 +100,50 @@ from .serialization import (
)
def register_host_node_id(node_id: str | None) -> None:
    """Record the canonical identifier for the connected host device.

    Parameters:
        node_id: Identifier reported by the connected device. ``None`` clears
            the current host assignment.
    """
    global _host_node_id, _host_telemetry_last_rx
    resolved = _canonical_node_id(node_id)
    # Any change of host resets the telemetry throttle window as well.
    _host_node_id = resolved
    _host_telemetry_last_rx = None
    if not resolved:
        return
    config._debug_log(
        "Registered host device node id",
        context="handlers.host_device",
        host_node_id=resolved,
    )
def host_node_id() -> str | None:
    """Return the canonical identifier for the connected host device.

    Returns ``None`` when no host device has been registered via
    :func:`register_host_node_id`.
    """
    return _host_node_id
def _mark_host_telemetry_seen(rx_time: int) -> None:
    """Update the last receive time for the host telemetry window.

    Parameters:
        rx_time: Receive timestamp (epoch seconds) of the accepted host
            telemetry packet; starts a new throttle window.
    """
    global _host_telemetry_last_rx
    _host_telemetry_last_rx = rx_time
def _host_telemetry_suppressed(rx_time: int) -> tuple[bool, int]:
    """Return suppression state and minutes remaining for host telemetry.

    Parameters:
        rx_time: Receive timestamp (epoch seconds) of the candidate packet.

    Returns:
        ``(suppressed, minutes_remaining)`` — ``minutes_remaining`` is 0
        whenever the packet is not suppressed.
    """
    last_seen = _host_telemetry_last_rx
    if last_seen is not None:
        window_close = last_seen + _HOST_TELEMETRY_INTERVAL_SECS
        secs_left = window_close - rx_time
        if secs_left > 0:
            # Round up so a throttled packet never reports "0 minutes left".
            return True, int(math.ceil(secs_left / 60.0))
    return False, 0
def _radio_metadata_fields() -> dict[str, object]:
"""Return the shared radio metadata fields for payload enrichment."""
@@ -372,6 +426,132 @@ def base64_payload(payload_bytes: bytes | None) -> str | None:
return base64.b64encode(payload_bytes).decode("ascii")
def _normalize_trace_hops(hops_value) -> list[int]:
    """Coerce hop entries to integers while preserving order.

    Parameters:
        hops_value: A single hop, a list of hops, or ``None``. Each entry may
            be a raw identifier or a mapping carrying one under a
            ``node_id``/``nodeId``/``id``/``num`` key.

    Returns:
        The hop entries that could be coerced to integer node numbers, in
        their original order; unusable entries are dropped.
    """
    if hops_value is None:
        return []
    entries = hops_value if isinstance(hops_value, list) else [hops_value]
    result: list[int] = []
    for entry in entries:
        raw = entry
        if isinstance(entry, Mapping):
            raw = _first(entry, "node_id", "nodeId", "id", "num", default=None)
        # Try the canonical "!xxxxxxxx" form first, then a plain integer.
        canonical = _canonical_node_id(raw)
        numeric = _node_num_from_id(canonical or raw)
        if numeric is None:
            numeric = _coerce_int(raw)
        if numeric is not None:
            result.append(numeric)
    return result
def store_traceroute_packet(packet: Mapping, decoded: Mapping) -> None:
    """Persist traceroute details and hop path to the API.

    Parameters:
        packet: Raw packet mapping as received from the radio.
        decoded: Decoded payload section of ``packet``.

    Queues a POST to ``/api/traces``. Packets that carry neither a packet id,
    a request id, nor any hops are recorded as ignored instead.
    """
    traceroute_section = (
        decoded.get("traceroute") if isinstance(decoded, Mapping) else None
    )
    # The request id may live in the traceroute section or directly on the
    # decoded payload (older firmware uses "req").
    request_id = _coerce_int(
        _first(
            traceroute_section,
            "requestId",
            "request_id",
            default=_first(decoded, "req", "requestId", "request_id", default=None),
        )
    )
    pkt_id = _coerce_int(_first(packet, "id", "packet_id", "packetId", default=None))
    if pkt_id is None:
        # Fall back so the stored row still has a stable identifier.
        pkt_id = request_id
    rx_time = _coerce_int(_first(packet, "rxTime", "rx_time", default=time.time()))
    if rx_time is None:
        rx_time = int(time.time())
    # Source/destination: prefer decoded fields, then packet-level ids.
    src = _coerce_int(
        _first(
            decoded,
            "src",
            "source",
            default=_first(packet, "fromId", "from_id", "from", default=None),
        )
    )
    dest = _coerce_int(
        _first(
            decoded,
            "dest",
            "destination",
            default=_first(packet, "toId", "to_id", "to", default=None),
        )
    )
    metrics = traceroute_section if isinstance(traceroute_section, Mapping) else {}
    # Link metrics prefer the traceroute section, falling back to the packet.
    rssi = _coerce_int(
        _first(metrics, "rssi", default=_first(packet, "rssi", "rx_rssi", "rxRssi"))
    )
    snr = _coerce_float(
        _first(metrics, "snr", default=_first(packet, "snr", "rx_snr", "rxSnr"))
    )
    elapsed_ms = _coerce_int(
        _first(metrics, "elapsed_ms", "latency_ms", "latencyMs", default=None)
    )
    # Candidate hop lists in priority order; later candidates only contribute
    # hops not already seen (first occurrence wins).
    hop_candidates = (
        _first(metrics, "hops", default=None),
        _first(metrics, "path", default=None),
        _first(metrics, "route", default=None),
        _first(decoded, "hops", default=None),
        _first(decoded, "path", default=None),
        (
            _first(traceroute_section, "route", default=None)
            if isinstance(traceroute_section, Mapping)
            else None
        ),
    )
    hops: list[int] = []
    seen_hops: set[int] = set()
    for candidate in hop_candidates:
        for hop in _normalize_trace_hops(candidate):
            if hop in seen_hops:
                continue
            seen_hops.add(hop)
            hops.append(hop)
    if pkt_id is None and request_id is None and not hops:
        # Nothing identifies this trace; keep a record for diagnostics.
        _record_ignored_packet(packet, reason="traceroute-missing-identifiers")
        return
    payload = {
        "id": pkt_id,
        "request_id": request_id,
        "src": src,
        "dest": dest,
        "rx_time": rx_time,
        "rx_iso": _iso(rx_time),
        "hops": hops,
        "rssi": rssi,
        "snr": snr,
        "elapsed_ms": elapsed_ms,
    }
    _queue_post_json(
        "/api/traces",
        _apply_radio_metadata(payload),
        priority=queue._TRACE_POST_PRIORITY,
    )
    if config.DEBUG:
        config._debug_log(
            "Queued traceroute payload",
            context="handlers.store_traceroute_packet",
            request_id=request_id,
            src=src,
            dest=dest,
            hop_count=len(hops),
        )
def store_telemetry_packet(packet: Mapping, decoded: Mapping) -> None:
"""Persist telemetry metrics extracted from a packet.
@@ -408,6 +588,19 @@ def store_telemetry_packet(packet: Mapping, decoded: Mapping) -> None:
rx_time = int(time.time())
rx_iso = _iso(rx_time)
host_id = host_node_id()
if host_id is not None and node_id == host_id:
suppressed, minutes_remaining = _host_telemetry_suppressed(rx_time)
if suppressed:
config._debug_log(
"Suppressed host telemetry update",
context="handlers.store_telemetry",
host_node_id=host_id,
minutes_remaining=minutes_remaining,
)
return
_mark_host_telemetry_seen(rx_time)
telemetry_time = _coerce_int(_first(telemetry_section, "time", default=None))
channel = _coerce_int(_first(decoded, "channel", default=None))
@@ -1084,6 +1277,40 @@ def store_packet_dict(packet: Mapping) -> None:
store_telemetry_packet(packet, decoded)
return
traceroute_section = (
decoded.get("traceroute") if isinstance(decoded, Mapping) else None
)
traceroute_port_ints: set[int] = set()
for module_name in (
"meshtastic.portnums_pb2",
"meshtastic.protobuf.portnums_pb2",
):
module = sys.modules.get(module_name)
if module is None:
with contextlib.suppress(ModuleNotFoundError):
module = importlib.import_module(module_name)
if module is None:
continue
portnum_enum = getattr(module, "PortNum", None)
value_lookup = getattr(portnum_enum, "Value", None) if portnum_enum else None
if callable(value_lookup):
with contextlib.suppress(Exception):
candidate = _coerce_int(value_lookup("TRACEROUTE_APP"))
if candidate is not None:
traceroute_port_ints.add(candidate)
constant_value = getattr(module, "TRACEROUTE_APP", None)
candidate = _coerce_int(constant_value)
if candidate is not None:
traceroute_port_ints.add(candidate)
if (
portnum == "TRACEROUTE_APP"
or (portnum_int is not None and portnum_int in traceroute_port_ints)
or isinstance(traceroute_section, Mapping)
):
store_traceroute_packet(packet, decoded)
return
if portnum in {"5", "NODEINFO_APP"}:
store_nodeinfo_packet(packet, decoded)
return
@@ -1332,8 +1559,10 @@ def on_receive(packet, interface) -> None:
__all__ = [
"_queue_post_json",
"host_node_id",
"last_packet_monotonic",
"on_receive",
"register_host_node_id",
"store_neighborinfo_packet",
"store_nodeinfo_packet",
"store_packet_dict",
+107 -1
View File
@@ -113,6 +113,47 @@ def _candidate_node_id(mapping: Mapping | None) -> str | None:
return None
def _extract_host_node_id(iface) -> str | None:
    """Return the canonical node identifier for the connected host device.

    Parameters:
        iface: Meshtastic interface object; ``None`` yields ``None``.

    Probes several well-known attributes (``myInfo``, ``my_node_info``,
    ``myNodeInfo``, ``my_node``, ``localNode``), accepting either mappings or
    zero-argument callables returning one, before falling back to the bare
    ``myNodeNum`` attribute.
    """
    if iface is None:
        return None

    def _as_mapping(candidate) -> Mapping | None:
        # Some interfaces expose e.g. ``localNode`` as a factory callable
        # rather than a mapping, so try calling it as a last resort.
        mapping = _ensure_mapping(candidate)
        if mapping is not None:
            return mapping
        if callable(candidate):
            with contextlib.suppress(Exception):
                return _ensure_mapping(candidate())
        return None

    candidates: list[Mapping] = []
    for attr in ("myInfo", "my_node_info", "myNodeInfo", "my_node", "localNode"):
        mapping = _as_mapping(getattr(iface, attr, None))
        if mapping is None:
            continue
        candidates.append(mapping)
        # A nested "info" mapping may carry the id (see myInfo={"info": ...}).
        nested_info = _ensure_mapping(mapping.get("info"))
        if nested_info:
            candidates.append(nested_info)
    for mapping in candidates:
        node_id = _candidate_node_id(mapping)
        if node_id:
            return node_id
        # Fall back to numeric node-number keys on the same mapping.
        for key in ("myNodeNum", "my_node_num", "myNodeId", "my_node_id"):
            node_id = serialization._canonical_node_id(mapping.get(key))
            if node_id:
                return node_id
    node_id = serialization._canonical_node_id(getattr(iface, "myNodeNum", None))
    if node_id:
        return node_id
    return None
def _normalise_nodeinfo_packet(packet) -> dict | None:
"""Return a dictionary view of ``packet`` with a guaranteed ``id`` when known."""
@@ -132,6 +173,17 @@ def _normalise_nodeinfo_packet(packet) -> dict | None:
if node_id and normalised.get("id") != node_id:
normalised["id"] = node_id
decoded_section = _ensure_mapping(normalised.get("decoded"))
if decoded_section is not None:
decoded_dict = dict(decoded_section)
user_section = _ensure_mapping(decoded_dict.get("user"))
if user_section is not None:
user_dict = dict(user_section)
if node_id and user_dict.get("id") != node_id:
user_dict["id"] = node_id
decoded_dict["user"] = user_dict
normalised["decoded"] = decoded_dict
return normalised
@@ -161,8 +213,18 @@ def _patch_meshtastic_nodeinfo_handler() -> None:
with contextlib.suppress(Exception):
mesh_interface_module = importlib.import_module("meshtastic.mesh_interface")
safe_callback = original
if not getattr(original, "_potato_mesh_safe_wrapper", False):
module._onNodeInfoReceive = _build_safe_nodeinfo_callback(original)
safe_callback = _build_safe_nodeinfo_callback(original)
module._onNodeInfoReceive = safe_callback
if (
mesh_interface_module is not None
and getattr(mesh_interface_module, "_onNodeInfoReceive", None) is original
):
mesh_interface_module._onNodeInfoReceive = safe_callback
_patch_protocol_nodeinfo_callback(module, original, safe_callback)
_patch_protocol_nodeinfo_callback(mesh_interface_module, original, safe_callback)
_patch_nodeinfo_handler_class(mesh_interface_module, module)
@@ -186,6 +248,49 @@ def _build_safe_nodeinfo_callback(original):
return _safe_on_node_info_receive
def _replace_known_protocol_callback(protocol, replacement):
"""Return ``protocol`` with ``onReceive`` set to ``replacement``."""
replacer = getattr(protocol, "_replace", None)
if callable(replacer):
try:
return replacer(onReceive=replacement)
except Exception:
pass
protocol_cls = getattr(protocol, "__class__", None)
try:
return protocol_cls(
getattr(protocol, "name", None),
getattr(protocol, "protobufFactory", None),
replacement,
)
except Exception:
return protocol
def _patch_protocol_nodeinfo_callback(module, original, replacement) -> None:
    """Swap the NodeInfo protocol callback to ``replacement`` when needed.

    Parameters:
        module: Module exposing a ``protocols`` mapping (may be ``None``).
        original: The unsafe callback being replaced.
        replacement: Safe wrapper to install in its place.

    Entries are replaced when keyed by ``PortNum.NODEINFO_APP`` or when their
    ``onReceive`` is ``original``.
    """
    if module is None or replacement is None:
        return
    registry = getattr(module, "protocols", None)
    if not isinstance(registry, Mapping):
        return
    # Resolve the NODEINFO_APP key when the port-number enum is available.
    portnum_module = getattr(module, "portnums_pb2", None)
    enum_type = getattr(portnum_module, "PortNum", None)
    try:
        nodeinfo_port = getattr(enum_type, "NODEINFO_APP")
    except Exception:
        nodeinfo_port = None
    # Snapshot items so in-place replacement is safe while iterating.
    for port, entry in tuple(registry.items()):
        current_callback = getattr(entry, "onReceive", None)
        if port == nodeinfo_port or current_callback is original:
            registry[port] = _replace_known_protocol_callback(entry, replacement)
def _update_nodeinfo_handler_aliases(original, replacement) -> None:
"""Ensure Meshtastic modules reference the patched ``NodeInfoHandler``."""
@@ -771,6 +876,7 @@ __all__ = [
"NoAvailableMeshInterface",
"_ensure_channel_metadata",
"_ensure_radio_metadata",
"_extract_host_node_id",
"_DummySerialInterface",
"_DEFAULT_TCP_PORT",
"_DEFAULT_TCP_TARGET",
+2
View File
@@ -75,6 +75,7 @@ def _payload_key_value_pairs(payload: Mapping[str, object]) -> str:
_MESSAGE_POST_PRIORITY = 10
_NEIGHBOR_POST_PRIORITY = 20
_TRACE_POST_PRIORITY = 25
_POSITION_POST_PRIORITY = 30
_TELEMETRY_POST_PRIORITY = 40
_NODE_POST_PRIORITY = 50
@@ -261,6 +262,7 @@ __all__ = [
"_NEIGHBOR_POST_PRIORITY",
"_NODE_POST_PRIORITY",
"_POSITION_POST_PRIORITY",
"_TRACE_POST_PRIORITY",
"_TELEMETRY_POST_PRIORITY",
"_clear_post_queue",
"_drain_post_queue",
+38
View File
@@ -0,0 +1,38 @@
-- Copyright © 2025-26 l5yth & contributors
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- Traceroute observations reported by the mesh ingestor (/api/traces).
-- "id" is the packet id; the ingestor substitutes the request id when the
-- packet id is missing.
CREATE TABLE IF NOT EXISTS traces (
  id INTEGER PRIMARY KEY,
  request_id INTEGER,
  src INTEGER,
  dest INTEGER,
  rx_time INTEGER NOT NULL,
  rx_iso TEXT NOT NULL,
  rssi INTEGER,
  snr REAL,
  elapsed_ms INTEGER
);
-- Ordered hop path for each trace; hop_index preserves payload order.
-- NOTE(review): ON DELETE CASCADE only fires when the connection enables the
-- foreign_keys pragma — confirm the database opener does so.
CREATE TABLE IF NOT EXISTS trace_hops (
  id INTEGER PRIMARY KEY,
  trace_id INTEGER NOT NULL,
  hop_index INTEGER NOT NULL,
  node_id INTEGER NOT NULL,
  FOREIGN KEY(trace_id) REFERENCES traces(id) ON DELETE CASCADE
);
-- Indexes for time-window queries, request lookups, and hop/node joins.
CREATE INDEX IF NOT EXISTS idx_traces_rx_time ON traces(rx_time);
CREATE INDEX IF NOT EXISTS idx_traces_request ON traces(request_id);
CREATE INDEX IF NOT EXISTS idx_trace_hops_trace ON trace_hops(trace_id);
CREATE INDEX IF NOT EXISTS idx_trace_hops_node ON trace_hops(node_id);
BIN
View File
Binary file not shown.

After

Width:  |  Height:  |  Size: 202 KiB

+365
View File
@@ -13,6 +13,7 @@
# limitations under the License.
import base64
from collections import namedtuple
import enum
import importlib
import json
@@ -228,6 +229,60 @@ def test_snapshot_interval_defaults_to_60_seconds(mesh_module):
assert mesh.SNAPSHOT_SECS == 60
def test_extract_host_node_id_prefers_my_info_fields(mesh_module):
    """``myInfo`` node-number fields resolve to the canonical host id."""
    mesh = mesh_module

    class FakeInterface:
        def __init__(self):
            self.myInfo = {"my_node_num": 0x9E95CF60}

    assert mesh._extract_host_node_id(FakeInterface()) == "!9e95cf60"
def test_extract_host_node_id_from_nested_info(mesh_module):
mesh = mesh_module
class DummyInterface:
def __init__(self):
self.myInfo = {"info": {"id": "!cafebabe"}}
iface = DummyInterface()
assert mesh._extract_host_node_id(iface) == "!cafebabe"
def test_extract_host_node_id_from_callable(mesh_module):
mesh = mesh_module
class CallableNoDict:
__slots__ = ()
def __call__(self):
return {"id": "!f00ba4"}
class DummyInterface:
def __init__(self):
self.localNode = CallableNoDict()
iface = DummyInterface()
assert mesh._extract_host_node_id(iface) == "!00f00ba4"
def test_extract_host_node_id_from_my_node_num_attribute(mesh_module):
    """A bare ``myNodeNum`` attribute is the last-resort host id source."""
    mesh = mesh_module

    class FakeInterface:
        def __init__(self):
            self.myNodeNum = 0xDEADBEEF

    assert mesh._extract_host_node_id(FakeInterface()) == "!deadbeef"
@pytest.mark.parametrize("value", ["mock", "Mock", " disabled "])
def test_create_serial_interface_allows_mock(mesh_module, value):
mesh = mesh_module
@@ -1227,6 +1282,162 @@ def test_interfaces_patch_handles_preimported_serial():
sys.modules[name] = module
def test_nodeinfo_patch_updates_known_protocols(monkeypatch):
"""Ensure NodeInfo protocol callbacks are replaced with safe wrappers."""
from data.mesh_ingestor import interfaces
Protocol = namedtuple("Protocol", ("name", "protobufFactory", "onReceive"))
callbacks: list[dict] = []
def _unsafe_handler(iface, packet):
callbacks.append(packet)
user = packet["decoded"]["user"]
iface.nodes[user["id"]] = {"user": user}
return user["id"]
nodeinfo_value = 42
PortNum = enum.IntEnum("PortNum", {"NODEINFO_APP": nodeinfo_value})
portnums_pb2 = types.SimpleNamespace(PortNum=PortNum)
protocols = {PortNum.NODEINFO_APP: Protocol("user", object, _unsafe_handler)}
meshtastic_mod = types.ModuleType("meshtastic")
meshtastic_mod._onNodeInfoReceive = _unsafe_handler
meshtastic_mod.portnums_pb2 = portnums_pb2
meshtastic_mod.protocols = protocols
mesh_interface_mod = types.ModuleType("meshtastic.mesh_interface")
mesh_interface_mod.protocols = protocols
mesh_interface_mod.portnums_pb2 = portnums_pb2
monkeypatch.setitem(sys.modules, "meshtastic", meshtastic_mod)
monkeypatch.setitem(sys.modules, "meshtastic.mesh_interface", mesh_interface_mod)
monkeypatch.setattr(interfaces, "meshtastic", meshtastic_mod, raising=False)
interfaces._patch_meshtastic_nodeinfo_handler()
handler = meshtastic_mod.protocols[PortNum.NODEINFO_APP].onReceive
iface = types.SimpleNamespace(nodes={})
handler(iface, {"decoded": {"user": {"shortName": "anon"}}, "from": 0x01020304})
assert getattr(handler, "_potato_mesh_safe_wrapper", False)
assert callbacks, "Expected patched handler to call original callback"
assert iface.nodes["!01020304"]["user"]["id"] == "!01020304"
def test_nodeinfo_patch_updates_protocols_without_replace(monkeypatch):
"""Fallback protocol replacement path should still wrap unsafe callbacks."""
from data.mesh_ingestor import interfaces
class DummyProtocol:
def __init__(self, name, factory, on_receive):
self.name = name
self.protobufFactory = factory
self.onReceive = on_receive
callbacks: list[dict] = []
def _unsafe_handler(iface, packet):
callbacks.append(packet)
iface.nodes[packet["from"]] = {"user": packet["decoded"]["user"]}
return packet["from"]
nodeinfo_value = 7
PortNum = enum.IntEnum("PortNum", {"NODEINFO_APP": nodeinfo_value})
portnums_pb2 = types.SimpleNamespace(PortNum=PortNum)
protocol_obj = DummyProtocol("user", object, _unsafe_handler)
protocols = {
PortNum.NODEINFO_APP: protocol_obj,
99: DummyProtocol("other", object, lambda *_: None),
}
meshtastic_mod = types.ModuleType("meshtastic")
meshtastic_mod._onNodeInfoReceive = _unsafe_handler
meshtastic_mod.portnums_pb2 = portnums_pb2
meshtastic_mod.protocols = protocols
mesh_interface_mod = types.ModuleType("meshtastic.mesh_interface")
mesh_interface_mod.protocols = dict(protocols)
mesh_interface_mod.portnums_pb2 = portnums_pb2
mesh_interface_mod._onNodeInfoReceive = _unsafe_handler
monkeypatch.setitem(sys.modules, "meshtastic", meshtastic_mod)
monkeypatch.setitem(sys.modules, "meshtastic.mesh_interface", mesh_interface_mod)
monkeypatch.setattr(interfaces, "meshtastic", meshtastic_mod, raising=False)
interfaces._patch_meshtastic_nodeinfo_handler()
handler = meshtastic_mod.protocols[PortNum.NODEINFO_APP].onReceive
iface = types.SimpleNamespace(nodes={})
handler(iface, {"decoded": {"user": {"shortName": "anon"}}, "from": 0x01020304})
assert getattr(handler, "_potato_mesh_safe_wrapper", False)
assert callbacks, "Expected patched handler to call original callback"
assert callbacks[0]["decoded"]["user"]["id"] == "!01020304"
assert iface.nodes[0x01020304]["user"]["id"] == "!01020304"
assert (
getattr(mesh_interface_mod, "_onNodeInfoReceive").__name__ == handler.__name__
)
assert getattr(
mesh_interface_mod.protocols[nodeinfo_value].onReceive,
"_potato_mesh_safe_wrapper",
False,
)
def test_normalise_nodeinfo_packet_injects_decoded_user_id():
"""Ensure decoded user payloads inherit the inferred node id."""
from data.mesh_ingestor import interfaces
packet = {"decoded": {"user": {"shortName": "anon"}}, "from": 0x0A0B0C0D}
normalised = interfaces._normalise_nodeinfo_packet(packet)
assert normalised["id"] == "!0a0b0c0d"
assert normalised["decoded"]["user"]["id"] == "!0a0b0c0d"
def test_patch_protocol_nodeinfo_callback_without_portnum(monkeypatch):
"""Protocols lacking PortNum constants should still be wrapped."""
from data.mesh_ingestor import interfaces
captured: list[dict] = []
def _original(iface, packet):
captured.append(packet)
iface.nodes = {"observed": packet}
return packet.get("id")
class DummyProtocol:
def __init__(self, name, factory, on_receive):
self.name = name
self.protobufFactory = factory
self.onReceive = on_receive
module = types.SimpleNamespace(
protocols={123: DummyProtocol("user", object, _original)},
portnums_pb2=None,
)
safe_callback = interfaces._build_safe_nodeinfo_callback(_original)
interfaces._patch_protocol_nodeinfo_callback(module, _original, safe_callback)
handler = module.protocols[123].onReceive
iface = types.SimpleNamespace(nodes={})
handler(iface, {"decoded": {"user": {"shortName": "anon"}}, "from": 0x01020304})
assert getattr(handler, "_potato_mesh_safe_wrapper", False)
assert captured[0]["id"] == "!01020304"
assert iface.nodes["observed"]["decoded"]["user"]["id"] == "!01020304"
def test_store_packet_dict_ignores_non_text(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
@@ -1978,6 +2189,160 @@ def test_store_packet_dict_handles_environment_telemetry(mesh_module, monkeypatc
assert payload["modem_preset"] == "MediumFast"
def test_store_packet_dict_throttles_host_telemetry(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
logs = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
monkeypatch.setattr(
mesh.config,
"_debug_log",
lambda message, **metadata: logs.append((message, metadata)),
)
mesh.register_host_node_id("!9e95cf60")
base_packet = {
"id": 1_234,
"fromId": "!9e95cf60",
"decoded": {
"portnum": "TELEMETRY_APP",
"telemetry": {
"time": 1_000,
"deviceMetrics": {
"batteryLevel": 50,
},
},
},
}
mesh.store_packet_dict({**base_packet, "rxTime": 1_000})
mesh.store_packet_dict({**base_packet, "id": 1_235, "rxTime": 1_300})
mesh.store_packet_dict({**base_packet, "id": 1_236, "rxTime": 4_700})
assert len(captured) == 2
first_path, first_payload, _ = captured[0]
second_path, second_payload, _ = captured[1]
assert first_path == "/api/telemetry"
assert second_path == "/api/telemetry"
assert first_payload["id"] == 1_234
assert second_payload["id"] == 1_236
suppression_logs = [
entry for entry in logs if entry[0] == "Suppressed host telemetry update"
]
assert suppression_logs
assert suppression_logs[0][1]["host_node_id"] == "!9e95cf60"
assert suppression_logs[0][1]["minutes_remaining"] == 55
def test_store_packet_dict_handles_traceroute_packet(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
mesh.config.LORA_FREQ = 915
mesh.config.MODEM_PRESET = "LongFast"
packet = {
"id": 2_934_054_466,
"rxTime": 1_763_183_133,
"rssi": -70,
"snr": 10.25,
"fromId": "3664074452",
"decoded": {
"portnum": "PAXCOUNTER_APP",
"dest": "2660618080",
"traceroute": {
"requestId": 17,
"route": [3_663_643_096, "!beadf00d", "c0ffee99", 1_150_717_793],
"snrTowards": [42, -14, 41],
},
},
}
mesh.store_packet_dict(packet)
assert captured
path, payload, priority = captured[0]
assert path == "/api/traces"
assert priority == mesh._TRACE_POST_PRIORITY
assert payload["id"] == packet["id"]
assert payload["request_id"] == 17
assert payload["src"] == 3_664_074_452
assert payload["dest"] == 2_660_618_080
assert payload["rx_time"] == 1_763_183_133
assert payload["rx_iso"] == "2025-11-15T05:05:33Z"
assert payload["hops"] == [
3_663_643_096,
3_199_070_221,
3_237_998_233,
1_150_717_793,
]
assert payload["rssi"] == -70
assert payload["snr"] == pytest.approx(10.25)
assert "elapsed_ms" in payload
assert payload["lora_freq"] == 915
assert payload["modem_preset"] == "LongFast"
def test_traceroute_hop_normalization_supports_mappings(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
packet = {
"id": 1_111,
"decoded": {
"portnum": "TRACEROUTE_APP",
"traceroute": {
"requestId": 42,
"route": [{"node_id": "!beadf00d"}, {"num": "0xc0ffee99"}, {"id": 123}],
},
},
}
mesh.store_packet_dict(packet)
assert captured
_, payload, _ = captured[0]
assert payload["hops"] == [0xBEADF00D, 0xC0FFEE99, 123]
def test_traceroute_packet_without_identifiers_is_ignored(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
packet = {
"decoded": {
"portnum": "TRACEROUTE_APP",
"traceroute": {},
},
"rxTime": 123,
}
mesh.store_packet_dict(packet)
assert captured == []
def test_post_queue_prioritises_messages(mesh_module, monkeypatch):
mesh = mesh_module
mesh._clear_post_queue()
@@ -777,6 +777,44 @@ module PotatoMesh
private :resolve_numeric_metric
# Normalise a traceroute hop entry to a numeric node identifier.
#
# @param hop [Object] raw hop entry from the payload.
# @return [Integer, nil] coerced node ID or nil when the value is unusable.
def coerce_trace_node_id(hop)
  if hop.is_a?(Integer)
    hop
  elsif hop.is_a?(Numeric)
    hop.to_i
  elsif hop.is_a?(String)
    text = hop.strip
    if text.empty?
      nil
    elsif text.match?(/\A-?\d+\z/)
      # Plain decimal strings are parsed directly (base 10, strict).
      Integer(text, 10)
    else
      # Non-decimal strings (e.g. "!cafebabe") go through the canonical parser.
      parsed = canonical_node_parts(text)
      parsed ? parsed[1] : nil
    end
  elsif hop.is_a?(Hash)
    # Accept string and symbol keys, most specific name first.
    value = hop["node_id"] || hop[:node_id] || hop["id"] || hop[:id] || hop["num"] || hop[:num]
    coerce_trace_node_id(value)
  end
rescue ArgumentError
  nil
end
# Extract hop identifiers from a traceroute payload preserving order.
#
# @param hops_value [Object] raw hops array or path collection.
# @return [Array<Integer>] ordered list of coerced hop identifiers.
def normalize_trace_hops(hops_value)
  return [] if hops_value.nil?

  # Wrap scalars (including hashes) so single-hop payloads are accepted.
  entries = hops_value.is_a?(Array) ? hops_value : [hops_value]
  entries.each_with_object([]) do |entry, hops|
    hop_id = coerce_trace_node_id(entry)
    hops << hop_id unless hop_id.nil?
  end
end
def insert_telemetry(db, payload)
return unless payload.is_a?(Hash)
@@ -1206,6 +1244,74 @@ module PotatoMesh
})
end
# Persist a traceroute observation and its hop path.
#
# @param db [SQLite3::Database] open database handle.
# @param payload [Hash] traceroute payload as produced by the ingestor.
# @return [void]
def insert_trace(db, payload)
  return unless payload.is_a?(Hash)

  # Prefer the packet id; fall back to trace_id then request id so the row
  # still gets a stable primary key when the packet id is missing.
  trace_identifier = coerce_integer(payload["id"] || payload["packet_id"] || payload["packetId"])
  trace_identifier ||= coerce_integer(payload["trace_id"])
  request_id = coerce_integer(payload["request_id"] || payload["req"])
  trace_identifier ||= request_id
  now = Time.now.to_i
  rx_time = coerce_integer(payload["rx_time"])
  # Clamp missing or future timestamps (clock skew) to the current time.
  rx_time = now if rx_time.nil? || rx_time > now
  rx_iso = string_or_nil(payload["rx_iso"]) || Time.at(rx_time).utc.iso8601
  metrics = normalize_json_object(payload["metrics"])
  src = coerce_integer(payload["src"] || payload["source"] || payload["from"])
  dest = coerce_integer(payload["dest"] || payload["destination"] || payload["to"])
  # NOTE(review): rssi/snr index `metrics` without `&.` while elapsed_ms uses
  # safe navigation — confirm normalize_json_object never returns nil.
  rssi = coerce_integer(payload["rssi"]) || coerce_integer(metrics["rssi"])
  snr = coerce_float(payload["snr"]) || coerce_float(metrics["snr"])
  elapsed_ms = coerce_integer(
    payload["elapsed_ms"] ||
    payload["latency_ms"] ||
    metrics&.[]("elapsed_ms") ||
    metrics&.[]("latency_ms") ||
    metrics&.[]("latencyMs"),
  )
  hops_value = payload.key?("hops") ? payload["hops"] : payload["path"]
  hops = normalize_trace_hops(hops_value)
  # Ensure every referenced node exists and bump its last-seen marker.
  all_nodes = [src, dest, *hops].compact.uniq
  all_nodes.each do |node|
    ensure_unknown_node(db, node, node, heard_time: rx_time)
    touch_node_last_seen(db, node, node, rx_time: rx_time, source: :trace)
  end
  with_busy_retry do
    # Upsert: the newest rx_time/rx_iso always win; other columns only
    # overwrite existing values when the new row actually provides them.
    db.execute <<~SQL, [trace_identifier, request_id, src, dest, rx_time, rx_iso, rssi, snr, elapsed_ms]
      INSERT INTO traces(id, request_id, src, dest, rx_time, rx_iso, rssi, snr, elapsed_ms)
      VALUES(?,?,?,?,?,?,?,?,?)
      ON CONFLICT(id) DO UPDATE SET
      request_id=COALESCE(excluded.request_id,traces.request_id),
      src=COALESCE(excluded.src,traces.src),
      dest=COALESCE(excluded.dest,traces.dest),
      rx_time=excluded.rx_time,
      rx_iso=excluded.rx_iso,
      rssi=COALESCE(excluded.rssi,traces.rssi),
      snr=COALESCE(excluded.snr,traces.snr),
      elapsed_ms=COALESCE(excluded.elapsed_ms,traces.elapsed_ms)
    SQL
    # When the id was auto-assigned, pick it up from the last insert.
    trace_id = trace_identifier || db.last_insert_row_id
    return unless trace_id

    # Replace the hop path wholesale so re-ingested traces stay consistent.
    db.execute("DELETE FROM trace_hops WHERE trace_id = ?", [trace_id])
    hops.each_with_index do |hop_id, index|
      db.execute(
        "INSERT INTO trace_hops(trace_id, hop_index, node_id) VALUES(?,?,?)",
        [trace_id, index, hop_id],
      )
    end
  end
end
def insert_message(db, message)
return unless message.is_a?(Hash)
+12 -3
View File
@@ -81,10 +81,10 @@ module PotatoMesh
return false unless File.exist?(PotatoMesh::Config.db_path)
db = open_database(readonly: true)
required = %w[nodes messages positions telemetry neighbors instances]
required = %w[nodes messages positions telemetry neighbors instances traces trace_hops]
tables =
db.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages','positions','telemetry','neighbors','instances')",
"SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages','positions','telemetry','neighbors','instances','traces','trace_hops')",
).flatten
(required - tables).empty?
rescue SQLite3::Exception
@@ -99,7 +99,7 @@ module PotatoMesh
def init_db
FileUtils.mkdir_p(File.dirname(PotatoMesh::Config.db_path))
db = open_database
%w[nodes messages positions telemetry neighbors instances].each do |schema|
%w[nodes messages positions telemetry neighbors instances traces].each do |schema|
sql_file = File.expand_path("../../../../data/#{schema}.sql", __dir__)
db.execute_batch(File.read(sql_file))
end
@@ -178,6 +178,15 @@ module PotatoMesh
db.execute("ALTER TABLE telemetry ADD COLUMN #{name} #{type}")
telemetry_columns << name
end
trace_tables =
db.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name IN ('traces','trace_hops')",
).flatten
unless trace_tables.include?("traces") && trace_tables.include?("trace_hops")
traces_schema = File.expand_path("../../../../data/traces.sql", __dir__)
db.execute_batch(File.read(traces_schema))
end
rescue SQLite3::SQLException, Errno::ENOENT => e
warn_log(
"Failed to apply schema upgrade",
+74 -1
View File
@@ -110,7 +110,7 @@ module PotatoMesh
cleaned_strings = string_values.compact.map(&:to_s).map(&:strip).reject(&:empty?).uniq
cleaned_numbers = numeric_values.compact.map do |value|
begin
Integer(value, 10)
value.is_a?(String) ? Integer(value, 10) : Integer(value)
rescue ArgumentError, TypeError
nil
end
@@ -464,6 +464,79 @@ module PotatoMesh
ensure
db&.close
end
# Fetch recent traceroute rows, optionally restricted to traces that involve
# a given node as source, destination, or intermediate hop.
#
# @param limit [Object] requested row cap; normalised by +coerce_query_limit+.
# @param node_ref [String, Integer, nil] optional node reference to filter by.
# @return [Array<Hash>] compacted API rows ordered by +rx_time+ descending;
#   each row gains a "hops" array when hop records exist for that trace.
def query_traces(limit, node_ref: nil)
  limit = coerce_query_limit(limit)
  db = open_database(readonly: true)
  db.results_as_hash = true
  params = []
  where_clauses = []
  if node_ref
    tokens = node_reference_tokens(node_ref)
    numeric_values = tokens[:numeric_values]
    if numeric_values.empty?
      # A reference with no numeric candidates can never match the integer
      # src/dest/hop columns; the method-level ensure still closes the DB.
      return []
    end
    placeholders = Array.new(numeric_values.length, "?").join(", ")
    candidate_clauses = []
    candidate_clauses << "src IN (#{placeholders})"
    candidate_clauses << "dest IN (#{placeholders})"
    candidate_clauses << "id IN (SELECT trace_id FROM trace_hops WHERE node_id IN (#{placeholders}))"
    where_clauses << "(#{candidate_clauses.join(" OR ")})"
    # The placeholder list appears three times (src, dest, hop subquery), so
    # the bound values must be repeated three times in the same order.
    3.times { params.concat(numeric_values) }
  end
  sql = <<~SQL
    SELECT id, request_id, src, dest, rx_time, rx_iso, rssi, snr, elapsed_ms
    FROM traces
  SQL
  sql += " WHERE #{where_clauses.join(" AND ")}\n" if where_clauses.any?
  sql += <<~SQL
    ORDER BY rx_time DESC
    LIMIT ?
  SQL
  params << limit
  rows = db.execute(sql, params)
  # Bulk-load hop rows for all returned traces in one query and group them by
  # trace id; the SQL ORDER BY preserves hop_index ordering within each group.
  trace_ids = rows.map { |row| coerce_integer(row["id"]) }.compact
  hops_by_trace = Hash.new { |hash, key| hash[key] = [] }
  unless trace_ids.empty?
    placeholders = Array.new(trace_ids.length, "?").join(", ")
    hop_rows =
      db.execute(
        "SELECT trace_id, hop_index, node_id FROM trace_hops WHERE trace_id IN (#{placeholders}) ORDER BY trace_id, hop_index",
        trace_ids,
      )
    hop_rows.each do |hop|
      trace_id = coerce_integer(hop["trace_id"])
      node_id = coerce_integer(hop["node_id"])
      next unless trace_id && node_id
      hops_by_trace[trace_id] << node_id
    end
  end
  # Normalise column types in place and backfill rx_iso from rx_time when the
  # stored ISO string is missing or blank.
  rows.each do |r|
    rx_time = coerce_integer(r["rx_time"])
    r["rx_time"] = rx_time if rx_time
    r["rx_iso"] = Time.at(rx_time).utc.iso8601 if rx_time && string_or_nil(r["rx_iso"]).nil?
    r["request_id"] = coerce_integer(r["request_id"])
    r["src"] = coerce_integer(r["src"])
    r["dest"] = coerce_integer(r["dest"])
    r["rssi"] = coerce_integer(r["rssi"])
    r["snr"] = coerce_float(r["snr"])
    r["elapsed_ms"] = coerce_integer(r["elapsed_ms"])
    trace_id = coerce_integer(r["id"])
    if trace_id && hops_by_trace.key?(trace_id)
      r["hops"] = hops_by_trace[trace_id]
    end
  end
  rows.map { |row| compact_api_row(row) }
ensure
  db&.close
end
end
end
end
@@ -135,6 +135,20 @@ module PotatoMesh
query_telemetry(limit, node_ref: node_ref).to_json
end
# GET /api/traces — return the most recent traceroute records as JSON.
# Honours an optional ?limit= query parameter, capped at 1000 (default 200).
app.get "/api/traces" do
  content_type :json
  requested = params["limit"]&.to_i || 200
  query_traces([requested, 1000].min).to_json
end
# GET /api/traces/:id — traceroutes touching a specific node reference.
# Responds 400 when the path segment normalises to nothing.
app.get "/api/traces/:id" do
  content_type :json
  node_ref = string_or_nil(params["id"])
  unless node_ref
    halt 400, { error: "missing node id" }.to_json
  end
  requested = params["limit"]&.to_i || 200
  query_traces([requested, 1000].min, node_ref: node_ref).to_json
end
app.get "/api/instances" do
# Prevent the federation catalog from being exposed when federation is disabled.
halt 404 unless federation_enabled?
@@ -321,6 +321,25 @@ module PotatoMesh
ensure
db&.close
end
# POST /api/traces — authenticated ingest endpoint accepting either a single
# trace object or an array of up to 1000 trace objects.
app.post "/api/traces" do
  require_token!
  content_type :json
  begin
    data = JSON.parse(read_json_body)
  rescue JSON::ParserError
    halt 400, { error: "invalid JSON" }.to_json
  end
  # Normalise the payload to an array so single objects and batches share
  # one code path.
  trace_packets = data.is_a?(Array) ? data : [data]
  halt 400, { error: "too many traces" }.to_json if trace_packets.size > 1000
  db = open_database
  trace_packets.each do |packet|
    insert_trace(db, packet)
  end
  { status: "ok" }.to_json
ensure
  # Runs even when halt short-circuits the route, so the handle always closes.
  db&.close
end
end
end
end
+1 -1
View File
@@ -175,7 +175,7 @@ module PotatoMesh
#
# @return [String] semantic version identifier.
def version_fallback
"0.5.5"
"0.5.6"
end
# Default refresh interval for frontend polling routines.
+2 -2
View File
@@ -1,12 +1,12 @@
{
"name": "potato-mesh",
"version": "0.5.5",
"version": "0.5.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "potato-mesh",
"version": "0.5.5",
"version": "0.5.6",
"devDependencies": {
"istanbul-lib-coverage": "^3.2.2",
"istanbul-lib-report": "^3.0.1",
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "potato-mesh",
"version": "0.5.5",
"version": "0.5.6",
"type": "module",
"private": true,
"scripts": {
@@ -108,7 +108,7 @@ test('buildChatTabModel returns sorted nodes and channel buckets', () => {
const secondaryChannel = channelByLabel.BerlinMesh;
assert.equal(secondaryChannel.index, 1);
assert.equal(secondaryChannel.id, 'channel-1');
assert.equal(secondaryChannel.id, 'channel-secondary-berlinmesh');
assert.equal(secondaryChannel.entries.length, 2);
assert.deepEqual(secondaryChannel.entries.map(entry => entry.message.id), ['iso-ts', 'recent-alt']);
});
@@ -249,3 +249,80 @@ test('buildChatTabModel ignores plaintext log-only entries', () => {
assert.equal(encryptedEntries.length, 1);
assert.equal(encryptedEntries[0]?.message?.id, 'enc');
});
test('buildChatTabModel merges secondary channels with matching labels regardless of index', () => {
const primaryId = 'primary';
const secondaryFirstId = 'secondary-one';
const secondarySecondId = 'secondary-two';
const label = 'MeshTown';
const model = buildChatTabModel({
nodes: [],
messages: [
{ id: secondaryFirstId, rx_time: NOW - 12, channel: 7, channel_name: label },
{ id: primaryId, rx_time: NOW - 10, channel: 0, channel_name: label },
{ id: secondarySecondId, rx_time: NOW - 8, channel: 3, channel_name: ` ${label} ` }
],
nowSeconds: NOW,
windowSeconds: WINDOW
});
const meshChannels = model.channels.filter(channel => channel.label === label);
assert.equal(meshChannels.length, 2);
const primaryChannel = meshChannels.find(channel => channel.index === 0);
assert.ok(primaryChannel);
assert.equal(primaryChannel.entries.length, 1);
assert.equal(primaryChannel.entries[0]?.message?.id, primaryId);
const secondaryChannel = meshChannels.find(channel => channel.index > 0);
assert.ok(secondaryChannel);
assert.equal(secondaryChannel.id, 'channel-secondary-meshtown');
assert.equal(secondaryChannel.index, 3);
assert.deepEqual(secondaryChannel.entries.map(entry => entry.message.id), [secondaryFirstId, secondarySecondId]);
});
test('buildChatTabModel rekeys unnamed secondary buckets when a label later arrives', () => {
const unnamedId = 'unnamed';
const namedId = 'named';
const label = 'SideMesh';
const index = 4;
const model = buildChatTabModel({
nodes: [],
messages: [
{ id: unnamedId, rx_time: NOW - 15, channel: index },
{ id: namedId, rx_time: NOW - 10, channel: index, channel_name: label }
],
nowSeconds: NOW,
windowSeconds: WINDOW
});
const secondaryChannels = model.channels.filter(channel => channel.index === index);
assert.equal(secondaryChannels.length, 1);
const [secondaryChannel] = secondaryChannels;
assert.equal(secondaryChannel.id, 'channel-secondary-sidemesh');
assert.equal(secondaryChannel.label, label);
assert.deepEqual(secondaryChannel.entries.map(entry => entry.message.id), [unnamedId, namedId]);
});
test('buildChatTabModel merges unlabeled secondary messages into existing named buckets by index', () => {
const namedId = 'named';
const unlabeledId = 'unlabeled';
const label = 'MeshNorth';
const index = 5;
const model = buildChatTabModel({
nodes: [],
messages: [
{ id: namedId, rx_time: NOW - 12, channel: index, channel_name: label },
{ id: unlabeledId, rx_time: NOW - 8, channel: index }
],
nowSeconds: NOW,
windowSeconds: WINDOW
});
const secondaryChannels = model.channels.filter(channel => channel.index === index);
assert.equal(secondaryChannels.length, 1);
const [secondaryChannel] = secondaryChannels;
assert.equal(secondaryChannel.id, 'channel-secondary-meshnorth');
assert.equal(secondaryChannel.label, label);
assert.deepEqual(secondaryChannel.entries.map(entry => entry.message.id), [namedId, unlabeledId]);
});
@@ -45,10 +45,15 @@ const {
renderSingleNodeTable,
renderTelemetryCharts,
renderMessages,
renderTraceroutes,
renderTracePath,
extractTracePath,
normalizeTraceNodeRef,
renderNodeDetailHtml,
parseReferencePayload,
resolveRenderShortHtml,
fetchMessages,
fetchTracesForNode,
} = __testUtils;
test('format helpers normalise values as expected', () => {
@@ -340,6 +345,9 @@ test('renderNodeDetailHtml composes the table, neighbors, and messages', () => {
{ node_id: '!abcd', neighbor_id: '!ally', neighbor_short_name: 'ALLY', snr: 5.1 },
],
messages: [{ text: 'Hello', rx_time: 1_700_000_111 }],
traces: [
{ src: '!abcd', hops: ['!beef'], dest: '!ally' },
],
renderShortHtml: (short, role) => `<span class="short-name" data-role="${role}">${short}</span>`,
},
);
@@ -351,6 +359,8 @@ test('renderNodeDetailHtml composes the table, neighbors, and messages', () => {
assert.match(html, /<a class="node-long-link" href="\/nodes\/!abcd" data-node-detail-link="true" data-node-id="!abcd">Example Node<\/a>/);
assert.equal(html.includes('PEER'), true);
assert.equal(html.includes('ALLY'), true);
assert.equal(html.includes('Traceroutes'), true);
assert.match(html, /&rarr;/);
assert.match(html, /\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}\]\[/);
assert.equal(html.includes('data-role="CLIENT"'), true);
});
@@ -389,16 +399,31 @@ test('renderNodeDetailHtml embeds telemetry charts when snapshots are present',
test('fetchNodeDetailHtml renders the node layout for overlays', async () => {
const reference = { nodeId: '!alpha' };
let fetchCalls = 0;
const calledUrls = [];
const fetchImpl = async url => {
fetchCalls += 1;
assert.match(url, /\/api\/messages\/!alpha/);
calledUrls.push(url);
if (url.startsWith('/api/messages/')) {
return {
ok: true,
status: 200,
async json() {
return [{ text: 'Overlay hello', rx_time: 1_700_000_000 }];
},
};
}
if (url.startsWith('/api/traces/')) {
return {
ok: true,
status: 200,
async json() {
return [{ src: '!alpha', dest: '!bravo', hops: [] }];
},
};
}
return {
ok: true,
status: 200,
async json() {
return [{ text: 'Overlay hello', rx_time: 1_700_000_000 }];
},
ok: false,
status: 404,
async json() { return []; },
};
};
const refreshImpl = async () => ({
@@ -415,9 +440,11 @@ test('fetchNodeDetailHtml renders the node layout for overlays', async () => {
fetchImpl,
renderShortHtml: short => `<span class="short-name">${short}</span>`,
});
assert.equal(fetchCalls, 1);
assert.equal(calledUrls.some(url => url.includes('/api/messages/!alpha')), true);
assert.equal(calledUrls.some(url => url.includes('/api/traces/!alpha')), true);
assert.equal(html.includes('Example Alpha'), true);
assert.equal(html.includes('Overlay hello'), true);
assert.equal(html.includes('Traceroutes'), true);
assert.equal(html.includes('node-detail__table'), true);
});
@@ -477,6 +504,97 @@ test('fetchMessages returns an empty list when the endpoint is missing', async (
assert.deepEqual(messages, []);
});
test('normalizeTraceNodeRef canonicalizes references and renderTracePath builds arrowed output', () => {
const ref = normalizeTraceNodeRef(1234);
assert.deepEqual(ref, { identifier: '!000004d2', numericId: 1234 });
const roleIndex = {
byId: new Map([['!000004d2', 'CLIENT']]),
byNum: new Map(),
detailsById: new Map([['!000004d2', { shortName: 'NODE', role: 'ROUTER' }]]),
detailsByNum: new Map(),
};
const path = extractTracePath({ src: 1234, hops: [0xbeef], dest: '!ally' });
const html = renderTracePath(path, (short, role) => `<span data-role="${role}">${short}</span>`, {
roleIndex,
node: { nodeId: '!000004d2', shortName: 'NODE', role: 'ROUTER' },
});
assert.notEqual(html, '');
assert.match(html, /data-role="ROUTER"/);
assert.match(html, /&rarr;/);
});
test('renderTraceroutes lists traceroute paths with badges', () => {
const traces = [
{ src: '!one', hops: ['!two'], dest: '!three' },
];
const html = renderTraceroutes(traces, short => `<span class="short-name">${short}</span>`, {
roleIndex: null,
});
assert.equal(html.includes('Traceroutes'), true);
assert.equal(html.includes('short-name'), true);
});
test('renderTraceroutes skips empty or single-hop paths and renderTracePath uses node metadata', () => {
const pathHtml = renderTracePath([{ identifier: '!self', numericId: 1 }], short => `<b>${short}</b>`, {
roleIndex: null,
node: { nodeId: '!self', shortName: 'SELF', role: 'ROUTER' },
});
assert.equal(pathHtml, '');
const html = renderTraceroutes(
[{ src: '!self', hops: [], dest: '!peer' }],
(short, role) => `<span data-role="${role}">${short}</span>`,
{
roleIndex: {
detailsById: new Map([['!self', { shortName: 'SELF', role: 'CLIENT' }]]),
detailsByNum: new Map(),
byId: new Map([['!peer', 'ROUTER']]),
byNum: new Map(),
},
node: { nodeId: '!self', shortName: 'SELF', role: 'ADMIN' },
},
);
assert.equal(html.includes('Traceroutes'), true);
assert.match(html, /data-role="ADMIN"/);
});
test('renderTrace helpers normalise references and short-circuit when traces are empty', () => {
assert.deepEqual(normalizeTraceNodeRef('!abcd'), { identifier: '!abcd', numericId: null });
assert.equal(extractTracePath(null).length, 0);
const html = renderTraceroutes([], () => '', { roleIndex: null });
assert.equal(html, '');
});
test('fetchTracesForNode requests traceroutes for the node', async () => {
const calls = [];
const fetchImpl = async (url, options) => {
calls.push({ url, options });
return {
status: 200,
ok: true,
json: async () => [{ src: '!abc', dest: '!def', hops: [] }],
};
};
const traces = await fetchTracesForNode('!abc', { fetchImpl });
assert.equal(traces.length, 1);
assert.equal(calls[0].url.includes('/api/traces/!abc'), true);
assert.equal(calls[0].options.cache, 'no-store');
});
test('fetchTracesForNode returns empty when identifier is missing', async () => {
const traces = await fetchTracesForNode(null, { fetchImpl: () => { throw new Error('should not run'); } });
assert.deepEqual(traces, []);
});
test('fetchTracesForNode throws on HTTP error', async () => {
await assert.rejects(
() => fetchTracesForNode('!err', {
fetchImpl: async () => ({ status: 500, ok: false, json: async () => ({}) }),
}),
/Failed to load traceroutes/,
);
});
test('initializeNodeDetailPage hydrates the container with node data', async () => {
const element = {
dataset: {
@@ -0,0 +1,123 @@
/*
* Copyright © 2025-26 l5yth & contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import test from 'node:test';
import assert from 'node:assert/strict';
import { buildTraceSegments, __testUtils } from '../trace-paths.js';
const { coerceFiniteNumber, findNode, resolveNodeCoordinates } = __testUtils;
const { buildNodeIndex } = __testUtils;
test('buildTraceSegments connects source, hops, and destination when coordinates exist', () => {
const traces = [
{ id: 9_001, src: 2658361180, hops: [19_088_743], dest: 4_242_424_242, rx_time: 1700 },
];
const nodes = [
{ node_id: '2658361180', latitude: 10.5, longitude: -71.2, role: 'ROUTER' },
{ node_id: '19088743', latitude: 11.1, longitude: -70.9, role: 'CLIENT' },
{ node_id: '4242424242', latitude: 12.3, longitude: -70.2, role: 'CLIENT_HIDDEN' },
];
const segments = buildTraceSegments(traces, nodes, {
colorForNode: node => `color:${node.role}`,
limitDistance: false,
});
assert.equal(segments.length, 2);
assert.deepEqual(segments[0].latlngs, [[10.5, -71.2], [11.1, -70.9]]);
assert.deepEqual(segments[1].latlngs, [[11.1, -70.9], [12.3, -70.2]]);
assert.equal(segments[0].color, 'color:ROUTER');
assert.equal(segments[1].color, 'color:CLIENT');
assert.equal(segments[0].rxTime, 1700);
});
test('buildTraceSegments drops paths through hops without locations', () => {
const traces = [
{ id: 9_002, src: 101, hops: [202], dest: 303 },
];
const nodes = [
{ node_id: '101', latitude: 1, longitude: 2, role: 'CLIENT' },
{ node_id: '202' },
{ node_id: '303', latitude: 3, longitude: 4, role: 'CLIENT' },
];
const segments = buildTraceSegments(traces, nodes);
assert.equal(segments.length, 0);
});
test('buildTraceSegments respects distance limits when evaluating coordinates', () => {
const traces = [
{ id: 9_003, src: 1, dest: 2 },
];
const nodes = [
{ node_id: '1', latitude: 0, longitude: 0, distance_km: 51 },
{ node_id: '2', latitude: 1, longitude: 1, distance_km: 3 },
];
const segments = buildTraceSegments(traces, nodes, { limitDistance: true, maxDistanceKm: 50 });
assert.equal(segments.length, 0);
});
test('buildTraceSegments skips invalid inputs and uses numeric lookup fallbacks', () => {
const traces = [
{ id: 9_004, src: '1001', hops: [], dest: '1002' },
];
const nodes = [
{ node_id: ' ', node_num: '1001', latitude: 0, longitude: 0 },
{ node_id: '1002', latitude: 0, longitude: 1 },
];
const segments = buildTraceSegments(traces, nodes, {
limitDistance: false,
maxDistanceKm: null,
colorForNode: () => '#123456',
});
assert.equal(segments.length, 1);
assert.deepEqual(segments[0].latlngs, [[0, 0], [0, 1]]);
assert.equal(segments[0].color, '#123456');
});
test('helper utilities coerce values and locate nodes', () => {
assert.equal(coerceFiniteNumber(null), null);
assert.equal(coerceFiniteNumber(' '), null);
assert.equal(coerceFiniteNumber('7'), 7);
const byId = new Map([['!id', { node_id: '!id', latitude: 1, longitude: 2 }]]);
const byNum = new Map([[99, { node_id: '!other', latitude: 0, longitude: 0 }]]);
assert.equal(findNode(byId, byNum, '!id').node_id, '!id');
assert.equal(findNode(byId, byNum, 99).node_id, '!other');
assert.equal(findNode(byId, byNum, 100), null);
const coords = resolveNodeCoordinates({ latitude: 5, longitude: 6, distance_km: 10 }, { limitDistance: true, maxDistanceKm: 15 });
assert.deepEqual(coords, [5, 6]);
const outOfRange = resolveNodeCoordinates({ latitude: 0, longitude: 0, distance_km: 20 }, { limitDistance: true, maxDistanceKm: 15 });
assert.equal(outOfRange, null);
});
test('buildNodeIndex tolerates non-array inputs and buildTraceSegments short-circuits', () => {
const index = buildNodeIndex(null);
assert.ok(index.byId instanceof Map);
assert.ok(index.byNum instanceof Map);
assert.equal(index.byId.size, 0);
assert.equal(index.byNum.size, 0);
const segments = buildTraceSegments(null, null);
assert.deepEqual(segments, []);
});
+60 -5
View File
@@ -189,8 +189,28 @@ export function buildChatTabModel({
modemPreset,
envFallbackLabel: primaryChannelEnvLabel
});
const bucketKey = buildChannelBucketKey(safeIndex, safeIndex === 0 && labelInfo.label !== '0' ? labelInfo.label : null);
const nameBucketKey = safeIndex > 0 ? buildSecondaryNameBucketKey(labelInfo) : null;
const primaryBucketKey = safeIndex === 0 && labelInfo.label !== '0' ? buildPrimaryBucketKey(labelInfo.label) : '0';
let bucketKey = safeIndex === 0 ? primaryBucketKey : nameBucketKey ?? String(safeIndex);
let bucket = channelBuckets.get(bucketKey);
if (!bucket && safeIndex > 0) {
const existingBucketKey = findExistingBucketKeyByIndex(channelBuckets, safeIndex);
if (existingBucketKey) {
bucketKey = existingBucketKey;
bucket = channelBuckets.get(existingBucketKey);
}
}
if (bucket && nameBucketKey && bucket.key !== nameBucketKey) {
channelBuckets.delete(bucket.key);
bucket.key = nameBucketKey;
bucket.id = buildChannelTabId(nameBucketKey);
channelBuckets.set(nameBucketKey, bucket);
bucketKey = nameBucketKey;
}
if (!bucket) {
bucket = {
key: bucketKey,
@@ -208,6 +228,9 @@ export function buildChatTabModel({
bucket.label = labelInfo.label;
bucket.labelPriority = labelInfo.priority;
}
if (Number.isFinite(safeIndex)) {
bucket.index = Math.min(bucket.index ?? safeIndex, safeIndex);
}
}
bucket.entries.push({ ts, message });
@@ -425,15 +448,47 @@ export function normaliseChannelName(value) {
return null;
}
function buildChannelBucketKey(index, primaryChannelLabel) {
const safeIndex = Number.isFinite(index) ? Math.max(0, Math.trunc(index)) : 0;
if (safeIndex === 0 && primaryChannelLabel) {
function buildPrimaryBucketKey(primaryChannelLabel) {
if (primaryChannelLabel) {
const trimmed = primaryChannelLabel.trim();
if (trimmed.length > 0 && trimmed !== '0') {
return `0::${trimmed.toLowerCase()}`;
}
}
return String(safeIndex);
return '0';
}
/**
 * Build a name-based bucket key for a secondary channel label.
 *
 * Only labels sourced from an actual channel name (NAME priority) qualify;
 * index-derived labels yield null so callers fall back to the numeric key.
 *
 * @param {{label?: (string|null), priority?: number}|null} labelInfo Label metadata.
 * @returns {string|null} `secondary::<label>` key or null when not applicable.
 */
function buildSecondaryNameBucketKey(labelInfo) {
  const priority = labelInfo?.priority ?? CHANNEL_LABEL_PRIORITY.INDEX;
  const label = labelInfo?.label ?? null;
  if (!label || priority !== CHANNEL_LABEL_PRIORITY.NAME) {
    return null;
  }
  const normalized = label.trim().toLowerCase();
  return normalized.length ? `secondary::${normalized}` : null;
}
/**
 * Find the key of an existing secondary bucket whose channel index matches.
 *
 * Primary buckets (index 0) never match, and fractional target indices are
 * truncated before comparison.
 *
 * @param {Map<string, Object>|null} channelBuckets Bucket map keyed by bucket key.
 * @param {number} targetIndex Channel index to search for (must be > 0).
 * @returns {string|null} Matching bucket key, or null when none qualifies.
 */
function findExistingBucketKeyByIndex(channelBuckets, targetIndex) {
  if (!channelBuckets || !Number.isFinite(targetIndex) || targetIndex <= 0) {
    return null;
  }
  const wanted = Math.trunc(targetIndex);
  for (const [key, bucket] of channelBuckets.entries()) {
    const index = bucket?.index;
    if (!Number.isFinite(index) || index === 0) {
      continue;
    }
    if (Math.trunc(index) === wanted) {
      return key;
    }
  }
  return null;
}
function buildChannelTabId(bucketKey) {
+61 -3
View File
@@ -56,6 +56,7 @@ import {
aggregateTelemetrySnapshots,
} from './snapshot-aggregator.js';
import { normalizeNodeCollection } from './node-snapshot-normalizer.js';
import { buildTraceSegments } from './trace-paths.js';
/**
* Entry point for the interactive dashboard. Wires up event listeners,
@@ -148,6 +149,8 @@ export function initializeApp(config) {
/** @type {Array<Object>} */
let allNeighbors = [];
/** @type {Array<Object>} */
let allTraces = [];
/** @type {Array<Object>} */
let allMessages = [];
/** @type {Array<Object>} */
let allEncryptedMessages = [];
@@ -156,8 +159,8 @@ export function initializeApp(config) {
/** @type {Array<Object>} */
let allPositionEntries = [];
/** @type {Map<string, Object>} */
let nodesById = new Map();
let messagesById = new Map();
let nodesById = new Map();
let messagesById = new Map();
let nodesByNum = new Map();
const messageNodeHydrator = createMessageNodeHydrator({
fetchNodeById,
@@ -165,6 +168,7 @@ let messagesById = new Map();
logger: console,
});
const NODE_LIMIT = 1000;
const TRACE_LIMIT = 200;
const SNAPSHOT_LIMIT = SNAPSHOT_WINDOW;
const CHAT_LIMIT = MESSAGE_LIMIT;
const CHAT_RECENT_WINDOW_SECONDS = 7 * 24 * 60 * 60;
@@ -3314,6 +3318,20 @@ let messagesById = new Map();
return r.json();
}
/**
 * Fetch traceroute observations from the JSON API.
 *
 * @param {number} [limit=TRACE_LIMIT] Maximum number of records.
 * @returns {Promise<Array<Object>>} Parsed trace payloads.
 */
async function fetchTraces(limit = TRACE_LIMIT) {
  let requested = TRACE_LIMIT;
  if (Number.isFinite(limit) && limit > 0) {
    requested = Math.floor(limit);
  }
  // Never request more traces than the node cap the dashboard uses elsewhere.
  const capped = Math.min(requested, NODE_LIMIT);
  const response = await fetch(`/api/traces?limit=${capped}`, { cache: 'no-store' });
  if (!response.ok) {
    throw new Error('HTTP ' + response.status);
  }
  return response.json();
}
/**
* Fetch telemetry entries from the JSON API.
*
@@ -3654,6 +3672,13 @@ let messagesById = new Map();
if (typeof nodeId !== 'string' || nodeId.length === 0) continue;
nodesById.set(nodeId, node);
}
const traceSegments = neighborLinesLayer
? buildTraceSegments(allTraces, nodes, {
limitDistance: LIMIT_DISTANCE,
maxDistanceKm: MAX_DISTANCE_KM,
colorForNode: node => getRoleColor(node.role)
})
: [];
if (neighborLinesLayer && Array.isArray(allNeighbors) && allNeighbors.length) {
const neighborSegments = [];
@@ -3767,6 +3792,25 @@ let messagesById = new Map();
});
}
if (neighborLinesLayer && traceSegments.length) {
traceSegments
.sort((a, b) => {
const rxA = Number.isFinite(a.rxTime) ? a.rxTime : -Infinity;
const rxB = Number.isFinite(b.rxTime) ? b.rxTime : -Infinity;
if (rxA === rxB) return 0;
return rxA - rxB;
})
.forEach(segment => {
L.polyline(segment.latlngs, {
color: segment.color,
weight: 2,
opacity: 0.42,
dashArray: '6 6',
className: 'neighbor-connection-line trace-connection-line'
}).addTo(neighborLinesLayer);
});
}
const nodesByRenderOrder = nodes
.map((node, index) => ({ node, index }))
.sort((a, b) => {
@@ -3961,14 +4005,27 @@ let messagesById = new Map();
console.warn('position refresh failed; continuing without updates', err);
return [];
});
const tracesPromise = fetchTraces().catch(err => {
console.warn('trace refresh failed; continuing without traceroutes', err);
return [];
});
const encryptedMessagesPromise = fetchMessages(MESSAGE_LIMIT, { encrypted: true }).catch(err => {
console.warn('encrypted message refresh failed; continuing without encrypted entries', err);
return [];
});
const [nodes, positions, neighborTuples, messages, telemetryEntries, encryptedMessages] = await Promise.all([
const [
nodes,
positions,
neighborTuples,
traceEntries,
messages,
telemetryEntries,
encryptedMessages
] = await Promise.all([
fetchNodes(),
positionsPromise,
neighborPromise,
tracesPromise,
fetchMessages(MESSAGE_LIMIT),
telemetryPromise,
encryptedMessagesPromise
@@ -3993,6 +4050,7 @@ let messagesById = new Map();
allTelemetryEntries = aggregatedTelemetry;
allPositionEntries = aggregatedPositions;
allNeighbors = aggregatedNeighbors;
allTraces = Array.isArray(traceEntries) ? traceEntries : [];
applyFilter();
if (statusEl) {
statusEl.textContent = 'updated ' + new Date().toLocaleTimeString();
+173 -8
View File
@@ -39,6 +39,7 @@ const HOUR_MS = 3_600_000;
const TELEMETRY_WINDOW_MS = DAY_MS * 7;
const DEFAULT_CHART_DIMENSIONS = Object.freeze({ width: 660, height: 360 });
const DEFAULT_CHART_MARGIN = Object.freeze({ top: 28, right: 80, bottom: 64, left: 80 });
const TRACE_LIMIT = 200;
/**
* Telemetry chart definitions describing axes and series metadata.
*
@@ -1997,20 +1998,145 @@ function renderMessages(messages, renderShortHtml, node) {
return `<ul class="node-detail__list">${items.join('')}</ul>`;
}
/**
 * Normalise a trace node reference into identifier and numeric forms.
 *
 * Numeric references are rendered as 8-digit hexadecimal Meshtastic-style
 * ids (e.g. 1234 -> "!000004d2"); non-numeric references pass through as
 * their string form.
 *
 * @param {*} value Raw trace endpoint/hop reference.
 * @returns {{ identifier: (string|null), numericId: (number|null) }|null} Normalised reference.
 */
function normalizeTraceNodeRef(value) {
  const numericId = numberOrNull(value);
  let identifier;
  if (numericId != null) {
    identifier = `!${(numericId >>> 0).toString(16).padStart(8, '0')}`;
  } else {
    identifier = stringOrNull(value);
  }
  if (identifier == null && numericId == null) {
    return null;
  }
  return { identifier, numericId };
}
/**
 * Extract an ordered trace path containing the source, hops, and destination.
 *
 * @param {Object} trace Trace payload.
 * @returns {Array<{identifier: (string|null), numericId: (number|null)}>} Normalised path entries.
 */
function extractTracePath(trace) {
  if (!trace || typeof trace !== 'object') return [];
  // Source first, then intermediate hops, then destination; accept the same
  // field aliases the ingest side produces.
  const refs = [
    trace.src ?? trace.source ?? trace.from,
    ...(Array.isArray(trace.hops) ? trace.hops : []),
    trace.dest ?? trace.destination ?? trace.to,
  ];
  const path = [];
  for (const ref of refs) {
    const normalized = normalizeTraceNodeRef(ref);
    if (normalized) {
      path.push(normalized);
    }
  }
  return path;
}
/**
 * Render a trace path using short-name badges.
 *
 * @param {Array<{identifier: (string|null), numericId: (number|null)}>} path Ordered path references.
 * @param {Function} renderShortHtml Badge rendering function.
 * @param {{ roleIndex?: Object|null, node?: Object|null }} options Rendering helpers.
 * @returns {string} HTML fragment for the trace or ``''`` when unsuitable.
 */
function renderTracePath(path, renderShortHtml, { roleIndex = null, node = null } = {}) {
  // A path needs at least two entries (source and destination) to draw a route.
  if (!Array.isArray(path) || path.length < 2 || typeof renderShortHtml !== 'function') {
    return '';
  }
  // Canonical id/number of the detail page's own node, used to recognise it
  // among the path entries so its metadata can override the neighbor index.
  const nodeIdNormalized = normalizeNodeId(node?.nodeId ?? node?.node_id);
  const nodeNumNormalized = numberOrNull(node?.nodeNum ?? node?.node_num ?? node?.num);
  const renderBadge = ref => {
    const identifier = ref?.identifier ?? null;
    const numericId = ref?.numericId ?? null;
    const normalizedId = normalizeNodeId(identifier);
    // The reference matches the page node when either its string id or its
    // numeric id lines up with the node's own.
    const matchesNode =
      (normalizedId && nodeIdNormalized && normalizedId === nodeIdNormalized) ||
      (numericId != null && nodeNumNormalized != null && numericId === nodeNumNormalized);
    let details = lookupNeighborDetails(roleIndex, { identifier, numericId }) ?? undefined;
    if (matchesNode && node) {
      // Prefer the page node's own metadata, fall back to looked-up details,
      // and default the role to CLIENT as a last resort.
      details = {
        ...(details || {}),
        role: node.role ?? details?.role ?? 'CLIENT',
        shortName: node.shortName ?? node.short_name ?? details?.shortName ?? null,
        longName: node.longName ?? node.long_name ?? details?.longName ?? null,
      };
    }
    return renderRoleAwareBadge(renderShortHtml, {
      shortName: details?.shortName ?? null,
      longName: details?.longName ?? null,
      role: details?.role ?? null,
      identifier,
      numericId,
      source: details,
    });
  };
  // Drop badges that rendered to nothing; fewer than two survivors means
  // there is no route left to display.
  const items = path
    .map(renderBadge)
    .filter(fragment => stringOrNull(fragment));
  if (items.length < 2) {
    return '';
  }
  const arrow = '<span class="node-detail__trace-arrow" aria-hidden="true">&rarr;</span>';
  return `<li class="node-detail__trace">${items.join(arrow)}</li>`;
}
/**
 * Render all traceroutes associated with the node.
 *
 * @param {Array<Object>} traces Trace payloads.
 * @param {Function} renderShortHtml Badge renderer.
 * @param {{ roleIndex?: Object|null, node?: Object|null }} options Rendering helpers.
 * @returns {string} HTML fragment or ``''`` when absent.
 */
function renderTraceroutes(traces, renderShortHtml, { roleIndex = null, node = null } = {}) {
  if (!Array.isArray(traces) || traces.length === 0 || typeof renderShortHtml !== 'function') {
    return '';
  }
  const rendered = [];
  for (const trace of traces) {
    const fragment = renderTracePath(extractTracePath(trace), renderShortHtml, { roleIndex, node });
    if (stringOrNull(fragment)) {
      rendered.push(fragment);
    }
  }
  if (rendered.length === 0) {
    return '';
  }
  return `
<section class="node-detail__section node-detail__traceroutes">
<h3>Traceroutes</h3>
<ul class="node-detail__trace-list">${rendered.join('')}</ul>
</section>
`;
}
/**
* Render the node detail layout to an HTML fragment.
*
* @param {Object} node Normalised node payload.
* @param {{
* neighbors?: Array<Object>,
* messages?: Array<Object>,
* renderShortHtml: Function,
* }} options Rendering options.
* neighbors?: Array<Object>,
* messages?: Array<Object>,
* traces?: Array<Object>,
* renderShortHtml: Function,
* }} options Rendering options.
* @returns {string} HTML fragment representing the detail view.
*/
function renderNodeDetailHtml(node, {
neighbors = [],
messages = [],
traces = [],
renderShortHtml,
neighborRoleIndex = null,
chartNowMs = Date.now(),
@@ -2028,12 +2154,16 @@ function renderNodeDetailHtml(node, {
const tableHtml = renderSingleNodeTable(node, renderShortHtml);
const chartsHtml = renderTelemetryCharts(node, { nowMs: chartNowMs });
const neighborsHtml = renderNeighborGroups(node, neighbors, renderShortHtml, { roleIndex: neighborRoleIndex });
const tracesHtml = renderTraceroutes(traces, renderShortHtml, { roleIndex: neighborRoleIndex, node });
const messagesHtml = renderMessages(messages, renderShortHtml, node);
const sections = [];
if (neighborsHtml) {
sections.push(neighborsHtml);
}
if (tracesHtml) {
sections.push(tracesHtml);
}
if (Array.isArray(messages) && messages.length > 0 && messagesHtml) {
sections.push(`<section class="node-detail__section"><h3>Messages</h3>${messagesHtml}</section>`);
}
@@ -2135,6 +2265,32 @@ async function fetchMessages(identifier, { fetchImpl, includeEncrypted = false,
return Array.isArray(payload) ? payload : [];
}
/**
 * Fetch traceroute records for a node reference.
 *
 * @param {string|number} identifier Canonical node identifier or number.
 * @param {{fetchImpl?: Function}} options Fetch options.
 * @returns {Promise<Array<Object>>} Resolved trace collection.
 */
async function fetchTracesForNode(identifier, { fetchImpl } = {}) {
  if (identifier == null) {
    return [];
  }
  // Prefer the injected fetch (used by tests), falling back to the global one.
  let doFetch = fetchImpl;
  if (typeof doFetch !== 'function') {
    doFetch = globalThis.fetch;
  }
  if (typeof doFetch !== 'function') {
    throw new TypeError('A fetch implementation is required to load traceroutes');
  }
  const target = `/api/traces/${encodeURIComponent(String(identifier))}?limit=${TRACE_LIMIT}`;
  const response = await doFetch(target, DEFAULT_FETCH_OPTIONS);
  // A 404 simply means the node has no traceroutes; treat it as empty.
  if (response.status === 404) return [];
  if (!response.ok) {
    throw new Error(`Failed to load traceroutes (HTTP ${response.status})`);
  }
  const body = await response.json();
  // Guard against a non-array payload from a misbehaving server.
  return Array.isArray(body) ? body : [];
}
/**
* Initialise the node detail page by hydrating the DOM with fetched data.
*
@@ -2166,13 +2322,17 @@ export async function fetchNodeDetailHtml(referenceData, options = {}) {
normalized.nodeId ??
stringOrNull(node.nodeId ?? node.node_id) ??
(normalized.nodeNum != null ? normalized.nodeNum : null);
const messages = await fetchMessages(messageIdentifier, {
fetchImpl: options.fetchImpl,
privateMode: options.privateMode === true,
});
const [messages, traces] = await Promise.all([
fetchMessages(messageIdentifier, {
fetchImpl: options.fetchImpl,
privateMode: options.privateMode === true,
}),
fetchTracesForNode(messageIdentifier, { fetchImpl: options.fetchImpl }),
]);
return renderNodeDetailHtml(node, {
neighbors: node.neighbors,
messages,
traces,
renderShortHtml,
neighborRoleIndex,
});
@@ -2256,10 +2416,15 @@ export const __testUtils = {
renderSingleNodeTable,
renderTelemetryCharts,
renderMessages,
renderTraceroutes,
renderTracePath,
extractTracePath,
normalizeTraceNodeRef,
renderNodeDetailHtml,
parseReferencePayload,
resolveRenderShortHtml,
fetchMessages,
fetchTracesForNode,
fetchNodeDetailHtml,
normalizeNodeReference,
};
+196
View File
@@ -0,0 +1,196 @@
/*
* Copyright © 2025-26 l5yth & contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Convert raw numeric identifiers into finite numbers.
 *
 * @param {*} value Candidate numeric value.
 * @returns {number|null} Finite number or ``null`` when invalid.
 */
function coerceFiniteNumber(value) {
  if (value === null || value === undefined) {
    return null;
  }
  // Blank strings would otherwise coerce to 0 via Number(''); reject them.
  if (typeof value === 'string' && !value.trim()) {
    return null;
  }
  let candidate;
  if (typeof value === 'number') {
    candidate = value;
  } else {
    candidate = Number(value);
  }
  // NaN and ±Infinity are unusable as identifiers.
  if (!Number.isFinite(candidate)) {
    return null;
  }
  return candidate;
}
/**
 * Build lookup tables for locating nodes by identifier.
 *
 * @param {Array<Object>} nodes Node payloads.
 * @returns {{ byId: Map<string, Object>, byNum: Map<number, Object> }} Lookup maps.
 */
function buildNodeIndex(nodes) {
  const byId = new Map();
  const byNum = new Map();
  if (!Array.isArray(nodes)) {
    return { byId, byNum };
  }
  for (const entry of nodes) {
    if (!entry || typeof entry !== 'object') {
      continue;
    }
    // Accept either snake_case or camelCase string identifiers.
    let rawId = null;
    if (typeof entry.node_id === 'string') {
      rawId = entry.node_id;
    } else if (typeof entry.nodeId === 'string') {
      rawId = entry.nodeId;
    }
    if (rawId) {
      const canonical = rawId.trim();
      if (canonical.length > 0) {
        byId.set(canonical, entry);
        // Purely numeric string ids double as numeric aliases; first one wins.
        const numericAlias = coerceFiniteNumber(canonical);
        if (numericAlias != null && !byNum.has(numericAlias)) {
          byNum.set(numericAlias, entry);
        }
      }
    }
    // Numeric identifiers may arrive under any of these keys; keep the first
    // node seen for each number.
    for (const raw of [entry.num, entry.node_num, entry.nodeNum]) {
      const numeric = coerceFiniteNumber(raw);
      if (numeric != null && !byNum.has(numeric)) {
        byNum.set(numeric, entry);
      }
    }
  }
  return { byId, byNum };
}
/**
 * Locate a node by either string identifier or numeric reference.
 *
 * @param {Map<string, Object>} byId Lookup keyed by canonical identifier.
 * @param {Map<number, Object>} byNum Lookup keyed by numeric identifier.
 * @param {*} ref Raw reference number or string.
 * @returns {Object|null} Node payload or ``null`` when absent.
 */
function findNode(byId, byNum, ref) {
  // Exact string identifiers take priority over numeric interpretation.
  if (typeof ref === 'string') {
    const key = ref.trim();
    if (key && byId.has(key)) {
      return byId.get(key) || null;
    }
  }
  const numeric = coerceFiniteNumber(ref);
  if (numeric == null) {
    return null;
  }
  if (byNum.has(numeric)) {
    return byNum.get(numeric) || null;
  }
  // Fall back to a node whose string id is the decimal form of the number.
  const stringified = String(numeric);
  return byId.has(stringified) ? (byId.get(stringified) || null) : null;
}
/**
 * Resolve a coordinate pair for a node when a valid location is present and
 * optionally within the configured range.
 *
 * @param {Object} node Node payload.
 * @param {{ limitDistance?: boolean, maxDistanceKm?: number }} options Distance filtering options.
 * @returns {[number, number]|null} ``[lat, lon]`` tuple or ``null`` when unusable.
 */
function resolveNodeCoordinates(node, { limitDistance = false, maxDistanceKm = null } = {}) {
  if (!node || typeof node !== 'object') {
    return null;
  }
  // Accept both long and short field names for the coordinates.
  const latitude = coerceFiniteNumber(node.latitude ?? node.lat);
  const longitude = coerceFiniteNumber(node.longitude ?? node.lon);
  if (latitude == null || longitude == null) {
    return null;
  }
  if (limitDistance && Number.isFinite(maxDistanceKm)) {
    // A node with an unknown distance is kept; only a known, too-large
    // distance excludes it.
    const travelled = coerceFiniteNumber(node.distance_km ?? node.distanceKm);
    if (travelled != null && travelled > maxDistanceKm) {
      return null;
    }
  }
  return [latitude, longitude];
}
/**
 * Normalise a traceroute payload into a list of ordered node references.
 *
 * @param {Object} trace Trace payload.
 * @returns {Array<number>} Ordered identifiers including source, hops, and destination.
 */
function extractTracePath(trace) {
  if (!trace || typeof trace !== 'object') {
    return [];
  }
  const ordered = [];
  // Push only values that coerce to finite numbers; skip the rest silently.
  const append = candidate => {
    const id = coerceFiniteNumber(candidate);
    if (id != null) {
      ordered.push(id);
    }
  };
  append(trace.src ?? trace.source ?? trace.from);
  if (Array.isArray(trace.hops)) {
    for (const hop of trace.hops) {
      append(hop);
    }
  }
  append(trace.dest ?? trace.destination ?? trace.to);
  return ordered;
}
/**
 * Build drawable line segments for traceroute records using available node
 * coordinates. Segments are only created when both endpoints have valid
 * locations; missing hops break the chain rather than skipping ahead.
 *
 * @param {Array<Object>} traces Trace payloads fetched from the API.
 * @param {Array<Object>} nodes Node payloads currently in view.
 * @param {{
 *   limitDistance?: boolean,
 *   maxDistanceKm?: number,
 *   colorForNode?: (node: Object) => string
 * }} [options] Rendering options.
 * @returns {Array<Object>} Drawable segment descriptors.
 */
export function buildTraceSegments(traces, nodes, { limitDistance = false, maxDistanceKm = null, colorForNode } = {}) {
  if (!Array.isArray(traces) || !Array.isArray(nodes) || !nodes.length) {
    return [];
  }
  const index = buildNodeIndex(nodes);
  // Default to Leaflet's standard polyline blue when no resolver is supplied.
  const pickColor = typeof colorForNode === 'function' ? colorForNode : () => '#3388ff';
  const segments = [];
  for (const trace of traces) {
    const path = extractTracePath(trace);
    // A drawable chain needs at least two endpoints.
    if (path.length < 2) {
      continue;
    }
    const receivedAt = coerceFiniteNumber(trace.rx_time ?? trace.rxTime);
    let tail = null;
    for (const ref of path) {
      const node = findNode(index.byId, index.byNum, ref);
      const coords = resolveNodeCoordinates(node, { limitDistance, maxDistanceKm });
      if (!node || !coords) {
        // Unknown or unlocatable hop: break the chain instead of connecting
        // across the gap.
        tail = null;
        continue;
      }
      if (tail) {
        segments.push({
          latlngs: [tail.coords, coords],
          // Segments inherit the colour of their origin node.
          color: pickColor(tail.node),
          traceId: trace.id ?? trace.packet_id ?? trace.trace_id,
          rxTime: receivedAt,
        });
      }
      tail = { node, coords };
    }
  }
  return segments;
}
// Internal helpers exposed solely for unit tests; not part of the public API.
export const __testUtils = {
coerceFiniteNumber,
buildNodeIndex,
findNode,
resolveNodeCoordinates,
extractTracePath,
};
+24
View File
@@ -117,6 +117,10 @@ tbody tr:nth-child(even) td {
cursor: pointer;
}
/* Dashed stroke visually distinguishes traceroute links on the map from
   solid neighbor connection lines. */
.trace-connection-line {
stroke-dasharray: 6 6;
}
.neighbor-snr {
margin-left: 4px;
color: var(--muted);
@@ -1108,6 +1112,26 @@ body.dark .node-detail__chart-grid-line {
font-size: 0.95rem;
}
/* Vertical list of traceroute rows on the node detail page. */
.node-detail__trace-list {
list-style: none;
margin: 0;
padding: 0;
display: flex;
flex-direction: column;
gap: 6px;
}
/* One traceroute: node badges laid out in a row with arrow separators. */
.node-detail__trace {
display: flex;
align-items: center;
gap: 10px;
}
/* Decorative "→" between hops (marked aria-hidden in the markup). */
.node-detail__trace-arrow {
color: var(--muted);
font-size: 0.95rem;
}
.node-detail__section h3 {
margin: 0 0 10px;
font-size: 1.1rem;
+162
View File
@@ -96,6 +96,8 @@ RSpec.describe "Potato Mesh Sinatra app" do
def clear_database
with_db do |db|
db.execute("DELETE FROM instances")
db.execute("DELETE FROM trace_hops")
db.execute("DELETE FROM traces")
db.execute("DELETE FROM neighbors")
db.execute("DELETE FROM messages")
db.execute("DELETE FROM nodes")
@@ -270,6 +272,30 @@ RSpec.describe "Potato Mesh Sinatra app" do
let(:nodes_fixture) { JSON.parse(File.read(fixture_path("nodes.json"))) }
let(:messages_fixture) { JSON.parse(File.read(fixture_path("messages.json"))) }
let(:telemetry_fixture) { JSON.parse(File.read(fixture_path("telemetry.json"))) }
# Two traceroute payloads covering both field-name schemes the ingest API
# accepts: canonical keys ("id"/"src"/"dest"/"hops") and the alias keys
# ("packet_id"/"req"/"from"/"destination"/"path" with nested "metrics").
let(:trace_fixture) do
  [
    {
      "id" => 9_001,
      "request_id" => 17,
      "src" => 2_658_361_180,
      "dest" => 4_242_424_242,
      "rx_time" => reference_time.to_i - 2,
      "hops" => [2_658_361_180, 19_088_743, 4_242_424_242],
      "rssi" => -83,
      "snr" => 5.0,
      "elapsed_ms" => 842,
    },
    {
      # Alias-style payload; hop entries mix a hex string and an integer,
      # and snr/latency live under "metrics".
      "packet_id" => 9_002,
      "req" => 21,
      "from" => 19_088_743,
      "destination" => 2_658_361_180,
      "rx_time" => reference_time.to_i - 5,
      "path" => [{ "node_id" => "0xbeadf00d" }, { "node_id" => 19_088_743 }],
      "metrics" => { "snr" => 3.5, "latency_ms" => 1_020 },
    },
  ]
end
let(:reference_time) do
latest = nodes_fixture.map { |node| node["last_heard"] }.compact.max
Time.at((latest || Time.now.to_i) + 1000)
@@ -3186,6 +3212,88 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
describe "POST /api/traces" do
  # Happy path: both fixture payload shapes are persisted, hop rows are
  # written in order, and every referenced node has last_heard refreshed.
  it "stores traces with hop paths and updates last heard timestamps" do
    payload = trace_fixture
    post "/api/traces", payload.to_json, auth_headers
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq("status" => "ok")
    with_db(readonly: true) do |db|
      db.results_as_hash = true
      traces = db.execute("SELECT * FROM traces ORDER BY rx_time DESC")
      expect(traces.size).to eq(payload.size)
      # First fixture uses the canonical field names verbatim.
      primary = traces.find { |row| row["id"] == payload.first["id"] }
      expect(primary["request_id"]).to eq(payload.first["request_id"])
      expect(primary["src"]).to eq(payload.first["src"])
      expect(primary["dest"]).to eq(payload.first["dest"])
      expect(primary["rx_time"]).to eq(payload.first["rx_time"])
      expect(primary["rx_iso"]).to eq(Time.at(payload.first["rx_time"]).utc.iso8601)
      expect(primary["rssi"]).to eq(payload.first["rssi"])
      expect(primary["snr"]).to eq(payload.first["snr"])
      expect(primary["elapsed_ms"]).to eq(payload.first["elapsed_ms"])
      primary_hops = db.execute(
        "SELECT hop_index, node_id FROM trace_hops WHERE trace_id = ? ORDER BY hop_index",
        [primary["id"]],
      )
      expect(primary_hops.map { |row| row["node_id"] }).to eq(payload.first["hops"])
      # Second fixture exercises the alias keys (packet_id/req/from/...).
      secondary = traces.find { |row| row["id"] == payload.last["packet_id"] }
      expect(secondary["request_id"]).to eq(payload.last["req"])
      expect(secondary["src"]).to eq(payload.last["from"])
      expect(secondary["dest"]).to eq(payload.last["destination"])
      expect(secondary["rssi"]).to be_nil
      expect(secondary["snr"]).to eq(payload.last.dig("metrics", "snr"))
      expect(secondary["elapsed_ms"]).to eq(payload.last.dig("metrics", "latency_ms"))
      secondary_hops = db.execute(
        "SELECT hop_index, node_id FROM trace_hops WHERE trace_id = ? ORDER BY hop_index",
        [secondary["id"]],
      )
      # Hex-string hop ids are stored as integers.
      expect(secondary_hops.map { |row| row["node_id"] }).to eq([0xBEADF00D, 19_088_743])
      # All referenced node numbers should now exist as "!%08x" node rows.
      node_ids = [
        payload.first["src"],
        payload.first["dest"],
        payload.first["hops"][1],
        0xBEADF00D,
      ].map { |num| format("!%08x", num & 0xFFFFFFFF) }
      placeholders = node_ids.map { "?" }.join(",")
      rows = db.execute("SELECT node_id, last_heard FROM nodes WHERE node_id IN (#{placeholders})", node_ids)
      expect(rows.size).to eq(node_ids.size)
      latest_last_heard = rows.map { |row| row["last_heard"] }.max
      expect(latest_last_heard).to eq(payload.first["rx_time"])
    end
  end

  it "returns 400 when the payload is not valid JSON" do
    post "/api/traces", "{", auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "invalid JSON")
  end

  # Batches above 1000 traces are rejected outright and nothing is persisted.
  it "returns 400 when more than 1000 traces are provided" do
    payload = Array.new(1001) { |i| { "id" => i + 1, "rx_time" => reference_time.to_i - i } }
    post "/api/traces", payload.to_json, auth_headers
    expect(last_response.status).to eq(400)
    expect(JSON.parse(last_response.body)).to eq("error" => "too many traces")
    with_db(readonly: true) do |db|
      count = db.get_first_value("SELECT COUNT(*) FROM traces")
      expect(count).to eq(0)
    end
  end
end
it "returns 400 when more than 1000 messages are provided" do
payload = Array.new(1001) { |i| { "packet_id" => i + 1 } }
@@ -4242,6 +4350,60 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
describe "GET /api/traces" do
  it "returns stored traces ordered by receive time" do
    clear_database
    post "/api/traces", trace_fixture.to_json, auth_headers
    expect(last_response).to be_ok
    get "/api/traces"
    expect(last_response).to be_ok
    payload = JSON.parse(last_response.body)
    expect(payload.length).to eq(trace_fixture.length)
    # Newest rx_time first, so the canonical fixture precedes the alias one.
    expect(payload.map { |row| row["id"] }).to eq([trace_fixture.first["id"], trace_fixture.last["packet_id"]])
    latest = payload.first
    expect(latest["request_id"]).to eq(trace_fixture.first["request_id"])
    expect(latest["src"]).to eq(trace_fixture.first["src"])
    expect(latest["dest"]).to eq(trace_fixture.first["dest"])
    expect(latest["hops"]).to eq(trace_fixture.first["hops"])
    expect(latest["rx_iso"]).to eq(Time.at(trace_fixture.first["rx_time"]).utc.iso8601)
    earlier = payload.last
    expect(earlier["request_id"]).to eq(trace_fixture.last["req"])
    # Hex hop ids come back as integers; metrics are flattened to columns.
    expect(earlier["hops"]).to eq([0xBEADF00D, 19_088_743])
    expect(earlier["elapsed_ms"]).to eq(trace_fixture.last.dig("metrics", "latency_ms"))
  end

  it "filters traces by node reference across sources" do
    clear_database
    post "/api/traces", trace_fixture.to_json, auth_headers
    expect(last_response).to be_ok
    # A numeric reference matches the node wherever it appears in a trace.
    get "/api/traces/#{trace_fixture.first["src"]}"
    expect(last_response).to be_ok
    filtered = JSON.parse(last_response.body)
    expect(filtered.map { |row| row["id"] }).to include(trace_fixture.first["id"], trace_fixture.last["packet_id"])
    # Canonical "!hex" identifiers are accepted as filter values too.
    get "/api/traces/!beadf00d"
    expect(last_response).to be_ok
    bead_filtered = JSON.parse(last_response.body)
    expect(bead_filtered.map { |row| row["id"] }).to eq([trace_fixture.last["packet_id"]])
    expect(bead_filtered.first["hops"]).to eq([0xBEADF00D, 19_088_743])
  end

  it "returns an empty list when no traces are stored" do
    clear_database
    get "/api/traces"
    expect(last_response).to be_ok
    expect(JSON.parse(last_response.body)).to eq([])
  end
end
describe "GET /nodes/:id" do
before do
import_nodes_fixture
+18
View File
@@ -165,4 +165,22 @@ RSpec.describe PotatoMesh::App::Database do
expect(telemetry_columns).to include("soil_temperature", "lux", "iaq")
expect(telemetry_columns).to include("rx_time", "battery_level")
end
# Simulates a pre-traceroute database (only nodes/messages tables exist)
# and verifies the schema upgrade step creates both trace tables.
it "creates trace tables when absent" do
  SQLite3::Database.new(PotatoMesh::Config.db_path) do |db|
    db.execute("CREATE TABLE nodes(node_id TEXT)")
    db.execute("CREATE TABLE messages(id INTEGER PRIMARY KEY)")
  end
  expect(column_names_for("traces")).to be_empty
  expect(column_names_for("trace_hops")).to be_empty
  harness_class.ensure_schema_upgrades
  traces_columns = column_names_for("traces")
  expect(traces_columns).to include("request_id", "src", "dest", "rx_time", "rx_iso", "elapsed_ms")
  hop_columns = column_names_for("trace_hops")
  expect(hop_columns).to include("trace_id", "hop_index", "node_id")
end
end