This commit is contained in:
Jack Kingsman
2026-03-06 23:12:21 -08:00
parent 9c54ea623e
commit 3edc7d9bd1
17 changed files with 234 additions and 125 deletions

View File

@@ -95,7 +95,7 @@ def calculate_channel_hash(channel_key: bytes) -> str:
return format(hash_bytes[0], "02x")
def _decode_path_metadata(path_byte: int) -> tuple[int, int, int]:
def decode_path_metadata(path_byte: int) -> tuple[int, int, int]:
"""Decode the packed path byte into hop count and byte length."""
path_hash_size = (path_byte >> 6) + 1
path_length = path_byte & 0x3F
@@ -135,7 +135,7 @@ def extract_payload(raw_packet: bytes) -> bytes | None:
# Decode packed path metadata
if len(raw_packet) < offset + 1:
return None
path_length, _path_hash_size, path_byte_length = _decode_path_metadata(raw_packet[offset])
path_length, _path_hash_size, path_byte_length = decode_path_metadata(raw_packet[offset])
offset += 1
# Skip path data
@@ -171,7 +171,7 @@ def parse_packet(raw_packet: bytes) -> PacketInfo | None:
# Decode packed path metadata
if len(raw_packet) < offset + 1:
return None
path_length, path_hash_size, path_byte_length = _decode_path_metadata(raw_packet[offset])
path_length, path_hash_size, path_byte_length = decode_path_metadata(raw_packet[offset])
offset += 1
# Extract path data

View File

@@ -44,8 +44,13 @@ def _format_body(data: dict, *, include_path: bool) -> str:
via = ""
if include_path:
paths = data.get("paths")
if paths and isinstance(paths, list) and len(paths) > 0:
path_str = paths[0].get("path", "") if isinstance(paths[0], dict) else ""
first_path = (
paths[0]
if isinstance(paths, list) and len(paths) > 0 and isinstance(paths[0], dict)
else None
)
if first_path is not None:
path_str = first_path.get("path", "")
else:
path_str = None
@@ -56,7 +61,7 @@ def _format_body(data: dict, *, include_path: bool) -> str:
if path_str == "":
via = " **via:** [`direct`]"
else:
path_len = paths[0].get("path_len") if isinstance(paths[0], dict) else None
path_len = first_path.get("path_len") if first_path is not None else None
hop_chars = (
len(path_str) // path_len
if isinstance(path_len, int) and path_len > 0 and len(path_str) % path_len == 0

View File

@@ -23,6 +23,7 @@ from typing import Any, Protocol
import aiomqtt
import nacl.bindings
from app.decoder import decode_path_metadata
from app.fanout.mqtt_base import BaseMqttPublisher
logger = logging.getLogger(__name__)
@@ -146,16 +147,16 @@ def _calculate_packet_hash(raw_bytes: bytes) -> str:
if has_transport:
offset += 4 # Skip 4 bytes of transport codes
# Read path_len (1 byte on wire). Invalid/truncated packets map to zero hash.
# Read packed path metadata. Invalid/truncated packets map to zero hash.
if offset >= len(raw_bytes):
return "0" * 16
path_len = raw_bytes[offset]
path_len, _path_hash_size, path_byte_length = decode_path_metadata(raw_bytes[offset])
offset += 1
# Skip past path to get to payload. Invalid/truncated packets map to zero hash.
if len(raw_bytes) < offset + path_len:
if len(raw_bytes) < offset + path_byte_length:
return "0" * 16
payload_start = offset + path_len
payload_start = offset + path_byte_length
payload_data = raw_bytes[payload_start:]
# Hash: payload_type(1 byte) [+ path_len as uint16_t LE for TRACE] + payload_data
@@ -202,20 +203,24 @@ def _decode_packet_fields(raw_bytes: bytes) -> tuple[str, str, str, list[str], i
if len(raw_bytes) <= offset:
return route, packet_type, payload_len, path_values, payload_type
path_len = raw_bytes[offset]
path_len, path_hash_size, path_byte_length = decode_path_metadata(raw_bytes[offset])
offset += 1
if len(raw_bytes) < offset + path_len:
if len(raw_bytes) < offset + path_byte_length:
return route, packet_type, payload_len, path_values, payload_type
path_bytes = raw_bytes[offset : offset + path_len]
offset += path_len
path_bytes = raw_bytes[offset : offset + path_byte_length]
offset += path_byte_length
payload_type = (header >> 2) & 0x0F
route = _ROUTE_MAP.get(route_type, "U")
packet_type = str(payload_type)
payload_len = str(max(0, len(raw_bytes) - offset))
path_values = [f"{b:02x}" for b in path_bytes]
path_values = [
path_bytes[i : i + path_hash_size].hex()
for i in range(0, len(path_bytes), path_hash_size)
if i + path_hash_size <= len(path_bytes)
]
return route, packet_type, payload_len, path_values, payload_type
except Exception:

View File

@@ -13,6 +13,8 @@ from hashlib import sha256
import aiosqlite
from app.decoder import extract_payload, parse_packet
logger = logging.getLogger(__name__)
@@ -442,35 +444,7 @@ def _extract_payload_for_hash(raw_packet: bytes) -> bytes | None:
Returns the payload bytes, or None if packet is malformed.
"""
if len(raw_packet) < 2:
return None
try:
header = raw_packet[0]
route_type = header & 0x03
offset = 1
# Skip transport codes if present (TRANSPORT_FLOOD=0, TRANSPORT_DIRECT=3)
if route_type in (0x00, 0x03):
if len(raw_packet) < offset + 4:
return None
offset += 4
# Get path length
if len(raw_packet) < offset + 1:
return None
path_length = raw_packet[offset]
offset += 1
# Skip path bytes
if len(raw_packet) < offset + path_length:
return None
offset += path_length
# Rest is payload (may be empty, matching decoder.py behavior)
return raw_packet[offset:]
except (IndexError, ValueError):
return None
return extract_payload(raw_packet)
async def _migrate_005_backfill_payload_hashes(conn: aiosqlite.Connection) -> None:
@@ -624,34 +598,10 @@ def _extract_path_from_packet(raw_packet: bytes) -> str | None:
Returns the path as a hex string, or None if packet is malformed.
"""
if len(raw_packet) < 2:
return None
try:
header = raw_packet[0]
route_type = header & 0x03
offset = 1
# Skip transport codes if present (TRANSPORT_FLOOD=0, TRANSPORT_DIRECT=3)
if route_type in (0x00, 0x03):
if len(raw_packet) < offset + 4:
return None
offset += 4
# Get path length
if len(raw_packet) < offset + 1:
return None
path_length = raw_packet[offset]
offset += 1
# Extract path bytes
if len(raw_packet) < offset + path_length:
return None
path_bytes = raw_packet[offset : offset + path_length]
return path_bytes.hex()
except (IndexError, ValueError):
packet_info = parse_packet(raw_packet)
if packet_info is None:
return None
return packet_info.path.hex()
async def _migrate_007_backfill_message_paths(conn: aiosqlite.Connection) -> None:

View File

@@ -102,7 +102,7 @@ class ContactAdvertPath(BaseModel):
path: str = Field(description="Hex-encoded routing path (empty string for direct)")
path_len: int = Field(description="Number of hops in the path")
next_hop: str | None = Field(
default=None, description="First hop toward us (2-char hex), or null for direct"
default=None, description="First hop toward us, or null for direct"
)
first_seen: int = Field(description="Unix timestamp of first observation")
last_seen: int = Field(description="Unix timestamp of most recent observation")
@@ -201,9 +201,7 @@ class MessagePath(BaseModel):
path: str = Field(description="Hex-encoded routing path")
received_at: int = Field(description="Unix timestamp when this path was received")
path_len: int | None = Field(
default=None, description="Number of hops in the path, when known"
)
path_len: int | None = Field(default=None, description="Number of hops in the path, when known")
class Message(BaseModel):

View File

@@ -91,9 +91,7 @@ async def _handle_duplicate_message(
# Add path if provided
if path is not None:
paths = await MessageRepository.add_path(
existing_msg.id, path, received, path_len=path_len
)
paths = await MessageRepository.add_path(existing_msg.id, path, received, path_len=path_len)
else:
# Get current paths for broadcast
paths = existing_msg.paths or []
@@ -731,6 +729,7 @@ async def _process_advertisement(
path_hex=new_path_hex,
timestamp=timestamp,
max_paths=10,
path_len=new_path_len,
)
# Record name history

32
app/path_utils.py Normal file
View File

@@ -0,0 +1,32 @@
"""Helpers for working with hex-encoded routing paths."""
def get_path_hop_width(path_hex: str | None, path_len: int | None) -> int:
"""Return hop width in hex chars, falling back to legacy 1-byte hops."""
if not path_hex:
return 2
if isinstance(path_len, int) and path_len > 0 and len(path_hex) % path_len == 0:
hop_width = len(path_hex) // path_len
if hop_width > 0 and hop_width % 2 == 0:
return hop_width
return 2
def split_path_hops(path_hex: str | None, path_len: int | None) -> list[str]:
    """Split a hex path string into equal hop-sized chunks, lowercased.

    Any trailing characters that do not fill a complete hop are dropped.
    """
    if not path_hex:
        return []
    width = get_path_hop_width(path_hex, path_len)
    lowered = path_hex.lower()
    full_hops = len(lowered) // width
    return [lowered[n * width : (n + 1) * width] for n in range(full_hops)]
def first_path_hop(path_hex: str | None, path_len: int | None) -> str | None:
    """Return the first hop chunk from a hex path string, or None when empty."""
    for hop in split_path_hops(path_hex, path_len):
        return hop
    return None

View File

@@ -8,6 +8,7 @@ from app.models import (
ContactAdvertPathSummary,
ContactNameHistory,
)
from app.path_utils import first_path_hop
class AmbiguousPublicKeyPrefixError(ValueError):
@@ -287,7 +288,7 @@ class ContactAdvertPathRepository:
@staticmethod
def _row_to_path(row) -> ContactAdvertPath:
path = row["path_hex"] or ""
next_hop = path[:2].lower() if len(path) >= 2 else None
next_hop = first_path_hop(path, row["path_len"])
return ContactAdvertPath(
path=path,
path_len=row["path_len"],
@@ -303,6 +304,7 @@ class ContactAdvertPathRepository:
path_hex: str,
timestamp: int,
max_paths: int = 10,
path_len: int | None = None,
) -> None:
"""
Upsert a unique advert path observation for a contact and prune to N most recent.
@@ -312,7 +314,7 @@ class ContactAdvertPathRepository:
normalized_key = public_key.lower()
normalized_path = path_hex.lower()
path_len = len(normalized_path) // 2
normalized_path_len = path_len if isinstance(path_len, int) else len(normalized_path) // 2
await db.conn.execute(
"""
@@ -324,7 +326,7 @@ class ContactAdvertPathRepository:
path_len = excluded.path_len,
heard_count = contact_advert_paths.heard_count + 1
""",
(normalized_key, normalized_path, path_len, timestamp, timestamp),
(normalized_key, normalized_path, normalized_path_len, timestamp, timestamp),
)
# Keep only the N most recent unique paths per contact.

View File

@@ -16,6 +16,7 @@ from app.models import (
TraceResponse,
)
from app.packet_processor import start_historical_dm_decryption
from app.path_utils import first_path_hop
from app.radio import radio_manager
from app.repository import (
AmbiguousPublicKeyPrefixError,
@@ -201,11 +202,11 @@ async def get_contact_detail(public_key: str) -> ContactDetail:
if span_hours > 0:
advert_frequency = round(total_observations / span_hours, 2)
# Compute nearest repeaters from first-hop prefixes in advert paths
first_hop_stats: dict[str, dict] = {} # prefix -> {heard_count, path_len, last_seen}
# Compute nearest repeaters from first hops in advert paths
first_hop_stats: dict[str, dict] = {} # first hop -> {heard_count, path_len, last_seen}
for p in advert_paths:
if p.path and len(p.path) >= 2:
prefix = p.path[:2].lower()
prefix = first_path_hop(p.path, p.path_len)
if prefix:
if prefix not in first_hop_stats:
first_hop_stats[prefix] = {
"heard_count": 0,

View File

@@ -3,44 +3,10 @@ import { GroupTextCracker, type ProgressReport } from 'meshcore-hashtag-cracker'
import NoSleep from 'nosleep.js';
import type { RawPacket, Channel } from '../types';
import { api } from '../api';
import { extractRawPacketPayload } from '../utils/rawPacketPayload';
import { toast } from './ui/sonner';
import { cn } from '@/lib/utils';
/**
* Extract the payload from a raw packet hex string, skipping header and path.
* Returns the payload as a hex string, or null if malformed.
*/
function extractPayload(packetHex: string): string | null {
if (packetHex.length < 4) return null; // Need at least 2 bytes
try {
const header = parseInt(packetHex.slice(0, 2), 16);
const routeType = header & 0x03;
let offset = 2; // 1 byte = 2 hex chars
// Skip transport codes if present (TRANSPORT_FLOOD=0, TRANSPORT_DIRECT=3)
if (routeType === 0x00 || routeType === 0x03) {
if (packetHex.length < offset + 8) return null; // Need 4 more bytes
offset += 8; // 4 bytes = 8 hex chars
}
// Get path length
if (packetHex.length < offset + 2) return null;
const pathLength = parseInt(packetHex.slice(offset, offset + 2), 16);
offset += 2;
// Skip path data
const pathBytes = pathLength * 2; // hex chars
if (packetHex.length < offset + pathBytes) return null;
offset += pathBytes;
// Rest is payload
return packetHex.slice(offset);
} catch {
return null;
}
}
interface CrackedRoom {
roomName: string;
key: string;
@@ -177,7 +143,7 @@ export function CrackerPanel({
for (const packet of undecryptedGroupText) {
if (!newQueue.has(packet.id)) {
// Extract payload and check for duplicates
const payload = extractPayload(packet.data);
const payload = extractRawPacketPayload(packet.data);
if (payload && seenPayloadsRef.current.has(payload)) {
// Skip - we already have a packet with this payload queued
newSkipped++;

View File

@@ -573,9 +573,7 @@ describe('formatHopCounts', () => {
});
it('uses explicit path_len for multi-byte hop counts', () => {
const result = formatHopCounts([
{ path: '1A2B3C4D', path_len: 2, received_at: 1700000000 },
]);
const result = formatHopCounts([{ path: '1A2B3C4D', path_len: 2, received_at: 1700000000 }]);
expect(result.display).toBe('2');
expect(result.allDirect).toBe(false);
expect(result.hasMultiple).toBe(false);

View File

@@ -0,0 +1,21 @@
import { describe, expect, it } from 'vitest';
import { extractRawPacketPayload } from '../utils/rawPacketPayload';
describe('extractRawPacketPayload', () => {
  // Packed path byte 0x02: hash size 1, two 1-byte hops (AA BB) before payload.
  it('extracts payload for legacy one-byte hops', () => {
    expect(extractRawPacketPayload('1502AABBDEADBEEF')).toBe('DEADBEEF');
  });
  // Packed path byte 0x42: hash size 2, two 2-byte hops (2027 3031).
  it('extracts payload for multi-byte hops', () => {
    expect(extractRawPacketPayload('154220273031DEADBEEF')).toBe('DEADBEEF');
  });
  // Header 0x14 -> route type 0 (transport): 4 transport bytes precede the path byte.
  it('extracts payload for transport packets with multi-byte hops', () => {
    expect(extractRawPacketPayload('14010203044220273031DEADBEEF')).toBe('DEADBEEF');
  });
  // Path byte promises 4 path bytes but only 2 follow -> malformed.
  it('returns null for truncated multi-byte path data', () => {
    expect(extractRawPacketPayload('15422027')).toBeNull();
  });
});

View File

@@ -0,0 +1,39 @@
/**
 * Decode a packed path byte (as a 2-char hex string) into the total number
 * of path bytes that follow it on the wire.
 *
 * Layout: top two bits encode (hash size - 1), low six bits the hop count.
 */
function decodePathMetadata(pathByteHex: string): { pathByteLength: number } {
  const packed = parseInt(pathByteHex, 16);
  const hashSize = (packed >> 6) + 1;
  const hopCount = packed & 0x3f;
  return { pathByteLength: hopCount * hashSize };
}
/**
 * Extract the payload from a raw packet hex string, skipping header and path.
 * Returns the payload as a hex string, or null if malformed.
 */
export function extractRawPacketPayload(packetHex: string): string | null {
  // Minimum: header byte + packed path byte = 2 bytes = 4 hex chars.
  if (packetHex.length < 4) return null;
  try {
    const headerByte = parseInt(packetHex.slice(0, 2), 16);
    let cursor = 2; // offset in hex chars (1 byte = 2 chars)
    // Route types 0x00 and 0x03 carry 4 extra transport-code bytes.
    const routeType = headerByte & 0x03;
    if (routeType === 0x00 || routeType === 0x03) {
      if (packetHex.length < cursor + 8) return null;
      cursor += 8;
    }
    if (packetHex.length < cursor + 2) return null;
    // Packed path byte: top two bits -> hash size - 1, low six bits -> hop count.
    const packed = parseInt(packetHex.slice(cursor, cursor + 2), 16);
    cursor += 2;
    const pathHexChars = (packed & 0x3f) * ((packed >> 6) + 1) * 2;
    if (packetHex.length < cursor + pathHexChars) return null;
    // Everything after the path is payload (may be empty).
    return packetHex.slice(cursor + pathHexChars);
  } catch {
    return null;
  }
}

View File

@@ -260,6 +260,12 @@ class TestPacketFormatConversion:
assert result["route"] == "D"
assert result["path"] == "aa,bb"
def test_adds_path_for_multi_byte_direct_route(self):
    """Direct-route packet with packed path byte 0x42 renders comma-joined 2-byte hops."""
    # Packed path byte 0x42 -> hash size 2, hop count 2; path bytes 20 27 30 31
    # become hops "2027" and "3031"; trailing CC is payload.
    data = {"timestamp": 0, "data": "024220273031CC", "snr": 1.0, "rssi": -70}
    result = _format_raw_packet(data, "Node", "AA" * 32)
    assert result["route"] == "D"
    assert result["path"] == "2027,3031"
def test_direct_route_includes_empty_path_field(self):
data = {"timestamp": 0, "data": "0200", "snr": 1.0, "rssi": -70}
result = _format_raw_packet(data, "Node", "AA" * 32)
@@ -359,6 +365,30 @@ class TestCalculatePacketHash:
expected = hashlib.sha256(bytes([2]) + payload).hexdigest()[:16].upper()
assert result == expected
def test_multi_byte_path_uses_hop_count_for_trace_hash(self):
    """TRACE packet hash mixes in the hop count (2), not the path byte length (4)."""
    import hashlib

    payload = b"\x99\x88"
    # Header 0x25 -> payload_type (0x25 >> 2) & 0x0F == 9; path byte 0x42 -> 2 hops of 2 bytes.
    raw = bytes([0x25, 0x42, 0x20, 0x27, 0x30, 0x31]) + payload
    result = _calculate_packet_hash(raw)
    # Expected hash input: payload_type byte + hop count as uint16 LE + payload;
    # result is the first 16 hex chars of sha256, uppercased.
    expected = (
        hashlib.sha256(bytes([9]) + (2).to_bytes(2, byteorder="little") + payload)
        .hexdigest()[:16]
        .upper()
    )
    assert result == expected
def test_multi_byte_path_skips_full_byte_length(self):
    """Non-TRACE hash skips hop_count * hash_size path bytes before hashing the payload."""
    import hashlib

    payload = b"\xde\xad\xbe\xef"
    # Header 0x09 -> payload_type 2; path byte 0x42 -> 4 path bytes (20 27 30 31) to skip.
    raw = bytes([0x09, 0x42, 0x20, 0x27, 0x30, 0x31]) + payload
    result = _calculate_packet_hash(raw)
    expected = hashlib.sha256(bytes([2]) + payload).hexdigest()[:16].upper()
    assert result == expected
def test_truncated_packet_returns_zeroes(self):
# Header says TRANSPORT_FLOOD, but missing path_len at required offset.
raw = bytes([0x10, 0x01, 0x02])

View File

@@ -214,6 +214,22 @@ class TestAdvertPaths:
assert data[0]["path"] == ""
assert data[0]["next_hop"] is None
@pytest.mark.asyncio
async def test_get_contact_advert_paths_with_multi_byte_hops(self, test_db, client):
    """Advert paths recorded with an explicit path_len expose a multi-byte next_hop."""
    repeater_key = KEY_A
    await _insert_contact(repeater_key, "R1", type=2)
    # 8 hex chars with path_len=2 -> two 2-byte hops; next_hop is the first 4 chars.
    await ContactAdvertPathRepository.record_observation(
        repeater_key, "a1b2c3d4", 1000, path_len=2
    )
    response = await client.get(f"/api/contacts/{repeater_key}/advert-paths")
    assert response.status_code == 200
    data = response.json()
    assert len(data) == 1
    assert data[0]["path_len"] == 2
    assert data[0]["next_hop"] == "a1b2"
@pytest.mark.asyncio
async def test_get_contact_advert_paths_works_for_non_repeater(self, test_db, client):
await _insert_contact(KEY_A, "Alice", type=1)
@@ -326,6 +342,25 @@ class TestContactDetail:
assert repeater["name"] == "Relay1"
assert repeater["heard_count"] == 2
@pytest.mark.asyncio
async def test_detail_nearest_repeaters_resolved_for_multi_byte_hops(self, test_db, client):
    """Nearest repeaters match on the full multi-byte first hop, not a 1-byte prefix."""
    await _insert_contact(KEY_A, "Alice", type=1)
    # Repeater whose public key starts with the 2-byte hop value "b1c2".
    repeater_key = "b1c2" + "dd" * 30
    await _insert_contact(repeater_key, "RelayWide", type=2)
    # Two distinct paths sharing first hop "b1c2" (path_len=2 -> 2-byte hops).
    await ContactAdvertPathRepository.record_observation(KEY_A, "b1c2eeff", 1000, path_len=2)
    await ContactAdvertPathRepository.record_observation(KEY_A, "b1c21122", 1010, path_len=2)
    response = await client.get(f"/api/contacts/{KEY_A}/detail")
    assert response.status_code == 200
    data = response.json()
    assert len(data["nearest_repeaters"]) == 1
    repeater = data["nearest_repeaters"][0]
    assert repeater["public_key"] == repeater_key
    assert repeater["name"] == "RelayWide"
    # Both path observations count toward the same repeater.
    assert repeater["heard_count"] == 2
@pytest.mark.asyncio
async def test_detail_advert_frequency_computed(self, test_db, client):
"""Advert frequency is computed from path observations over time span."""

View File

@@ -3,7 +3,13 @@
import aiosqlite
import pytest
from app.migrations import get_version, run_migrations, set_version
from app.migrations import (
_extract_path_from_packet,
_extract_payload_for_hash,
get_version,
run_migrations,
set_version,
)
class TestMigrationSystem:
@@ -30,6 +36,14 @@ class TestMigrationSystem:
finally:
await conn.close()
def test_extract_payload_for_hash_handles_multi_byte_hops(self):
    """Payload extraction skips all hop_count * hash_size path bytes (0x42 -> 4 bytes)."""
    # Header 0x15 -> route type 1 (no transport codes); path bytes 20 27 30 31.
    raw = bytes([0x15, 0x42, 0x20, 0x27, 0x30, 0x31]) + b"\xde\xad\xbe\xef"
    assert _extract_payload_for_hash(raw) == b"\xde\xad\xbe\xef"
def test_extract_path_from_packet_handles_multi_byte_hops(self):
    """Path extraction returns every hop byte as hex for packed multi-byte paths."""
    # Path byte 0x42 -> 2 hops x 2 bytes = 4 path bytes: 20 27 30 31.
    raw = bytes([0x15, 0x42, 0x20, 0x27, 0x30, 0x31]) + b"\xde\xad\xbe\xef"
    assert _extract_path_from_packet(raw) == "20273031"
class TestMigration001:
"""Test migration 001: add last_read_at columns."""

View File

@@ -274,6 +274,20 @@ class TestContactAdvertPathRepository:
assert paths[0].last_seen == 1010
assert paths[0].heard_count == 2
@pytest.mark.asyncio
async def test_record_observation_preserves_multi_byte_next_hop(self, test_db):
    """record_observation with explicit path_len yields a 2-byte next_hop chunk."""
    repeater_key = "ab" * 32
    await ContactRepository.upsert({"public_key": repeater_key, "name": "R3", "type": 2})
    # 8 hex chars / path_len=2 -> 4-char hop width; first hop is "a1b2".
    await ContactAdvertPathRepository.record_observation(
        repeater_key, "a1b2c3d4", 1000, path_len=2
    )
    paths = await ContactAdvertPathRepository.get_recent_for_contact(repeater_key, limit=10)
    assert len(paths) == 1
    assert paths[0].path_len == 2
    assert paths[0].next_hop == "a1b2"
@pytest.mark.asyncio
async def test_prunes_to_most_recent_n_unique_paths(self, test_db):
repeater_key = "bb" * 32