Add multipath tracking

Jack Kingsman
2026-01-18 20:00:32 -08:00
parent 0fea2889b2
commit c4ef8ec9cd
30 changed files with 1115 additions and 311 deletions
+6 -1
@@ -90,6 +90,11 @@ async def on_contact_message(event: "Event") -> None:
logger.debug("Duplicate direct message from %s ignored", sender_pubkey[:12])
return
# Build paths array for broadcast
# Use "is not None" to include empty string (direct/0-hop messages)
path = payload.get("path")
paths = [{"path": path or "", "received_at": received_at}] if path is not None else None
# Broadcast only genuinely new messages
broadcast_event(
"message",
@@ -100,7 +105,7 @@ async def on_contact_message(event: "Event") -> None:
"text": payload.get("text", ""),
"sender_timestamp": payload.get("sender_timestamp"),
"received_at": received_at,
"path": payload.get("path"),
"paths": paths,
"txt_type": payload.get("txt_type", 0),
"signature": payload.get("signature"),
"outgoing": False,
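The `path is not None` guard deserves a note: an empty string is a meaningful value here (a direct, 0-hop delivery), while a missing key means the payload carried no path information at all. A standalone sketch of the conversion, with illustrative values:

payload = {"path": ""}  # empty string = direct (0-hop) delivery
received_at = 1737264032  # illustrative timestamp

path = payload.get("path")
# "is not None" keeps the empty string, so direct messages still get a path entry
paths = [{"path": path or "", "received_at": received_at}] if path is not None else None
assert paths == [{"path": "", "received_at": 1737264032}]

# With no "path" key at all, no paths array is built
assert {}.get("path") is None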
+68
@@ -86,6 +86,13 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
await set_version(conn, 7)
applied += 1
# Migration 8: Convert path column to paths JSON array for multiple delivery paths
if version < 8:
logger.info("Applying migration 8: convert path to paths JSON array")
await _migrate_008_convert_path_to_paths_array(conn)
await set_version(conn, 8)
applied += 1
if applied > 0:
logger.info(
"Applied %d migration(s), schema now at version %d", applied, await get_version(conn)
@@ -514,3 +521,64 @@ async def _migrate_007_backfill_message_paths(conn: aiosqlite.Connection) -> None:
await conn.commit()
logger.info("Path backfill complete: %d messages updated", updated)
async def _migrate_008_convert_path_to_paths_array(conn: aiosqlite.Connection) -> None:
"""
Convert path TEXT column to paths TEXT column storing JSON array.
The new format stores multiple paths as a JSON array of objects:
[{"path": "1A2B", "received_at": 1234567890}, ...]
This enables tracking multiple delivery paths for the same message
(e.g., when a message is received via different repeater routes).
"""
import json
# First, add the new paths column
try:
await conn.execute("ALTER TABLE messages ADD COLUMN paths TEXT")
logger.debug("Added paths column to messages table")
except aiosqlite.OperationalError as e:
if "duplicate column name" in str(e).lower():
logger.debug("messages.paths already exists, skipping column add")
else:
raise
# Migrate existing path data to paths array format
cursor = await conn.execute(
"SELECT id, path, received_at FROM messages WHERE path IS NOT NULL AND paths IS NULL"
)
rows = await cursor.fetchall()
if rows:
logger.info("Converting %d messages from path to paths array format...", len(rows))
for row in rows:
message_id = row[0]
old_path = row[1]
received_at = row[2]
# Convert single path to array format
paths_json = json.dumps([{"path": old_path, "received_at": received_at}])
await conn.execute(
"UPDATE messages SET paths = ? WHERE id = ?",
(paths_json, message_id),
)
logger.info("Converted %d messages to paths array format", len(rows))
# Try to drop the old path column (SQLite 3.35.0+ only)
try:
await conn.execute("ALTER TABLE messages DROP COLUMN path")
logger.debug("Dropped path column from messages table")
except aiosqlite.OperationalError as e:
error_msg = str(e).lower()
if "no such column" in error_msg:
logger.debug("messages.path already dropped, skipping")
elif "syntax error" in error_msg or "drop column" in error_msg:
# SQLite version doesn't support DROP COLUMN - harmless, column stays
logger.debug("SQLite doesn't support DROP COLUMN, path column will remain")
else:
raise
await conn.commit()
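Taken row by row, the conversion this migration performs amounts to the following (a sketch with made-up values; the real code runs it inside the aiosqlite loop above):

import json

old_row = {"id": 42, "path": "1A2B", "received_at": 1737264032}  # hypothetical row

# Single path becomes a one-element JSON array, preserving the original timestamp
paths_json = json.dumps([{"path": old_row["path"], "received_at": old_row["received_at"]}])
# paths_json == '[{"path": "1A2B", "received_at": 1737264032}]'

# Later repeats of the same message via other routes append to the array
paths = json.loads(paths_json)
paths.append({"path": "3C4D", "received_at": 1737264090})
paths_json = json.dumps(paths)

Keeping the original received_at on each entry is what lets the UI show when each route delivered its copy.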
+10 -1
@@ -67,6 +67,13 @@ class Channel(BaseModel):
last_read_at: int | None = None # Server-side read state tracking
class MessagePath(BaseModel):
"""A single path that a message took to reach us."""
path: str = Field(description="Hex-encoded routing path (2 chars per hop)")
received_at: int = Field(description="Unix timestamp when this path was received")
class Message(BaseModel):
id: int
type: str = Field(description="PRIV or CHAN")
@@ -74,7 +81,9 @@ class Message(BaseModel):
text: str
sender_timestamp: int | None = None
received_at: int
path: str | None = Field(default=None, description="Hex-encoded routing path (2 chars per hop)")
paths: list[MessagePath] | None = Field(
default=None, description="List of routing paths this message arrived via"
)
txt_type: int = 0
signature: str | None = None
outgoing: bool = False
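Assuming pydantic v2 (which the `model_dump` calls elsewhere in this diff imply), the new model serializes directly to the wire shape used in broadcasts; a minimal round trip:

from pydantic import BaseModel, Field

class MessagePath(BaseModel):
    path: str = Field(description="Hex-encoded routing path (2 chars per hop)")
    received_at: int = Field(description="Unix timestamp when this path was received")

mp = MessagePath(path="1A2B", received_at=1737264032)
assert mp.model_dump() == {"path": "1A2B", "received_at": 1737264032}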
+87 -82
@@ -17,6 +17,7 @@ import logging
import time
from app.decoder import (
PacketInfo,
PayloadType,
parse_advertisement,
parse_packet,
@@ -34,13 +35,6 @@ from app.websocket import broadcast_event
logger = logging.getLogger(__name__)
# Pending repeats for outgoing message ACK detection
# Key: (channel_key, text_hash, timestamp) -> message_id
_pending_repeats: dict[tuple[str, str, int], int] = {}
_pending_repeat_expiry: dict[tuple[str, str, int], float] = {}
REPEAT_EXPIRY_SECONDS = 30
async def create_message_from_decrypted(
packet_id: int,
channel_key: str,
@@ -66,9 +60,7 @@ async def create_message_from_decrypted(
Returns the message ID if created, None if duplicate.
"""
import time as time_module
received = received_at or int(time_module.time())
received = received_at or int(time.time())
# Format the message text with sender prefix if present
text = f"{sender}: {message_text}" if sender else message_text
@@ -87,14 +79,56 @@ async def create_message_from_decrypted(
)
if msg_id is None:
# This shouldn't happen - raw packets are deduplicated by payload hash,
# so the same message content shouldn't be created twice. Log a warning.
logger.warning(
"Unexpected duplicate message for channel %s (packet_id=%d) - "
"this may indicate a bug in payload deduplication",
channel_key_normalized[:8],
packet_id,
# Duplicate message detected - this happens when:
# 1. Our own outgoing message echoes back (flood routing)
# 2. Same message arrives via multiple paths before first is committed
# In either case, add the path to the existing message.
existing_msg = await MessageRepository.get_by_content(
msg_type="CHAN",
conversation_key=channel_key_normalized,
text=text,
sender_timestamp=timestamp,
)
if not existing_msg:
logger.warning(
"Duplicate message for channel %s but couldn't find existing",
channel_key_normalized[:8],
)
return None
logger.debug(
"Duplicate message for channel %s (msg_id=%d, outgoing=%s) - adding path",
channel_key_normalized[:8],
existing_msg.id,
existing_msg.outgoing,
)
# Add path if provided
if path is not None:
paths = await MessageRepository.add_path(existing_msg.id, path, received)
else:
# Get current paths for broadcast
paths = existing_msg.paths or []
# Increment ack count for outgoing messages (echo confirmation)
if existing_msg.outgoing:
ack_count = await MessageRepository.increment_ack_count(existing_msg.id)
else:
ack_count = await MessageRepository.get_ack_count(existing_msg.id)
# Broadcast updated paths
broadcast_event(
"message_acked",
{
"message_id": existing_msg.id,
"ack_count": ack_count,
"paths": [p.model_dump() for p in paths] if paths else [],
},
)
# Mark this packet as decrypted
await RawPacketRepository.mark_decrypted(packet_id, existing_msg.id)
return None
logger.info("Stored channel message %d for channel %s", msg_id, channel_key_normalized[:8])
@@ -102,6 +136,10 @@ async def create_message_from_decrypted(
# Mark the raw packet as decrypted
await RawPacketRepository.mark_decrypted(packet_id, msg_id)
# Build paths array for broadcast
# Use "is not None" to include empty string (direct/0-hop messages)
paths = [{"path": path or "", "received_at": received}] if path is not None else None
# Broadcast new message to connected clients
broadcast_event(
"message",
@@ -112,7 +150,7 @@ async def create_message_from_decrypted(
"text": text,
"sender_timestamp": timestamp,
"received_at": received,
"path": path,
"paths": paths,
"txt_type": 0,
"signature": None,
"outgoing": False,
@@ -123,24 +161,6 @@ async def create_message_from_decrypted(
return msg_id
def track_pending_repeat(channel_key: str, text: str, timestamp: int, message_id: int) -> None:
"""Track an outgoing channel message for repeat detection."""
text_hash = str(hash(text))
key = (channel_key.upper(), text_hash, timestamp)
_pending_repeats[key] = message_id
_pending_repeat_expiry[key] = time.time() + REPEAT_EXPIRY_SECONDS
logger.debug("Tracking repeat for channel %s, message %d", channel_key[:8], message_id)
def _cleanup_expired_repeats() -> None:
"""Remove expired pending repeats."""
now = time.time()
expired = [k for k, exp in _pending_repeat_expiry.items() if exp < now]
for k in expired:
_pending_repeats.pop(k, None)
_pending_repeat_expiry.pop(k, None)
async def process_raw_packet(
raw_bytes: bytes,
timestamp: int | None = None,
@@ -167,6 +187,16 @@ async def process_raw_packet(
payload_type = packet_info.payload_type if packet_info else None
payload_type_name = payload_type.name if payload_type else "Unknown"
# Log packet arrival at debug level
path_hex = packet_info.path.hex() if packet_info and packet_info.path else ""
logger.debug(
"Packet received: type=%s, is_new=%s, packet_id=%d, path='%s'",
payload_type_name,
is_new_packet,
packet_id,
path_hex[:8] if path_hex else "(direct)",
)
result = {
"packet_id": packet_id,
"timestamp": ts,
@@ -180,22 +210,24 @@ async def process_raw_packet(
"sender": None,
}
# Only process new packets - duplicates were already processed when first received
if is_new_packet:
# Try to decrypt/parse based on payload type
if payload_type == PayloadType.GROUP_TEXT:
decrypt_result = await _process_group_text(raw_bytes, packet_id, ts, packet_info)
if decrypt_result:
result.update(decrypt_result)
# Process packets based on payload type
# For GROUP_TEXT, we always try to decrypt even for duplicate packets - the message
# deduplication in create_message_from_decrypted handles adding paths to existing messages.
# This is more reliable than trying to look up the message via raw packet linking.
if payload_type == PayloadType.GROUP_TEXT:
decrypt_result = await _process_group_text(raw_bytes, packet_id, ts, packet_info)
if decrypt_result:
result.update(decrypt_result)
elif payload_type == PayloadType.ADVERT:
await _process_advertisement(raw_bytes, ts, packet_info)
elif payload_type == PayloadType.ADVERT and is_new_packet:
# Only process new advertisements (duplicates don't add value)
await _process_advertisement(raw_bytes, ts, packet_info)
# TODO: Add TEXT_MESSAGE (direct message) decryption when private key is available
# elif payload_type == PayloadType.TEXT_MESSAGE:
# decrypt_result = await _process_direct_message(raw_bytes, packet_id, ts, packet_info)
# if decrypt_result:
# result.update(decrypt_result)
# Always broadcast raw packet for the packet feed UI (even duplicates)
# This enables the frontend cracker to see all incoming packets in real-time
@@ -223,14 +255,13 @@ async def _process_group_text(
raw_bytes: bytes,
packet_id: int,
timestamp: int,
packet_info,
packet_info: PacketInfo | None,
) -> dict | None:
"""
Process a GroupText (channel message) packet.
Tries all known channel keys to decrypt.
Creates a message entry if successful.
Handles repeat detection for outgoing message ACKs.
Creates a message entry if successful (or adds path to existing if duplicate).
"""
# Try to decrypt with all known channel keys
channels = await ChannelRepository.get_all()
@@ -249,34 +280,8 @@ async def _process_group_text(
# Successfully decrypted!
logger.debug("Decrypted GroupText for channel %s: %s", channel.name, decrypted.message[:50])
# Check for repeat detection (our own message echoed back)
is_repeat = False
_cleanup_expired_repeats()
text_hash = str(hash(decrypted.message))
for ts_offset in range(-5, 6):
key = (channel.key, text_hash, decrypted.timestamp + ts_offset)
if key in _pending_repeats:
message_id = _pending_repeats[key]
# Don't pop - let it expire naturally so subsequent repeats via
# different radio paths are also caught as duplicates
logger.info("Repeat detected for channel message %d", message_id)
ack_count = await MessageRepository.increment_ack_count(message_id)
broadcast_event("message_acked", {"message_id": message_id, "ack_count": ack_count})
is_repeat = True
break
if is_repeat:
# Mark packet as decrypted but don't create new message
await RawPacketRepository.mark_decrypted(packet_id, message_id)
return {
"decrypted": True,
"channel_name": channel.name,
"sender": decrypted.sender,
"message_id": message_id,
}
# Use shared function to create message, handle duplicates, and broadcast
# Create message (or add path to existing if duplicate)
# This handles both new messages and echoes of our own outgoing messages
msg_id = await create_message_from_decrypted(
packet_id=packet_id,
channel_key=channel.key,
@@ -301,7 +306,7 @@ async def _process_group_text(
async def _process_advertisement(
raw_bytes: bytes,
timestamp: int,
packet_info=None,
packet_info: PacketInfo | None = None,
) -> None:
"""
Process an advertisement packet.
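Condensed, the duplicate branch in create_message_from_decrypted reduces to this decision tree (a sketch only; the dependencies are passed in as parameters here, whereas the real code calls MessageRepository and broadcast_event directly):

async def handle_duplicate(existing_msg, path, received, repo, broadcast):
    # Every repeat contributes its routing path, whether it is our own
    # message echoed back or a copy relayed by a different repeater.
    if path is not None:
        paths = await repo.add_path(existing_msg.id, path, received)
    else:
        paths = existing_msg.paths or []
    # Only outgoing messages treat repeats as delivery confirmations.
    if existing_msg.outgoing:
        ack_count = await repo.increment_ack_count(existing_msg.id)
    else:
        ack_count = await repo.get_ack_count(existing_msg.id)
    broadcast("message_acked", {
        "message_id": existing_msg.id,
        "ack_count": ack_count,
        "paths": [p.model_dump() for p in paths] if paths else [],
    })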
+127 -5
@@ -1,3 +1,4 @@
import json
import logging
import sqlite3
import time
@@ -6,7 +7,7 @@ from typing import Any
from app.database import db
from app.decoder import extract_payload
from app.models import Channel, Contact, Message, RawPacket
from app.models import Channel, Contact, Message, MessagePath, RawPacket
logger = logging.getLogger(__name__)
@@ -286,6 +287,24 @@ class ChannelRepository:
class MessageRepository:
@staticmethod
def _parse_paths(paths_json: str | None) -> list[MessagePath] | None:
"""Parse paths JSON string to list of MessagePath objects."""
if not paths_json:
return None
try:
paths_data = json.loads(paths_json)
return [MessagePath(**p) for p in paths_data]
except (json.JSONDecodeError, TypeError, KeyError):
return None
@staticmethod
def _serialize_paths(paths: list[dict] | None) -> str | None:
"""Serialize paths list to JSON string."""
if not paths:
return None
return json.dumps(paths)
@staticmethod
async def create(
msg_type: str,
@@ -303,11 +322,18 @@ class MessageRepository:
Uses INSERT OR IGNORE to handle the UNIQUE constraint on
(type, conversation_key, text, sender_timestamp). This prevents
duplicate messages when the same message arrives via multiple RF paths.
The path parameter is converted to the paths JSON array format.
"""
# Convert single path to paths array format
paths_json = None
if path is not None:
paths_json = json.dumps([{"path": path, "received_at": received_at}])
cursor = await db.conn.execute(
"""
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
received_at, path, txt_type, signature, outgoing)
received_at, paths, txt_type, signature, outgoing)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -316,7 +342,7 @@ class MessageRepository:
text,
sender_timestamp,
received_at,
path,
paths_json,
txt_type,
signature,
outgoing,
@@ -328,6 +354,44 @@ class MessageRepository:
return None
return cursor.lastrowid
@staticmethod
async def add_path(
message_id: int, path: str, received_at: int | None = None
) -> list[MessagePath]:
"""Add a new path to an existing message.
This is used when a repeat/echo of a message arrives via a different route.
Returns the updated list of paths.
"""
ts = received_at or int(time.time())
# Get current paths
cursor = await db.conn.execute("SELECT paths FROM messages WHERE id = ?", (message_id,))
row = await cursor.fetchone()
if not row:
return []
# Parse existing paths or start with empty list
existing_paths = []
if row["paths"]:
try:
existing_paths = json.loads(row["paths"])
except json.JSONDecodeError:
existing_paths = []
# Add new path
existing_paths.append({"path": path, "received_at": ts})
# Update database
paths_json = json.dumps(existing_paths)
await db.conn.execute(
"UPDATE messages SET paths = ? WHERE id = ?",
(paths_json, message_id),
)
await db.conn.commit()
return [MessagePath(**p) for p in existing_paths]
@staticmethod
async def get_all(
limit: int = 100,
@@ -359,7 +423,7 @@ class MessageRepository:
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
path=row["path"],
paths=MessageRepository._parse_paths(row["paths"]),
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
@@ -377,6 +441,59 @@ class MessageRepository:
row = await cursor.fetchone()
return row["acked"] if row else 1
@staticmethod
async def get_ack_count(message_id: int) -> int:
"""Get the current ack count for a message."""
cursor = await db.conn.execute("SELECT acked FROM messages WHERE id = ?", (message_id,))
row = await cursor.fetchone()
return row["acked"] if row else 0
@staticmethod
async def get_by_content(
msg_type: str,
conversation_key: str,
text: str,
sender_timestamp: int | None,
) -> "Message | None":
"""Look up a message by its unique content fields."""
cursor = await db.conn.execute(
"""
SELECT id, type, conversation_key, text, sender_timestamp, received_at,
paths, txt_type, signature, outgoing, acked
FROM messages
WHERE type = ? AND conversation_key = ? AND text = ?
AND (sender_timestamp = ? OR (sender_timestamp IS NULL AND ? IS NULL))
""",
(msg_type, conversation_key, text, sender_timestamp, sender_timestamp),
)
row = await cursor.fetchone()
if not row:
return None
paths = None
if row["paths"]:
try:
paths_data = json.loads(row["paths"])
paths = [
MessagePath(path=p["path"], received_at=p["received_at"]) for p in paths_data
]
except (json.JSONDecodeError, KeyError):
pass
return Message(
id=row["id"],
type=row["type"],
conversation_key=row["conversation_key"],
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
paths=paths,
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
acked=row["acked"],
)
@staticmethod
async def get_bulk(
conversations: list[dict],
@@ -419,7 +536,7 @@ class MessageRepository:
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
path=row["path"],
paths=MessageRepository._parse_paths(row["paths"]),
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
@@ -462,6 +579,11 @@ class RawPacketRepository:
if existing:
# Duplicate - return existing packet ID
logger.info(
"Duplicate payload detected (hash=%s..., existing_id=%d)",
payload_hash[:12],
existing["id"],
)
return (existing["id"], False)
# New packet - insert with hash
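One detail in get_by_content worth calling out: a plain sender_timestamp = ? comparison never matches NULL in SQL, so the query binds the value twice and adds an explicit NULL branch. A runnable demonstration with the stdlib sqlite3 module:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE m (text TEXT, sender_timestamp INTEGER)")
conn.execute("INSERT INTO m VALUES ('hi', NULL)")

ts = None  # messages without a sender timestamp
row = conn.execute(
    "SELECT text FROM m WHERE text = ? "
    "AND (sender_timestamp = ? OR (sender_timestamp IS NULL AND ? IS NULL))",
    ("hi", ts, ts),
).fetchone()
assert row == ("hi",)  # matched despite the NULL timestamp

SQLite's NULL-safe IS operator (sender_timestamp IS ?) would behave the same; the expanded form is just more portable across engines.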
+7 -7
@@ -7,7 +7,6 @@ from meshcore import EventType
from app.dependencies import require_connected
from app.event_handlers import track_pending_ack
from app.models import Message, SendChannelMessageRequest, SendDirectMessageRequest
from app.packet_processor import track_pending_repeat
from app.repository import MessageRepository
logger = logging.getLogger(__name__)
@@ -184,12 +183,16 @@ async def send_channel_message(request: SendChannelMessageRequest) -> Message:
if result.type == EventType.ERROR:
raise HTTPException(status_code=500, detail=f"Failed to send message: {result.payload}")
# Store outgoing message
# Store outgoing message with sender prefix (to match echo format)
# The radio includes "SenderName: " prefix when broadcasting, so we store it the same way
# to enable proper deduplication when the echo comes back
now = int(time.time())
channel_key_upper = request.channel_key.upper()
radio_name = mc.self_info.get("name", "") if mc.self_info else ""
text_with_sender = f"{radio_name}: {request.text}" if radio_name else request.text
message_id = await MessageRepository.create(
msg_type="CHAN",
text=request.text,
text=text_with_sender,
conversation_key=channel_key_upper,
sender_timestamp=now,
received_at=now,
@@ -201,14 +204,11 @@ async def send_channel_message(request: SendChannelMessageRequest) -> Message:
detail="Failed to store outgoing message - unexpected duplicate",
)
# Track for repeat detection (flood messages get confirmed by hearing repeats)
track_pending_repeat(channel_key_upper, request.text, now, message_id)
return Message(
id=message_id,
type="CHAN",
conversation_key=channel_key_upper,
text=request.text,
text=text_with_sender,
sender_timestamp=now,
received_at=now,
outgoing=True,
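The sender-prefix change is all about deduplication: the UNIQUE constraint on (type, conversation_key, text, sender_timestamp) only catches the flood echo if the stored text is byte-identical to what later comes back off the air. Illustrative values (the node name "Alice" is hypothetical):

radio_name = "Alice"  # from mc.self_info in the real code
request_text = "hi all"

# Stored locally at send time, prefix included:
text_with_sender = f"{radio_name}: {request_text}" if radio_name else request_text

# What the radio broadcasts and we later decrypt from the echo:
echo_text = "Alice: hi all"

# Dedup works only because the two are byte-identical:
assert text_with_sender == echo_text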