Add multipath tracking

This commit is contained in:
Jack Kingsman
2026-01-18 20:00:32 -08:00
parent 0fea2889b2
commit c4ef8ec9cd
30 changed files with 1115 additions and 311 deletions

View File

@@ -90,6 +90,11 @@ async def on_contact_message(event: "Event") -> None:
logger.debug("Duplicate direct message from %s ignored", sender_pubkey[:12])
return
# Build paths array for broadcast
# Use "is not None" to include empty string (direct/0-hop messages)
path = payload.get("path")
paths = [{"path": path or "", "received_at": received_at}] if path is not None else None
# Broadcast only genuinely new messages
broadcast_event(
"message",
@@ -100,7 +105,7 @@ async def on_contact_message(event: "Event") -> None:
"text": payload.get("text", ""),
"sender_timestamp": payload.get("sender_timestamp"),
"received_at": received_at,
"path": payload.get("path"),
"paths": paths,
"txt_type": payload.get("txt_type", 0),
"signature": payload.get("signature"),
"outgoing": False,

View File

@@ -86,6 +86,13 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
await set_version(conn, 7)
applied += 1
# Migration 8: Convert path column to paths JSON array for multiple delivery paths
if version < 8:
logger.info("Applying migration 8: convert path to paths JSON array")
await _migrate_008_convert_path_to_paths_array(conn)
await set_version(conn, 8)
applied += 1
if applied > 0:
logger.info(
"Applied %d migration(s), schema now at version %d", applied, await get_version(conn)
@@ -514,3 +521,64 @@ async def _migrate_007_backfill_message_paths(conn: aiosqlite.Connection) -> Non
await conn.commit()
logger.info("Path backfill complete: %d messages updated", updated)
async def _migrate_008_convert_path_to_paths_array(conn: aiosqlite.Connection) -> None:
    """
    Convert the messages.path TEXT column to a paths TEXT column storing a JSON array.

    The new format stores multiple delivery paths as a JSON array of objects:
        [{"path": "1A2B", "received_at": 1234567890}, ...]

    This enables tracking multiple delivery paths for the same message
    (e.g., when a message is received via different repeater routes).

    Idempotent: tolerates the paths column already existing and the path
    column already being dropped, so a partially-applied run can be retried.
    """
    import json

    # First, add the new paths column; a duplicate-column error means a
    # previous (partial) run already added it.
    try:
        await conn.execute("ALTER TABLE messages ADD COLUMN paths TEXT")
        logger.debug("Added paths column to messages table")
    except aiosqlite.OperationalError as e:
        if "duplicate column name" in str(e).lower():
            logger.debug("messages.paths already exists, skipping column add")
        else:
            raise

    # Migrate existing path data to the paths array format. Only rows that
    # still have a legacy path and no converted paths value are touched.
    cursor = await conn.execute(
        "SELECT id, path, received_at FROM messages WHERE path IS NOT NULL AND paths IS NULL"
    )
    rows = await cursor.fetchall()
    if rows:
        logger.info("Converting %d messages from path to paths array format...", len(rows))
        # Batch all updates into one executemany call instead of issuing a
        # statement per row - same result, far fewer round-trips.
        params = [
            (json.dumps([{"path": row[1], "received_at": row[2]}]), row[0])
            for row in rows
        ]
        await conn.executemany(
            "UPDATE messages SET paths = ? WHERE id = ?",
            params,
        )
        logger.info("Converted %d messages to paths array format", len(rows))

    # Try to drop the old path column (SQLite 3.35.0+ only).
    try:
        await conn.execute("ALTER TABLE messages DROP COLUMN path")
        logger.debug("Dropped path column from messages table")
    except aiosqlite.OperationalError as e:
        error_msg = str(e).lower()
        if "no such column" in error_msg:
            logger.debug("messages.path already dropped, skipping")
        elif "syntax error" in error_msg or "drop column" in error_msg:
            # SQLite version doesn't support DROP COLUMN - harmless, column stays
            logger.debug("SQLite doesn't support DROP COLUMN, path column will remain")
        else:
            raise
    await conn.commit()

View File

@@ -67,6 +67,13 @@ class Channel(BaseModel):
last_read_at: int | None = None # Server-side read state tracking
class MessagePath(BaseModel):
    """A single path that a message took to reach us."""
    # Hex string, two characters per hop; an empty string means the message
    # arrived directly (0 hops).
    path: str = Field(description="Hex-encoded routing path (2 chars per hop)")
    # Unix timestamp (seconds) recorded when this copy of the message arrived.
    received_at: int = Field(description="Unix timestamp when this path was received")
class Message(BaseModel):
id: int
type: str = Field(description="PRIV or CHAN")
@@ -74,7 +81,9 @@ class Message(BaseModel):
text: str
sender_timestamp: int | None = None
received_at: int
path: str | None = Field(default=None, description="Hex-encoded routing path (2 chars per hop)")
paths: list[MessagePath] | None = Field(
default=None, description="List of routing paths this message arrived via"
)
txt_type: int = 0
signature: str | None = None
outgoing: bool = False

View File

@@ -17,6 +17,7 @@ import logging
import time
from app.decoder import (
PacketInfo,
PayloadType,
parse_advertisement,
parse_packet,
@@ -34,13 +35,6 @@ from app.websocket import broadcast_event
logger = logging.getLogger(__name__)
# Pending repeats for outgoing message ACK detection
# Key: (channel_key, text_hash, timestamp) -> message_id
_pending_repeats: dict[tuple[str, str, int], int] = {}
_pending_repeat_expiry: dict[tuple[str, str, int], float] = {}
REPEAT_EXPIRY_SECONDS = 30
async def create_message_from_decrypted(
packet_id: int,
channel_key: str,
@@ -66,9 +60,7 @@ async def create_message_from_decrypted(
Returns the message ID if created, None if duplicate.
"""
import time as time_module
received = received_at or int(time_module.time())
received = received_at or int(time.time())
# Format the message text with sender prefix if present
text = f"{sender}: {message_text}" if sender else message_text
@@ -87,14 +79,56 @@ async def create_message_from_decrypted(
)
if msg_id is None:
# This shouldn't happen - raw packets are deduplicated by payload hash,
# so the same message content shouldn't be created twice. Log a warning.
logger.warning(
"Unexpected duplicate message for channel %s (packet_id=%d) - "
"this may indicate a bug in payload deduplication",
channel_key_normalized[:8],
packet_id,
# Duplicate message detected - this happens when:
# 1. Our own outgoing message echoes back (flood routing)
# 2. Same message arrives via multiple paths before first is committed
# In either case, add the path to the existing message.
existing_msg = await MessageRepository.get_by_content(
msg_type="CHAN",
conversation_key=channel_key_normalized,
text=text,
sender_timestamp=timestamp,
)
if not existing_msg:
logger.warning(
"Duplicate message for channel %s but couldn't find existing",
channel_key_normalized[:8],
)
return None
logger.debug(
"Duplicate message for channel %s (msg_id=%d, outgoing=%s) - adding path",
channel_key_normalized[:8],
existing_msg.id,
existing_msg.outgoing,
)
# Add path if provided
if path is not None:
paths = await MessageRepository.add_path(existing_msg.id, path, received)
else:
# Get current paths for broadcast
paths = existing_msg.paths or []
# Increment ack count for outgoing messages (echo confirmation)
if existing_msg.outgoing:
ack_count = await MessageRepository.increment_ack_count(existing_msg.id)
else:
ack_count = await MessageRepository.get_ack_count(existing_msg.id)
# Broadcast updated paths
broadcast_event(
"message_acked",
{
"message_id": existing_msg.id,
"ack_count": ack_count,
"paths": [p.model_dump() for p in paths] if paths else [],
},
)
# Mark this packet as decrypted
await RawPacketRepository.mark_decrypted(packet_id, existing_msg.id)
return None
logger.info("Stored channel message %d for channel %s", msg_id, channel_key_normalized[:8])
@@ -102,6 +136,10 @@ async def create_message_from_decrypted(
# Mark the raw packet as decrypted
await RawPacketRepository.mark_decrypted(packet_id, msg_id)
# Build paths array for broadcast
# Use "is not None" to include empty string (direct/0-hop messages)
paths = [{"path": path or "", "received_at": received}] if path is not None else None
# Broadcast new message to connected clients
broadcast_event(
"message",
@@ -112,7 +150,7 @@ async def create_message_from_decrypted(
"text": text,
"sender_timestamp": timestamp,
"received_at": received,
"path": path,
"paths": paths,
"txt_type": 0,
"signature": None,
"outgoing": False,
@@ -123,24 +161,6 @@ async def create_message_from_decrypted(
return msg_id
def track_pending_repeat(channel_key: str, text: str, timestamp: int, message_id: int) -> None:
"""Track an outgoing channel message for repeat detection."""
text_hash = str(hash(text))
key = (channel_key.upper(), text_hash, timestamp)
_pending_repeats[key] = message_id
_pending_repeat_expiry[key] = time.time() + REPEAT_EXPIRY_SECONDS
logger.debug("Tracking repeat for channel %s, message %d", channel_key[:8], message_id)
def _cleanup_expired_repeats() -> None:
"""Remove expired pending repeats."""
now = time.time()
expired = [k for k, exp in _pending_repeat_expiry.items() if exp < now]
for k in expired:
_pending_repeats.pop(k, None)
_pending_repeat_expiry.pop(k, None)
async def process_raw_packet(
raw_bytes: bytes,
timestamp: int | None = None,
@@ -167,6 +187,16 @@ async def process_raw_packet(
payload_type = packet_info.payload_type if packet_info else None
payload_type_name = payload_type.name if payload_type else "Unknown"
# Log packet arrival at debug level
path_hex = packet_info.path.hex() if packet_info and packet_info.path else ""
logger.debug(
"Packet received: type=%s, is_new=%s, packet_id=%d, path='%s'",
payload_type_name,
is_new_packet,
packet_id,
path_hex[:8] if path_hex else "(direct)",
)
result = {
"packet_id": packet_id,
"timestamp": ts,
@@ -180,22 +210,24 @@ async def process_raw_packet(
"sender": None,
}
# Only process new packets - duplicates were already processed when first received
if is_new_packet:
# Try to decrypt/parse based on payload type
if payload_type == PayloadType.GROUP_TEXT:
decrypt_result = await _process_group_text(raw_bytes, packet_id, ts, packet_info)
if decrypt_result:
result.update(decrypt_result)
# Process packets based on payload type
# For GROUP_TEXT, we always try to decrypt even for duplicate packets - the message
# deduplication in create_message_from_decrypted handles adding paths to existing messages.
# This is more reliable than trying to look up the message via raw packet linking.
if payload_type == PayloadType.GROUP_TEXT:
decrypt_result = await _process_group_text(raw_bytes, packet_id, ts, packet_info)
if decrypt_result:
result.update(decrypt_result)
elif payload_type == PayloadType.ADVERT:
await _process_advertisement(raw_bytes, ts, packet_info)
elif payload_type == PayloadType.ADVERT and is_new_packet:
# Only process new advertisements (duplicates don't add value)
await _process_advertisement(raw_bytes, ts, packet_info)
# TODO: Add TEXT_MESSAGE (direct message) decryption when private key is available
# elif payload_type == PayloadType.TEXT_MESSAGE:
# decrypt_result = await _process_direct_message(raw_bytes, packet_id, ts, packet_info)
# if decrypt_result:
# result.update(decrypt_result)
# TODO: Add TEXT_MESSAGE (direct message) decryption when private key is available
# elif payload_type == PayloadType.TEXT_MESSAGE:
# decrypt_result = await _process_direct_message(raw_bytes, packet_id, ts, packet_info)
# if decrypt_result:
# result.update(decrypt_result)
# Always broadcast raw packet for the packet feed UI (even duplicates)
# This enables the frontend cracker to see all incoming packets in real-time
@@ -223,14 +255,13 @@ async def _process_group_text(
raw_bytes: bytes,
packet_id: int,
timestamp: int,
packet_info,
packet_info: PacketInfo | None,
) -> dict | None:
"""
Process a GroupText (channel message) packet.
Tries all known channel keys to decrypt.
Creates a message entry if successful.
Handles repeat detection for outgoing message ACKs.
Creates a message entry if successful (or adds path to existing if duplicate).
"""
# Try to decrypt with all known channel keys
channels = await ChannelRepository.get_all()
@@ -249,34 +280,8 @@ async def _process_group_text(
# Successfully decrypted!
logger.debug("Decrypted GroupText for channel %s: %s", channel.name, decrypted.message[:50])
# Check for repeat detection (our own message echoed back)
is_repeat = False
_cleanup_expired_repeats()
text_hash = str(hash(decrypted.message))
for ts_offset in range(-5, 6):
key = (channel.key, text_hash, decrypted.timestamp + ts_offset)
if key in _pending_repeats:
message_id = _pending_repeats[key]
# Don't pop - let it expire naturally so subsequent repeats via
# different radio paths are also caught as duplicates
logger.info("Repeat detected for channel message %d", message_id)
ack_count = await MessageRepository.increment_ack_count(message_id)
broadcast_event("message_acked", {"message_id": message_id, "ack_count": ack_count})
is_repeat = True
break
if is_repeat:
# Mark packet as decrypted but don't create new message
await RawPacketRepository.mark_decrypted(packet_id, message_id)
return {
"decrypted": True,
"channel_name": channel.name,
"sender": decrypted.sender,
"message_id": message_id,
}
# Use shared function to create message, handle duplicates, and broadcast
# Create message (or add path to existing if duplicate)
# This handles both new messages and echoes of our own outgoing messages
msg_id = await create_message_from_decrypted(
packet_id=packet_id,
channel_key=channel.key,
@@ -301,7 +306,7 @@ async def _process_group_text(
async def _process_advertisement(
raw_bytes: bytes,
timestamp: int,
packet_info=None,
packet_info: PacketInfo | None = None,
) -> None:
"""
Process an advertisement packet.

View File

@@ -1,3 +1,4 @@
import json
import logging
import sqlite3
import time
@@ -6,7 +7,7 @@ from typing import Any
from app.database import db
from app.decoder import extract_payload
from app.models import Channel, Contact, Message, RawPacket
from app.models import Channel, Contact, Message, MessagePath, RawPacket
logger = logging.getLogger(__name__)
@@ -286,6 +287,24 @@ class ChannelRepository:
class MessageRepository:
@staticmethod
def _parse_paths(paths_json: str | None) -> list[MessagePath] | None:
"""Parse paths JSON string to list of MessagePath objects."""
if not paths_json:
return None
try:
paths_data = json.loads(paths_json)
return [MessagePath(**p) for p in paths_data]
except (json.JSONDecodeError, TypeError, KeyError):
return None
@staticmethod
def _serialize_paths(paths: list[dict] | None) -> str | None:
"""Serialize paths list to JSON string."""
if not paths:
return None
return json.dumps(paths)
@staticmethod
async def create(
msg_type: str,
@@ -303,11 +322,18 @@ class MessageRepository:
Uses INSERT OR IGNORE to handle the UNIQUE constraint on
(type, conversation_key, text, sender_timestamp). This prevents
duplicate messages when the same message arrives via multiple RF paths.
The path parameter is converted to the paths JSON array format.
"""
# Convert single path to paths array format
paths_json = None
if path is not None:
paths_json = json.dumps([{"path": path, "received_at": received_at}])
cursor = await db.conn.execute(
"""
INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp,
received_at, path, txt_type, signature, outgoing)
received_at, paths, txt_type, signature, outgoing)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -316,7 +342,7 @@ class MessageRepository:
text,
sender_timestamp,
received_at,
path,
paths_json,
txt_type,
signature,
outgoing,
@@ -328,6 +354,44 @@ class MessageRepository:
return None
return cursor.lastrowid
@staticmethod
async def add_path(
message_id: int, path: str, received_at: int | None = None
) -> list[MessagePath]:
"""Add a new path to an existing message.
This is used when a repeat/echo of a message arrives via a different route.
Returns the updated list of paths.
"""
ts = received_at or int(time.time())
# Get current paths
cursor = await db.conn.execute("SELECT paths FROM messages WHERE id = ?", (message_id,))
row = await cursor.fetchone()
if not row:
return []
# Parse existing paths or start with empty list
existing_paths = []
if row["paths"]:
try:
existing_paths = json.loads(row["paths"])
except json.JSONDecodeError:
existing_paths = []
# Add new path
existing_paths.append({"path": path, "received_at": ts})
# Update database
paths_json = json.dumps(existing_paths)
await db.conn.execute(
"UPDATE messages SET paths = ? WHERE id = ?",
(paths_json, message_id),
)
await db.conn.commit()
return [MessagePath(**p) for p in existing_paths]
@staticmethod
async def get_all(
limit: int = 100,
@@ -359,7 +423,7 @@ class MessageRepository:
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
path=row["path"],
paths=MessageRepository._parse_paths(row["paths"]),
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
@@ -377,6 +441,59 @@ class MessageRepository:
row = await cursor.fetchone()
return row["acked"] if row else 1
@staticmethod
async def get_ack_count(message_id: int) -> int:
"""Get the current ack count for a message."""
cursor = await db.conn.execute("SELECT acked FROM messages WHERE id = ?", (message_id,))
row = await cursor.fetchone()
return row["acked"] if row else 0
@staticmethod
async def get_by_content(
msg_type: str,
conversation_key: str,
text: str,
sender_timestamp: int | None,
) -> "Message | None":
"""Look up a message by its unique content fields."""
cursor = await db.conn.execute(
"""
SELECT id, type, conversation_key, text, sender_timestamp, received_at,
paths, txt_type, signature, outgoing, acked
FROM messages
WHERE type = ? AND conversation_key = ? AND text = ?
AND (sender_timestamp = ? OR (sender_timestamp IS NULL AND ? IS NULL))
""",
(msg_type, conversation_key, text, sender_timestamp, sender_timestamp),
)
row = await cursor.fetchone()
if not row:
return None
paths = None
if row["paths"]:
try:
paths_data = json.loads(row["paths"])
paths = [
MessagePath(path=p["path"], received_at=p["received_at"]) for p in paths_data
]
except (json.JSONDecodeError, KeyError):
pass
return Message(
id=row["id"],
type=row["type"],
conversation_key=row["conversation_key"],
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
paths=paths,
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
acked=row["acked"],
)
@staticmethod
async def get_bulk(
conversations: list[dict],
@@ -419,7 +536,7 @@ class MessageRepository:
text=row["text"],
sender_timestamp=row["sender_timestamp"],
received_at=row["received_at"],
path=row["path"],
paths=MessageRepository._parse_paths(row["paths"]),
txt_type=row["txt_type"],
signature=row["signature"],
outgoing=bool(row["outgoing"]),
@@ -462,6 +579,11 @@ class RawPacketRepository:
if existing:
# Duplicate - return existing packet ID
logger.info(
"Duplicate payload detected (hash=%s..., existing_id=%d)",
payload_hash[:12],
existing["id"],
)
return (existing["id"], False)
# New packet - insert with hash

View File

@@ -7,7 +7,6 @@ from meshcore import EventType
from app.dependencies import require_connected
from app.event_handlers import track_pending_ack
from app.models import Message, SendChannelMessageRequest, SendDirectMessageRequest
from app.packet_processor import track_pending_repeat
from app.repository import MessageRepository
logger = logging.getLogger(__name__)
@@ -184,12 +183,16 @@ async def send_channel_message(request: SendChannelMessageRequest) -> Message:
if result.type == EventType.ERROR:
raise HTTPException(status_code=500, detail=f"Failed to send message: {result.payload}")
# Store outgoing message
# Store outgoing message with sender prefix (to match echo format)
# The radio includes "SenderName: " prefix when broadcasting, so we store it the same way
# to enable proper deduplication when the echo comes back
now = int(time.time())
channel_key_upper = request.channel_key.upper()
radio_name = mc.self_info.get("name", "") if mc.self_info else ""
text_with_sender = f"{radio_name}: {request.text}" if radio_name else request.text
message_id = await MessageRepository.create(
msg_type="CHAN",
text=request.text,
text=text_with_sender,
conversation_key=channel_key_upper,
sender_timestamp=now,
received_at=now,
@@ -201,14 +204,11 @@ async def send_channel_message(request: SendChannelMessageRequest) -> Message:
detail="Failed to store outgoing message - unexpected duplicate",
)
# Track for repeat detection (flood messages get confirmed by hearing repeats)
track_pending_repeat(channel_key_upper, request.text, now, message_id)
return Message(
id=message_id,
type="CHAN",
conversation_key=channel_key_upper,
text=request.text,
text=text_with_sender,
sender_timestamp=now,
received_at=now,
outgoing=True,

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -13,8 +13,8 @@
<link rel="shortcut icon" href="/favicon.ico" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
<link rel="manifest" href="/site.webmanifest" />
<script type="module" crossorigin src="/assets/index-CVHdyvV4.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-DIRlMkt4.css">
<script type="module" crossorigin src="/assets/index-CmYHoR07.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-CObwAA2o.css">
</head>
<body>
<div id="root"></div>

View File

@@ -33,6 +33,7 @@ import type {
Conversation,
HealthStatus,
Message,
MessagePath,
RawPacket,
RadioConfig,
RadioConfigUpdate,
@@ -220,8 +221,8 @@ export function App() {
return updated;
});
},
onMessageAcked: (messageId: number, ackCount: number) => {
updateMessageAck(messageId, ackCount);
onMessageAcked: (messageId: number, ackCount: number, paths?: MessagePath[]) => {
updateMessageAck(messageId, ackCount, paths);
},
}),
[addMessageIfNew, trackNewMessage, incrementUnread, updateMessageAck, checkMention]

View File

@@ -1,9 +1,9 @@
import { useEffect, useLayoutEffect, useRef, useCallback, useState, type ReactNode } from 'react';
import type { Contact, Message, RadioConfig } from '../types';
import type { Contact, Message, MessagePath, RadioConfig } from '../types';
import { CONTACT_TYPE_REPEATER } from '../types';
import { formatTime, parseSenderFromText } from '../utils/messageParser';
import { pubkeysMatch } from '../utils/pubkey';
import { getHopCount, type SenderInfo } from '../utils/pathUtils';
import { formatHopCounts, type SenderInfo } from '../utils/pathUtils';
import { ContactAvatar } from './ContactAvatar';
import { PathModal } from './PathModal';
import { cn } from '@/lib/utils';
@@ -62,6 +62,40 @@ function renderTextWithMentions(text: string, radioName?: string): ReactNode {
return parts.length > 0 ? parts : text;
}
// Clickable hop count badge that opens the path modal
interface HopCountBadgeProps {
  paths: MessagePath[];
  onClick: () => void;
  variant: 'header' | 'inline';
}

function HopCountBadge({ paths, onClick, variant }: HopCountBadgeProps) {
  const hopInfo = formatHopCounts(paths);

  // A lone direct delivery renders as "(d)"; everything else gets a
  // "hops" suffix, e.g. "(d/1/3 hops)".
  const loneDirect = hopInfo.allDirect && !hopInfo.hasMultiple;
  const label = loneDirect ? `(${hopInfo.display})` : `(${hopInfo.display} hops)`;

  const headerClasses =
    'font-normal text-muted-foreground/70 ml-1 text-[11px] cursor-pointer hover:text-primary hover:underline';
  const inlineClasses =
    'text-[10px] text-muted-foreground/50 ml-1 cursor-pointer hover:text-primary hover:underline';

  return (
    <span
      className={variant === 'header' ? headerClasses : inlineClasses}
      onClick={(e) => {
        // Keep the click from also triggering the message row's handler.
        e.stopPropagation();
        onClick();
      }}
      title="View message path"
    >
      {label}
    </span>
  );
}
export function MessageList({
messages,
contacts,
@@ -78,7 +112,7 @@ export function MessageList({
const isInitialLoadRef = useRef<boolean>(true);
const [showScrollToBottom, setShowScrollToBottom] = useState(false);
const [selectedPath, setSelectedPath] = useState<{
path: string;
paths: MessagePath[];
senderInfo: SenderInfo;
} | null>(null);
@@ -336,27 +370,18 @@ export function MessageList({
<span className="font-normal text-muted-foreground/70 ml-2 text-[11px]">
{formatTime(msg.sender_timestamp || msg.received_at)}
</span>
{!msg.outgoing &&
msg.path &&
(getHopCount(msg.path) === 0 ? (
<span className="font-normal text-muted-foreground/70 ml-1 text-[11px]">
(direct)
</span>
) : (
<span
className="font-normal text-muted-foreground/70 ml-1 text-[11px] cursor-pointer hover:text-primary hover:underline"
onClick={(e) => {
e.stopPropagation();
setSelectedPath({
path: msg.path!,
senderInfo: getSenderInfo(msg, contact, sender),
});
}}
title="View message path"
>
({getHopCount(msg.path)} hop{getHopCount(msg.path) !== 1 ? 's' : ''})
</span>
))}
{!msg.outgoing && msg.paths && msg.paths.length > 0 && (
<HopCountBadge
paths={msg.paths}
variant="header"
onClick={() =>
setSelectedPath({
paths: msg.paths!,
senderInfo: getSenderInfo(msg, contact, sender),
})
}
/>
)}
</div>
)}
<div className="break-words whitespace-pre-wrap">
@@ -371,27 +396,18 @@ export function MessageList({
<span className="text-[10px] text-muted-foreground/50 ml-2">
{formatTime(msg.sender_timestamp || msg.received_at)}
</span>
{!msg.outgoing &&
msg.path &&
(getHopCount(msg.path) === 0 ? (
<span className="text-[10px] text-muted-foreground/50 ml-1">
(direct)
</span>
) : (
<span
className="text-[10px] text-muted-foreground/50 ml-1 cursor-pointer hover:text-primary hover:underline"
onClick={(e) => {
e.stopPropagation();
setSelectedPath({
path: msg.path!,
senderInfo: getSenderInfo(msg, contact, sender),
});
}}
title="View message path"
>
({getHopCount(msg.path)} hop{getHopCount(msg.path) !== 1 ? 's' : ''})
</span>
))}
{!msg.outgoing && msg.paths && msg.paths.length > 0 && (
<HopCountBadge
paths={msg.paths}
variant="inline"
onClick={() =>
setSelectedPath({
paths: msg.paths!,
senderInfo: getSenderInfo(msg, contact, sender),
})
}
/>
)}
</>
)}
{msg.outgoing && (msg.acked > 0 ? `${msg.acked > 1 ? msg.acked : ''}` : ' ?')}
@@ -431,7 +447,7 @@ export function MessageList({
<PathModal
open={true}
onClose={() => setSelectedPath(null)}
path={selectedPath.path}
paths={selectedPath.paths}
senderInfo={selectedPath.senderInfo}
contacts={contacts}
config={config ?? null}

View File

@@ -1,4 +1,4 @@
import type { Contact, RadioConfig } from '../types';
import type { Contact, RadioConfig, MessagePath } from '../types';
import {
Dialog,
DialogContent,
@@ -17,34 +17,59 @@ import {
type ResolvedPath,
type PathHop,
} from '../utils/pathUtils';
import { formatTime } from '../utils/messageParser';
import { getMapFocusHash } from '../utils/urlHash';
interface PathModalProps {
open: boolean;
onClose: () => void;
path: string;
paths: MessagePath[];
senderInfo: SenderInfo;
contacts: Contact[];
config: RadioConfig | null;
}
export function PathModal({ open, onClose, path, senderInfo, contacts, config }: PathModalProps) {
const resolved = resolvePath(path, senderInfo, contacts, config);
export function PathModal({ open, onClose, paths, senderInfo, contacts, config }: PathModalProps) {
// Resolve all paths
const resolvedPaths = paths.map((p) => ({
...p,
resolved: resolvePath(p.path, senderInfo, contacts, config),
}));
const hasSinglePath = paths.length === 1;
return (
<Dialog open={open} onOpenChange={(isOpen) => !isOpen && onClose()}>
<DialogContent className="max-w-md max-h-[80vh] flex flex-col">
<DialogHeader>
<DialogTitle>Message Path</DialogTitle>
<DialogTitle>Message Path{!hasSinglePath && `s (${paths.length})`}</DialogTitle>
<DialogDescription>
This shows <em>one route</em> that this message traveled through the mesh network. Flood
messages may arrive via multiple paths, and routers may be incorrectly identified due to
prefix collisions between heard and non-heard router advertisements.
{hasSinglePath ? (
<>
This shows <em>one route</em> that this message traveled through the mesh network.
Routers may be incorrectly identified due to prefix collisions between heard and
non-heard router advertisements.
</>
) : (
<>
This message was received via <strong>{paths.length} different routes</strong>.
Routers may be incorrectly identified due to prefix collisions.
</>
)}
</DialogDescription>
</DialogHeader>
<div className="flex-1 overflow-y-auto py-2">
<PathVisualization resolved={resolved} senderInfo={senderInfo} />
<div className="flex-1 overflow-y-auto py-2 space-y-4">
{resolvedPaths.map((pathData, index) => (
<div key={index}>
{!hasSinglePath && (
<div className="text-xs text-muted-foreground font-medium mb-2 pb-1 border-b border-border">
Path {index + 1} received {formatTime(pathData.received_at)}
</div>
)}
<PathVisualization resolved={pathData.resolved} senderInfo={senderInfo} />
</div>
))}
</div>
<DialogFooter>

View File

@@ -100,7 +100,7 @@ function createLocalMessage(conversationKey: string, text: string, outgoing: boo
text,
sender_timestamp: now,
received_at: now,
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing,

View File

@@ -1,7 +1,7 @@
import { useState, useCallback, useEffect, useRef } from 'react';
import { toast } from '../components/ui/sonner';
import { api } from '../api';
import type { Conversation, Message } from '../types';
import type { Conversation, Message, MessagePath } from '../types';
const MESSAGE_PAGE_SIZE = 200;
@@ -19,7 +19,7 @@ export interface UseConversationMessagesResult {
fetchMessages: (showLoading?: boolean) => Promise<void>;
fetchOlderMessages: () => Promise<void>;
addMessageIfNew: (msg: Message) => boolean;
updateMessageAck: (messageId: number, ackCount: number) => void;
updateMessageAck: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
}
export function useConversationMessages(
@@ -145,18 +145,25 @@ export function useConversationMessages(
return true;
}, []);
// Update a message's ack count
const updateMessageAck = useCallback((messageId: number, ackCount: number) => {
setMessages((prev) => {
const idx = prev.findIndex((m) => m.id === messageId);
if (idx >= 0) {
const updated = [...prev];
updated[idx] = { ...prev[idx], acked: ackCount };
return updated;
}
return prev;
});
}, []);
// Update a message's ack count and paths
const updateMessageAck = useCallback(
(messageId: number, ackCount: number, paths?: MessagePath[]) => {
setMessages((prev) => {
const idx = prev.findIndex((m) => m.id === messageId);
if (idx >= 0) {
const updated = [...prev];
updated[idx] = {
...prev[idx],
acked: ackCount,
...(paths !== undefined && { paths }),
};
return updated;
}
return prev;
});
},
[]
);
return {
messages,

View File

@@ -99,7 +99,7 @@ function createLocalMessage(
text,
sender_timestamp: now,
received_at: now,
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing,

View File

@@ -268,7 +268,7 @@ describe('Integration: ACK Events', () => {
text: 'Hello',
sender_timestamp: 1700000000,
received_at: 1700000000,
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing: true,
@@ -301,7 +301,7 @@ describe('Integration: ACK Events', () => {
text: 'Hello',
sender_timestamp: 1700000000,
received_at: 1700000000,
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing: true,

View File

@@ -7,6 +7,7 @@ import {
getHopCount,
resolvePath,
formatDistance,
formatHopCounts,
} from '../utils/pathUtils';
import type { Contact, RadioConfig } from '../types';
import { CONTACT_TYPE_REPEATER, CONTACT_TYPE_CLIENT } from '../types';
@@ -573,3 +574,66 @@ describe('formatDistance', () => {
expect(formatDistance(0.001)).toBe('1m');
});
});
describe('formatHopCounts', () => {
  it('returns empty for null paths', () => {
    const info = formatHopCounts(null);
    expect(info.display).toBe('');
    expect(info.allDirect).toBe(true);
    expect(info.hasMultiple).toBe(false);
  });

  it('returns empty for empty paths array', () => {
    const info = formatHopCounts([]);
    expect(info.display).toBe('');
    expect(info.allDirect).toBe(true);
    expect(info.hasMultiple).toBe(false);
  });

  it('formats single direct path as "d"', () => {
    const info = formatHopCounts([{ path: '', received_at: 1700000000 }]);
    expect(info.display).toBe('d');
    expect(info.allDirect).toBe(true);
    expect(info.hasMultiple).toBe(false);
  });

  it('formats single multi-hop path with hop count', () => {
    const info = formatHopCounts([{ path: '1A2B', received_at: 1700000000 }]);
    expect(info.display).toBe('2');
    expect(info.allDirect).toBe(false);
    expect(info.hasMultiple).toBe(false);
  });

  it('formats multiple paths sorted by hop count', () => {
    const fourRoutes = [
      { path: '1A2B3C', received_at: 1700000000 }, // 3 hops
      { path: '', received_at: 1700000001 }, // direct
      { path: '1A', received_at: 1700000002 }, // 1 hop
      { path: '1A2B3C', received_at: 1700000003 }, // 3 hops
    ];
    const info = formatHopCounts(fourRoutes);
    expect(info.display).toBe('d/1/3/3');
    expect(info.allDirect).toBe(false);
    expect(info.hasMultiple).toBe(true);
  });

  it('formats multiple direct paths', () => {
    const twoDirect = [
      { path: '', received_at: 1700000000 },
      { path: '', received_at: 1700000001 },
    ];
    const info = formatHopCounts(twoDirect);
    expect(info.display).toBe('d/d');
    expect(info.allDirect).toBe(true);
    expect(info.hasMultiple).toBe(true);
  });

  it('handles mixed paths with multiple direct routes', () => {
    const mixed = [
      { path: '1A', received_at: 1700000000 }, // 1 hop
      { path: '', received_at: 1700000001 }, // direct
      { path: '', received_at: 1700000002 }, // direct
    ];
    const info = formatHopCounts(mixed);
    expect(info.display).toBe('d/d/1');
    expect(info.allDirect).toBe(false);
    expect(info.hasMultiple).toBe(true);
  });
});

View File

@@ -72,7 +72,7 @@ describe('shouldIncrementUnread', () => {
text: 'Test',
sender_timestamp: null,
received_at: Date.now(),
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing: false,

View File

@@ -16,7 +16,7 @@ function createMessage(overrides: Partial<Message> = {}): Message {
text: 'Hello world',
sender_timestamp: 1700000000,
received_at: 1700000001,
path: null,
paths: null,
txt_type: 0,
signature: null,
outgoing: false,
@@ -110,3 +110,75 @@ describe('getMessageContentKey', () => {
expect(key).toContain('Hello: World! @user #channel');
});
});
describe('updateMessageAck logic', () => {
  // Mirrors the setMessages callback that updateMessageAck applies: bump
  // the ack count on the matching message and, when a paths array is
  // supplied, replace its paths too. Unknown ids leave the array untouched.
  function applyAckUpdate(
    messages: Message[],
    messageId: number,
    ackCount: number,
    paths?: { path: string; received_at: number }[]
  ): Message[] {
    const exists = messages.some((m) => m.id === messageId);
    if (!exists) {
      return messages;
    }
    return messages.map((m) => {
      if (m.id !== messageId) {
        return m;
      }
      const next: Message = { ...m, acked: ackCount };
      if (paths !== undefined) {
        next.paths = paths;
      }
      return next;
    });
  }

  it('updates ack count for existing message', () => {
    const msgs = [createMessage({ id: 42, acked: 0 })];
    const result = applyAckUpdate(msgs, 42, 3);
    expect(result[0].acked).toBe(3);
  });

  it('updates paths when provided', () => {
    const msgs = [createMessage({ id: 42, acked: 0, paths: null })];
    const newPaths = [
      { path: '1A2B', received_at: 1700000000 },
      { path: '1A3C', received_at: 1700000005 },
    ];
    const result = applyAckUpdate(msgs, 42, 2, newPaths);
    expect(result[0].acked).toBe(2);
    expect(result[0].paths).toEqual(newPaths);
  });

  it('does not modify paths when not provided', () => {
    const existingPaths = [{ path: '1A2B', received_at: 1700000000 }];
    const msgs = [createMessage({ id: 42, acked: 1, paths: existingPaths })];
    const result = applyAckUpdate(msgs, 42, 2);
    expect(result[0].acked).toBe(2);
    expect(result[0].paths).toEqual(existingPaths); // Unchanged
  });

  it('returns unchanged array for unknown message id', () => {
    const msgs = [createMessage({ id: 42, acked: 0 })];
    const result = applyAckUpdate(msgs, 999, 3);
    expect(result).toEqual(msgs);
    expect(result[0].acked).toBe(0); // Unchanged
  });

  it('handles empty paths array', () => {
    const msgs = [createMessage({ id: 42, acked: 0, paths: null })];
    const result = applyAckUpdate(msgs, 42, 1, []);
    expect(result[0].paths).toEqual([]);
  });
});

View File

@@ -6,7 +6,7 @@
*/
import { describe, it, expect, vi } from 'vitest';
import type { HealthStatus, Contact, Channel, Message, RawPacket } from '../types';
import type { HealthStatus, Contact, Channel, Message, MessagePath, RawPacket } from '../types';
/**
* Parse and route a WebSocket message.
@@ -21,7 +21,7 @@ function parseWebSocketMessage(
onMessage?: (message: Message) => void;
onContact?: (contact: Contact) => void;
onRawPacket?: (packet: RawPacket) => void;
onMessageAcked?: (messageId: number, ackCount: number) => void;
onMessageAcked?: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
}
): { type: string; handled: boolean } {
try {
@@ -47,8 +47,12 @@ function parseWebSocketMessage(
handlers.onRawPacket?.(msg.data as RawPacket);
return { type: msg.type, handled: !!handlers.onRawPacket };
case 'message_acked': {
const ackData = msg.data as { message_id: number; ack_count: number };
handlers.onMessageAcked?.(ackData.message_id, ackData.ack_count);
const ackData = msg.data as {
message_id: number;
ack_count: number;
paths?: MessagePath[];
};
handlers.onMessageAcked?.(ackData.message_id, ackData.ack_count, ackData.paths);
return { type: msg.type, handled: !!handlers.onMessageAcked };
}
case 'pong':
@@ -90,7 +94,25 @@ describe('parseWebSocketMessage', () => {
expect(result.type).toBe('message_acked');
expect(result.handled).toBe(true);
expect(onMessageAcked).toHaveBeenCalledWith(42, 3);
expect(onMessageAcked).toHaveBeenCalledWith(42, 3, undefined);
});
it('routes message_acked with paths array', () => {
const onMessageAcked = vi.fn();
const paths = [
{ path: '1A2B', received_at: 1700000000 },
{ path: '1A3C', received_at: 1700000005 },
];
const data = JSON.stringify({
type: 'message_acked',
data: { message_id: 42, ack_count: 2, paths },
});
const result = parseWebSocketMessage(data, { onMessageAcked });
expect(result.type).toBe('message_acked');
expect(result.handled).toBe(true);
expect(onMessageAcked).toHaveBeenCalledWith(42, 2, paths);
});
it('routes new message to onMessage handler', () => {

View File

@@ -74,6 +74,14 @@ export interface Channel {
last_read_at: number | null;
}
/** A single path that a message took to reach us */
export interface MessagePath {
  /** Hex-encoded routing path (2 chars per hop); empty string means direct (0 hops) */
  path: string;
  /** Unix timestamp when this path was received */
  received_at: number;
}
export interface Message {
id: number;
type: 'PRIV' | 'CHAN';
@@ -82,8 +90,8 @@ export interface Message {
text: string;
sender_timestamp: number | null;
received_at: number;
/** Hex-encoded routing path (2 chars per hop). Null for outgoing messages. */
path: string | null;
/** List of routing paths this message arrived via. Null for outgoing messages. */
paths: MessagePath[] | null;
txt_type: number;
signature: string | null;
outgoing: boolean;

View File

@@ -1,5 +1,5 @@
import { useEffect, useRef, useCallback, useState } from 'react';
import type { HealthStatus, Contact, Channel, Message, RawPacket } from './types';
import type { HealthStatus, Contact, Channel, Message, MessagePath, RawPacket } from './types';
interface WebSocketMessage {
type: string;
@@ -18,7 +18,7 @@ interface UseWebSocketOptions {
onMessage?: (message: Message) => void;
onContact?: (contact: Contact) => void;
onRawPacket?: (packet: RawPacket) => void;
onMessageAcked?: (messageId: number, ackCount: number) => void;
onMessageAcked?: (messageId: number, ackCount: number, paths?: MessagePath[]) => void;
onError?: (error: ErrorEvent) => void;
}
@@ -83,8 +83,12 @@ export function useWebSocket(options: UseWebSocketOptions) {
options.onRawPacket?.(msg.data as RawPacket);
break;
case 'message_acked': {
const ackData = msg.data as { message_id: number; ack_count: number };
options.onMessageAcked?.(ackData.message_id, ackData.ack_count);
const ackData = msg.data as {
message_id: number;
ack_count: number;
paths?: MessagePath[];
};
options.onMessageAcked?.(ackData.message_id, ackData.ack_count, ackData.paths);
break;
}
case 'error':

View File

@@ -1,4 +1,4 @@
import type { Contact, RadioConfig } from '../types';
import type { Contact, RadioConfig, MessagePath } from '../types';
import { CONTACT_TYPE_REPEATER } from '../types';
export interface PathHop {
@@ -155,6 +155,33 @@ export function getHopCount(path: string | null | undefined): number {
return Math.floor(path.length / 2);
}
/**
 * Format hop counts from multiple paths for display.
 * Hop counts are sorted ascending; "d" denotes a direct (0-hop) path,
 * so e.g. "d/1/3/3" means one direct, one 1-hop, and two 3-hop paths.
 * Always returns an object; when paths is null/undefined/empty the result
 * is { display: '', allDirect: true, hasMultiple: false }.
 */
export function formatHopCounts(paths: MessagePath[] | null | undefined): {
  display: string;
  allDirect: boolean;
  hasMultiple: boolean;
} {
  if (!paths || paths.length === 0) {
    return { display: '', allDirect: true, hasMultiple: false };
  }
  // Get hop counts for all paths and sort ascending
  const hopCounts = paths.map((p) => getHopCount(p.path)).sort((a, b) => a - b);
  const allDirect = hopCounts.every((h) => h === 0);
  const hasMultiple = paths.length > 1;
  // Format: "d" for 0, numbers for others
  const parts = hopCounts.map((h) => (h === 0 ? 'd' : h.toString()));
  const display = parts.join('/');
  return { display, allDirect, hasMultiple };
}
/**
* Build complete path resolution with sender, hops, and receiver
*/

View File

@@ -1,7 +1,7 @@
"""Tests for event handler logic.
These tests verify the ACK tracking and repeat detection mechanisms
that determine message delivery confirmation.
These tests verify the ACK tracking mechanism for direct message
delivery confirmation.
"""
import time
@@ -16,25 +16,15 @@ from app.event_handlers import (
register_event_handlers,
track_pending_ack,
)
from app.packet_processor import (
_cleanup_expired_repeats,
_pending_repeat_expiry,
_pending_repeats,
track_pending_repeat,
)
@pytest.fixture(autouse=True)
def clear_test_state():
"""Clear pending ACKs, repeats, and subscriptions before each test."""
"""Clear pending ACKs and subscriptions before each test."""
_pending_acks.clear()
_pending_repeats.clear()
_pending_repeat_expiry.clear()
_active_subscriptions.clear()
yield
_pending_acks.clear()
_pending_repeats.clear()
_pending_repeat_expiry.clear()
_active_subscriptions.clear()
@@ -84,67 +74,6 @@ class TestAckTracking:
assert "recent" in _pending_acks
class TestRepeatTracking:
"""Test repeat tracking for channel/flood messages."""
def test_track_pending_repeat_stores_correctly(self):
"""Pending repeats are stored with channel key, text hash, and timestamp."""
channel_key = "0123456789ABCDEF0123456789ABCDEF"
track_pending_repeat(
channel_key=channel_key, text="Hello", timestamp=1700000000, message_id=99
)
# Key is (channel_key, text_hash, timestamp)
text_hash = str(hash("Hello"))
key = (channel_key, text_hash, 1700000000)
assert key in _pending_repeats
assert _pending_repeats[key] == 99
def test_same_message_different_channels_tracked_separately(self):
"""Same message on different channels creates separate entries."""
track_pending_repeat(
channel_key="AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1",
text="Test",
timestamp=1000,
message_id=1,
)
track_pending_repeat(
channel_key="AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA2",
text="Test",
timestamp=1000,
message_id=2,
)
assert len(_pending_repeats) == 2
def test_same_message_different_timestamps_tracked_separately(self):
"""Same message with different timestamps creates separate entries."""
channel_key = "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"
track_pending_repeat(channel_key=channel_key, text="Test", timestamp=1000, message_id=1)
track_pending_repeat(channel_key=channel_key, text="Test", timestamp=1001, message_id=2)
assert len(_pending_repeats) == 2
def test_cleanup_removes_old_repeats(self):
"""Expired repeats are removed during cleanup."""
channel_key = "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC"
text_hash = str(hash("test"))
old_key = (channel_key, text_hash, 1000)
new_key = (channel_key, text_hash, 2000)
# Set up entries with expiry times
_pending_repeats[old_key] = 1
_pending_repeats[new_key] = 2
_pending_repeat_expiry[old_key] = time.time() - 10 # Already expired
_pending_repeat_expiry[new_key] = time.time() + 30 # Still valid
_cleanup_expired_repeats()
assert old_key not in _pending_repeats
assert new_key in _pending_repeats
class TestAckEventHandler:
"""Test the on_ack event handler."""

View File

@@ -100,8 +100,8 @@ class TestMigration001:
# Run migrations
applied = await run_migrations(conn)
assert applied == 7 # All 7 migrations run
assert await get_version(conn) == 7
assert applied == 8 # All 8 migrations run
assert await get_version(conn) == 8
# Verify columns exist by inserting and selecting
await conn.execute(
@@ -183,9 +183,9 @@ class TestMigration001:
applied1 = await run_migrations(conn)
applied2 = await run_migrations(conn)
assert applied1 == 7 # All 7 migrations run
assert applied1 == 8 # All 8 migrations run
assert applied2 == 0 # No migrations on second run
assert await get_version(conn) == 7
assert await get_version(conn) == 8
finally:
await conn.close()
@@ -245,9 +245,9 @@ class TestMigration001:
# Run migrations - should not fail
applied = await run_migrations(conn)
# All 7 migrations applied (version incremented) but no error
assert applied == 7
assert await get_version(conn) == 7
# All 8 migrations applied (version incremented) but no error
assert applied == 8
assert await get_version(conn) == 8
finally:
await conn.close()

View File

@@ -430,7 +430,7 @@ class TestCreateMessageFromDecrypted:
assert broadcast["text"] == "TestSender: Hello world"
assert broadcast["sender_timestamp"] == 1700000000
assert broadcast["received_at"] == 1700000001
assert broadcast["path"] is None # Historical decryption has no path info
assert broadcast["paths"] is None # Historical decryption has no path info
assert broadcast["outgoing"] is False
assert broadcast["acked"] == 0
@@ -551,8 +551,11 @@ class TestMessageBroadcastStructure:
broadcast = message_broadcasts[0]["data"]
# Real-time processing extracts path from packet (flood packets have empty path)
assert "path" in broadcast
# The test packet is a flood packet, so path should be empty string ""
assert "paths" in broadcast
# The test packet is a flood packet, so paths should contain a single entry with empty path
assert broadcast["paths"] is not None
assert len(broadcast["paths"]) == 1
assert broadcast["paths"][0]["path"] == "" # Empty string = direct/flood
class TestRawPacketStorage:

417
tests/test_repository.py Normal file
View File

@@ -0,0 +1,417 @@
"""Tests for repository layer, specifically the add_path method."""
import json
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
class TestMessageRepositoryAddPath:
    """Test MessageRepository.add_path method."""

    @staticmethod
    def _mock_db(row):
        """Build a mock db whose connection returns *row* from fetchone().

        Centralizes the AsyncMock boilerplate previously duplicated in every
        test. The returned MagicMock exposes the connection at ``.conn`` so
        tests can inspect ``conn.execute`` call arguments.
        """
        mock_cursor = AsyncMock()
        mock_cursor.fetchone = AsyncMock(return_value=row)
        mock_conn = AsyncMock()
        mock_conn.execute = AsyncMock(return_value=mock_cursor)
        mock_conn.commit = AsyncMock()
        mock_db = MagicMock()
        mock_db.conn = mock_conn
        return mock_db

    @pytest.mark.asyncio
    async def test_add_path_to_message_with_no_existing_paths(self):
        """Adding a path to a message with no existing paths creates a new array."""
        mock_db = self._mock_db({"paths": None})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(
                message_id=42, path="1A2B", received_at=1700000000
            )

        assert len(result) == 1
        assert result[0].path == "1A2B"
        assert result[0].received_at == 1700000000

        # Verify the UPDATE was called with correct JSON
        update_call = mock_db.conn.execute.call_args_list[-1]
        assert update_call[0][0] == "UPDATE messages SET paths = ? WHERE id = ?"
        paths_json = update_call[0][1][0]
        parsed = json.loads(paths_json)
        assert len(parsed) == 1
        assert parsed[0]["path"] == "1A2B"

    @pytest.mark.asyncio
    async def test_add_path_to_message_with_existing_paths(self):
        """Adding a path to a message with existing paths appends to the array."""
        existing_paths = json.dumps([{"path": "1A", "received_at": 1699999999}])
        mock_db = self._mock_db({"paths": existing_paths})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(
                message_id=42, path="2B3C", received_at=1700000000
            )

        assert len(result) == 2
        assert result[0].path == "1A"
        assert result[1].path == "2B3C"

        # Verify the UPDATE contains both paths
        update_call = mock_db.conn.execute.call_args_list[-1]
        paths_json = update_call[0][1][0]
        parsed = json.loads(paths_json)
        assert len(parsed) == 2
        assert parsed[0]["path"] == "1A"
        assert parsed[1]["path"] == "2B3C"

    @pytest.mark.asyncio
    async def test_add_path_to_nonexistent_message_returns_empty(self):
        """Adding a path to a nonexistent message returns empty list."""
        mock_db = self._mock_db(None)

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(
                message_id=999, path="1A2B", received_at=1700000000
            )

        assert result == []

    @pytest.mark.asyncio
    async def test_add_path_handles_corrupted_json(self):
        """Adding a path handles corrupted JSON in existing paths gracefully."""
        mock_db = self._mock_db({"paths": "not valid json {"})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(
                message_id=42, path="1A2B", received_at=1700000000
            )

        # Should recover and create new array with just the new path
        assert len(result) == 1
        assert result[0].path == "1A2B"

    @pytest.mark.asyncio
    async def test_add_path_uses_current_time_if_not_provided(self):
        """Adding a path without received_at uses current timestamp."""
        mock_db = self._mock_db({"paths": None})

        with patch("app.repository.db", mock_db), patch("app.repository.time") as mock_time:
            mock_time.time.return_value = 1700000500.5
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(message_id=42, path="1A2B")

        assert len(result) == 1
        # Fractional seconds are truncated to an int timestamp
        assert result[0].received_at == 1700000500

    @pytest.mark.asyncio
    async def test_add_empty_path_for_direct_message(self):
        """Adding an empty path (direct message) works correctly."""
        mock_db = self._mock_db({"paths": None})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.add_path(
                message_id=42, path="", received_at=1700000000
            )

        assert len(result) == 1
        assert result[0].path == ""  # Empty path = direct
        assert result[0].received_at == 1700000000
class TestMessageRepositoryGetByContent:
    """Test MessageRepository.get_by_content method."""

    @staticmethod
    def _mock_db(row):
        """Build a mock db whose connection returns *row* from fetchone().

        Centralizes the AsyncMock boilerplate previously duplicated in every
        test in this class.
        """
        mock_cursor = AsyncMock()
        mock_cursor.fetchone = AsyncMock(return_value=row)
        mock_conn = AsyncMock()
        mock_conn.execute = AsyncMock(return_value=mock_cursor)
        mock_db = MagicMock()
        mock_db.conn = mock_conn
        return mock_db

    @staticmethod
    def _row(**overrides):
        """Return a plausible messages-table row dict, with overrides applied."""
        row = {
            "id": 42,
            "type": "CHAN",
            "conversation_key": "ABCD1234",
            "text": "Hello world",
            "sender_timestamp": 1700000000,
            "received_at": 1700000001,
            "paths": None,
            "txt_type": 0,
            "signature": None,
            "outgoing": 0,
            "acked": 1,
        }
        row.update(overrides)
        return row

    @pytest.mark.asyncio
    async def test_get_by_content_finds_matching_message(self):
        """Returns message when all content fields match."""
        mock_db = self._mock_db(self._row())

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_by_content(
                msg_type="CHAN",
                conversation_key="ABCD1234",
                text="Hello world",
                sender_timestamp=1700000000,
            )

        assert result is not None
        assert result.id == 42
        assert result.type == "CHAN"
        assert result.conversation_key == "ABCD1234"
        assert result.text == "Hello world"
        assert result.acked == 1

    @pytest.mark.asyncio
    async def test_get_by_content_returns_none_when_not_found(self):
        """Returns None when no message matches."""
        mock_db = self._mock_db(None)

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_by_content(
                msg_type="CHAN",
                conversation_key="NONEXISTENT",
                text="Not found",
                sender_timestamp=1700000000,
            )

        assert result is None

    @pytest.mark.asyncio
    async def test_get_by_content_handles_null_sender_timestamp(self):
        """Handles messages with NULL sender_timestamp correctly."""
        mock_db = self._mock_db(
            self._row(
                id=43,
                type="PRIV",
                conversation_key="abc123",
                text="Test message",
                sender_timestamp=None,
                outgoing=1,
                acked=0,
            )
        )

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_by_content(
                msg_type="PRIV",
                conversation_key="abc123",
                text="Test message",
                sender_timestamp=None,
            )

        assert result is not None
        assert result.sender_timestamp is None
        assert result.outgoing is True

    @pytest.mark.asyncio
    async def test_get_by_content_parses_paths_correctly(self):
        """Parses paths JSON into MessagePath objects."""
        paths_json = json.dumps(
            [
                {"path": "1A2B", "received_at": 1700000000},
                {"path": "3C4D", "received_at": 1700000001},
            ]
        )
        mock_db = self._mock_db(
            self._row(
                id=44,
                text="Multi-path message",
                received_at=1700000000,
                paths=paths_json,
                acked=2,
            )
        )

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_by_content(
                msg_type="CHAN",
                conversation_key="ABCD1234",
                text="Multi-path message",
                sender_timestamp=1700000000,
            )

        assert result is not None
        assert result.paths is not None
        assert len(result.paths) == 2
        assert result.paths[0].path == "1A2B"
        assert result.paths[1].path == "3C4D"

    @pytest.mark.asyncio
    async def test_get_by_content_handles_corrupted_paths_json(self):
        """Handles corrupted paths JSON gracefully."""
        mock_db = self._mock_db(
            self._row(
                id=45,
                text="Corrupted paths",
                received_at=1700000000,
                paths="not valid json {",
                acked=0,
            )
        )

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_by_content(
                msg_type="CHAN",
                conversation_key="ABCD1234",
                text="Corrupted paths",
                sender_timestamp=1700000000,
            )

        # Should return message with paths=None instead of raising
        assert result is not None
        assert result.paths is None
class TestMessageRepositoryGetAckCount:
    """Test MessageRepository.get_ack_count method."""

    @staticmethod
    def _mock_db(row):
        """Build a mock db whose connection returns *row* from fetchone().

        Centralizes the AsyncMock boilerplate previously duplicated in every
        test in this class.
        """
        mock_cursor = AsyncMock()
        mock_cursor.fetchone = AsyncMock(return_value=row)
        mock_conn = AsyncMock()
        mock_conn.execute = AsyncMock(return_value=mock_cursor)
        mock_db = MagicMock()
        mock_db.conn = mock_conn
        return mock_db

    @pytest.mark.asyncio
    async def test_get_ack_count_returns_count(self):
        """Returns ack count for existing message."""
        mock_db = self._mock_db({"acked": 3})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_ack_count(message_id=42)

        assert result == 3

    @pytest.mark.asyncio
    async def test_get_ack_count_returns_zero_for_nonexistent(self):
        """Returns 0 for nonexistent message."""
        mock_db = self._mock_db(None)

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_ack_count(message_id=999)

        assert result == 0

    @pytest.mark.asyncio
    async def test_get_ack_count_returns_zero_for_unacked(self):
        """Returns 0 for message with no acks."""
        mock_db = self._mock_db({"acked": 0})

        with patch("app.repository.db", mock_db):
            from app.repository import MessageRepository

            result = await MessageRepository.get_ack_count(message_id=42)

        assert result == 0