Merge branch 'main' into feature/add-docker-compose

This commit is contained in:
Jack Kingsman
2026-02-22 13:43:15 -08:00
committed by GitHub
24 changed files with 1717 additions and 251 deletions

View File

@@ -191,7 +191,6 @@ npm run build # build the frontend
| `MESHCORE_BLE_PIN` | | BLE PIN (required when BLE address is set) |
| `MESHCORE_LOG_LEVEL` | INFO | DEBUG, INFO, WARNING, ERROR |
| `MESHCORE_DATABASE_PATH` | data/meshcore.db | SQLite database path |
| `MESHCORE_MAX_RADIO_CONTACTS` | 200 | Max recent contacts to keep on radio for DM ACKs |
Only one transport may be active at a time. If multiple are set, the server will refuse to start.

View File

@@ -41,12 +41,13 @@ CREATE TABLE IF NOT EXISTS messages (
txt_type INTEGER DEFAULT 0,
signature TEXT,
outgoing INTEGER DEFAULT 0,
acked INTEGER DEFAULT 0,
acked INTEGER DEFAULT 0
-- Deduplication: identical text + timestamp in the same conversation is treated as a
-- mesh echo/repeat. Second-precision timestamps mean two intentional identical messages
-- within the same second would collide, but this is not feasible in practice — LoRa
-- transmission takes several seconds per message, and the UI clears the input on send.
UNIQUE(type, conversation_key, text, sender_timestamp)
-- Enforced via idx_messages_dedup_null_safe (unique index) rather than a table constraint
-- to avoid the storage overhead of SQLite's autoindex duplicating every message text.
);
CREATE TABLE IF NOT EXISTS raw_packets (
@@ -60,6 +61,8 @@ CREATE TABLE IF NOT EXISTS raw_packets (
CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(type, conversation_key);
CREATE INDEX IF NOT EXISTS idx_messages_received ON messages(received_at);
CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0));
CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id);
CREATE UNIQUE INDEX IF NOT EXISTS idx_raw_packets_payload_hash ON raw_packets(payload_hash);
CREATE INDEX IF NOT EXISTS idx_contacts_on_radio ON contacts(on_radio);
@@ -76,6 +79,17 @@ class Database:
Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
self._connection = await aiosqlite.connect(self.db_path)
self._connection.row_factory = aiosqlite.Row
# WAL mode: faster writes, concurrent readers during writes, no journal file churn.
# Persists in the DB file but we set it explicitly on every connection.
await self._connection.execute("PRAGMA journal_mode = WAL")
# Incremental auto-vacuum: freed pages are reclaimable via
# PRAGMA incremental_vacuum without a full VACUUM. Must be set before
# the first table is created (for new databases); for existing databases
# migration 20 handles the one-time VACUUM to restructure the file.
await self._connection.execute("PRAGMA auto_vacuum = INCREMENTAL")
await self._connection.executescript(SCHEMA)
await self._connection.commit()
logger.debug("Database schema initialized")

View File

@@ -1,13 +1,27 @@
import logging
from pathlib import Path
from fastapi import FastAPI, HTTPException
from fastapi.responses import FileResponse
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
logger = logging.getLogger(__name__)
def _resolve_request_origin(request: Request) -> str:
    """Resolve the external origin, honoring common reverse-proxy headers."""
    proto_header = request.headers.get("x-forwarded-proto")
    host_header = request.headers.get("x-forwarded-host")
    if proto_header and host_header:
        # Chained proxies may append comma-separated values; the first entry is
        # the one set by the client-facing proxy.
        scheme = proto_header.split(",", 1)[0].strip()
        authority = host_header.split(",", 1)[0].strip()
        if scheme and authority:
            return f"{scheme}://{authority}"
    # No complete forwarding info: fall back to the origin of the direct request.
    return str(request.base_url).rstrip("/")
def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
"""Register frontend static file routes if a built frontend is available.
@@ -55,6 +69,41 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool:
"""Serve the frontend index.html."""
return FileResponse(index_file)
@app.get("/site.webmanifest")
async def serve_webmanifest(request: Request):
    """Serve a dynamic web manifest using the active request origin."""
    origin = _resolve_request_origin(request)
    # Both PWA icons are identical except for their pixel dimensions.
    icons = [
        {
            "src": f"{origin}/web-app-manifest-{size}x{size}.png",
            "sizes": f"{size}x{size}",
            "type": "image/png",
            "purpose": "maskable",
        }
        for size in (192, 512)
    ]
    manifest = {
        "name": "RemoteTerm for MeshCore",
        "short_name": "RemoteTerm",
        "id": f"{origin}/",
        "start_url": f"{origin}/",
        "scope": f"{origin}/",
        "display": "standalone",
        "display_override": ["window-controls-overlay", "standalone", "fullscreen"],
        "theme_color": "#111419",
        "background_color": "#111419",
        "icons": icons,
    }
    # no-store: the manifest embeds the request origin, which may differ
    # per host/proxy, so it must not be cached across origins.
    return JSONResponse(
        manifest,
        media_type="application/manifest+json",
        headers={"Cache-Control": "no-store"},
    )
@app.get("/{path:path}")
async def serve_frontend(path: str):
"""Serve frontend files, falling back to index.html for SPA routing."""

View File

@@ -156,6 +156,27 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
await set_version(conn, 17)
applied += 1
# Migration 18: Drop UNIQUE(data) constraint on raw_packets (redundant with payload_hash)
if version < 18:
logger.info("Applying migration 18: drop raw_packets UNIQUE(data) constraint")
await _migrate_018_drop_raw_packets_data_unique(conn)
await set_version(conn, 18)
applied += 1
# Migration 19: Drop UNIQUE constraint on messages (redundant with dedup_null_safe index)
if version < 19:
logger.info("Applying migration 19: drop messages UNIQUE constraint")
await _migrate_019_drop_messages_unique_constraint(conn)
await set_version(conn, 19)
applied += 1
# Migration 20: Enable WAL journal mode and incremental auto-vacuum
if version < 20:
logger.info("Applying migration 20: enable WAL mode and incremental auto-vacuum")
await _migrate_020_enable_wal_and_auto_vacuum(conn)
await set_version(conn, 20)
applied += 1
if applied > 0:
logger.info(
"Applied %d migration(s), schema now at version %d", applied, await get_version(conn)
@@ -1054,3 +1075,180 @@ async def _migrate_017_drop_experimental_channel_double_send(conn: aiosqlite.Con
raise
await conn.commit()
async def _migrate_018_drop_raw_packets_data_unique(conn: aiosqlite.Connection) -> None:
    """
    Drop the UNIQUE constraint on raw_packets.data via table rebuild.

    This constraint creates a large autoindex (~30 MB on a 340K-row database) that
    stores a complete copy of every raw packet BLOB in a B-tree. Deduplication is
    already handled by the unique index on payload_hash, making the data UNIQUE
    constraint pure storage overhead.

    Requires table recreation since SQLite doesn't support DROP CONSTRAINT.
    Idempotent: skips the rebuild when the implicit autoindex is already gone.
    """
    # Check if the autoindex exists (indicates UNIQUE constraint on data).
    # SQLite names the implicit index backing a table constraint
    # sqlite_autoindex_<table>_<n>, so its absence means the constraint is gone.
    cursor = await conn.execute(
        "SELECT name FROM sqlite_master WHERE type='index' "
        "AND name='sqlite_autoindex_raw_packets_1'"
    )
    if not await cursor.fetchone():
        logger.debug("raw_packets.data UNIQUE constraint already absent, skipping rebuild")
        await conn.commit()
        return

    logger.info("Rebuilding raw_packets table to remove UNIQUE(data) constraint...")

    # Get current columns from the existing table; older databases may be
    # missing columns, so the copy below only uses the intersection.
    cursor = await conn.execute("PRAGMA table_info(raw_packets)")
    old_cols = {col[1] for col in await cursor.fetchall()}  # col[1] is the column name

    # Target schema without UNIQUE on data
    await conn.execute("""
        CREATE TABLE raw_packets_new (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp INTEGER NOT NULL,
            data BLOB NOT NULL,
            message_id INTEGER,
            payload_hash TEXT,
            FOREIGN KEY (message_id) REFERENCES messages(id)
        )
    """)

    # Copy only columns that exist in both old and new tables
    new_cols = {"id", "timestamp", "data", "message_id", "payload_hash"}
    copy_cols = ", ".join(sorted(c for c in new_cols if c in old_cols))
    await conn.execute(
        f"INSERT INTO raw_packets_new ({copy_cols}) SELECT {copy_cols} FROM raw_packets"
    )
    await conn.execute("DROP TABLE raw_packets")
    await conn.execute("ALTER TABLE raw_packets_new RENAME TO raw_packets")

    # Recreate indexes (they were dropped together with the old table)
    await conn.execute(
        "CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)"
    )
    await conn.execute("CREATE INDEX idx_raw_packets_message_id ON raw_packets(message_id)")

    await conn.commit()
    logger.info("raw_packets table rebuilt without UNIQUE(data) constraint")
async def _migrate_019_drop_messages_unique_constraint(conn: aiosqlite.Connection) -> None:
    """
    Drop the UNIQUE(type, conversation_key, text, sender_timestamp) constraint on messages.

    This constraint creates a large autoindex (~13 MB on a 112K-row database) that
    stores the full message text in a B-tree. The idx_messages_dedup_null_safe unique
    index already provides identical dedup protection — no rows have NULL
    sender_timestamp since migration 15 backfilled them all.

    INSERT OR IGNORE still works correctly because it checks all unique constraints,
    including unique indexes like idx_messages_dedup_null_safe.

    Requires table recreation since SQLite doesn't support DROP CONSTRAINT.
    Idempotent: skips the rebuild when the implicit autoindex is already gone.
    """
    # Check if the autoindex exists (indicates UNIQUE constraint); absence means
    # this migration already ran or the DB was created from the new schema.
    cursor = await conn.execute(
        "SELECT name FROM sqlite_master WHERE type='index' AND name='sqlite_autoindex_messages_1'"
    )
    if not await cursor.fetchone():
        logger.debug("messages UNIQUE constraint already absent, skipping rebuild")
        await conn.commit()
        return

    logger.info("Rebuilding messages table to remove UNIQUE constraint...")

    # Get current columns from the existing table; the copy below uses the
    # intersection so older databases missing newer columns still migrate.
    cursor = await conn.execute("PRAGMA table_info(messages)")
    old_cols = {col[1] for col in await cursor.fetchall()}  # col[1] is the column name

    # Target schema without the UNIQUE table constraint
    await conn.execute("""
        CREATE TABLE messages_new (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            type TEXT NOT NULL,
            conversation_key TEXT NOT NULL,
            text TEXT NOT NULL,
            sender_timestamp INTEGER,
            received_at INTEGER NOT NULL,
            txt_type INTEGER DEFAULT 0,
            signature TEXT,
            outgoing INTEGER DEFAULT 0,
            acked INTEGER DEFAULT 0,
            paths TEXT
        )
    """)

    # Copy only columns that exist in both old and new tables
    new_cols = {
        "id",
        "type",
        "conversation_key",
        "text",
        "sender_timestamp",
        "received_at",
        "txt_type",
        "signature",
        "outgoing",
        "acked",
        "paths",
    }
    copy_cols = ", ".join(sorted(c for c in new_cols if c in old_cols))
    await conn.execute(f"INSERT INTO messages_new ({copy_cols}) SELECT {copy_cols} FROM messages")
    await conn.execute("DROP TABLE messages")
    await conn.execute("ALTER TABLE messages_new RENAME TO messages")

    # Recreate indexes (they were dropped together with the old table)
    await conn.execute("CREATE INDEX idx_messages_conversation ON messages(type, conversation_key)")
    await conn.execute("CREATE INDEX idx_messages_received ON messages(received_at)")
    # COALESCE makes NULL sender_timestamps participate in uniqueness, unlike a
    # plain UNIQUE constraint where NULLs never collide with each other.
    await conn.execute(
        """CREATE UNIQUE INDEX idx_messages_dedup_null_safe
        ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))"""
    )

    await conn.commit()
    logger.info("messages table rebuilt without UNIQUE constraint")
async def _migrate_020_enable_wal_and_auto_vacuum(conn: aiosqlite.Connection) -> None:
    """
    Enable WAL journal mode and incremental auto-vacuum.

    WAL (Write-Ahead Logging):
    - Faster writes: appends to a WAL file instead of rewriting the main DB
    - Concurrent reads during writes (readers don't block writers)
    - No journal file create/delete churn on every commit

    Incremental auto-vacuum:
    - Pages freed by DELETE become reclaimable without a full VACUUM
    - Call PRAGMA incremental_vacuum to reclaim on demand
    - Less overhead than FULL auto-vacuum (which reorganizes on every commit)

    auto_vacuum mode change requires a VACUUM to restructure the file.
    The VACUUM is performed before switching to WAL so it runs under the
    current journal mode; WAL is then set as the final step.
    """
    # Check current auto_vacuum mode (0 = NONE, 1 = FULL, 2 = INCREMENTAL)
    cursor = await conn.execute("PRAGMA auto_vacuum")
    row = await cursor.fetchone()
    current_auto_vacuum = row[0] if row else 0

    if current_auto_vacuum != 2:  # 2 = INCREMENTAL
        logger.info("Switching auto_vacuum to INCREMENTAL (requires VACUUM)...")
        # The pragma alone only marks intent; the VACUUM rewrite is what
        # actually restructures the file into the new auto_vacuum mode.
        await conn.execute("PRAGMA auto_vacuum = INCREMENTAL")
        await conn.execute("VACUUM")
        logger.info("VACUUM complete, auto_vacuum set to INCREMENTAL")
    else:
        logger.debug("auto_vacuum already INCREMENTAL, skipping VACUUM")

    # Enable WAL mode (idempotent — the pragma returns the resulting mode)
    cursor = await conn.execute("PRAGMA journal_mode = WAL")
    row = await cursor.fetchone()
    mode = row[0] if row else "unknown"
    logger.info("Journal mode set to %s", mode)

    await conn.commit()

View File

@@ -801,6 +801,13 @@ class RawPacketRepository:
await db.conn.commit()
return cursor.rowcount
@staticmethod
async def purge_linked_to_messages() -> int:
    """Delete raw packets that are already linked to a stored message.

    Returns the number of rows removed.
    """
    delete_cursor = await db.conn.execute(
        "DELETE FROM raw_packets WHERE message_id IS NOT NULL"
    )
    await db.conn.commit()
    # rowcount reflects how many rows the DELETE removed.
    return delete_cursor.rowcount
@staticmethod
async def get_undecrypted_text_messages() -> list[tuple[int, bytes, int]]:
"""Get all undecrypted TEXT_MESSAGE packets as (id, data, timestamp) tuples.

View File

@@ -306,10 +306,17 @@ RESEND_WINDOW_SECONDS = 30
@router.post("/channel/{message_id}/resend")
async def resend_channel_message(message_id: int) -> dict:
"""Resend a channel message within 30 seconds of original send.
async def resend_channel_message(
message_id: int,
new_timestamp: bool = Query(default=False),
) -> dict:
"""Resend a channel message.
Performs a byte-perfect resend using the same timestamp bytes as the original.
When new_timestamp=False (default): byte-perfect resend using the original timestamp.
Only allowed within 30 seconds of the original send.
When new_timestamp=True: resend with a fresh timestamp so repeaters treat it as a
new packet. Creates a new message row in the database. No time window restriction.
"""
mc = require_connected()
@@ -328,16 +335,22 @@ async def resend_channel_message(message_id: int) -> dict:
if msg.sender_timestamp is None:
raise HTTPException(status_code=400, detail="Message has no timestamp")
elapsed = int(time.time()) - msg.sender_timestamp
if elapsed > RESEND_WINDOW_SECONDS:
raise HTTPException(status_code=400, detail="Resend window has expired (30 seconds)")
# Byte-perfect resend enforces the 30s window; new-timestamp resend does not
if not new_timestamp:
elapsed = int(time.time()) - msg.sender_timestamp
if elapsed > RESEND_WINDOW_SECONDS:
raise HTTPException(status_code=400, detail="Resend window has expired (30 seconds)")
db_channel = await ChannelRepository.get_by_key(msg.conversation_key)
if not db_channel:
raise HTTPException(status_code=404, detail=f"Channel {msg.conversation_key} not found")
# Reconstruct timestamp bytes
timestamp_bytes = msg.sender_timestamp.to_bytes(4, "little")
# Choose timestamp: original for byte-perfect, fresh for new-timestamp
if new_timestamp:
now = int(time.time())
timestamp_bytes = now.to_bytes(4, "little")
else:
timestamp_bytes = msg.sender_timestamp.to_bytes(4, "little")
# Strip sender prefix: DB stores "RadioName: message" but radio needs "message"
radio_name = mc.self_info.get("name", "") if mc.self_info else ""
@@ -374,5 +387,47 @@ async def resend_channel_message(message_id: int) -> dict:
status_code=500, detail=f"Failed to resend message: {result.payload}"
)
# For new-timestamp resend, create a new message row and broadcast it
if new_timestamp:
new_msg_id = await MessageRepository.create(
msg_type="CHAN",
text=msg.text,
conversation_key=msg.conversation_key,
sender_timestamp=now,
received_at=now,
outgoing=True,
)
if new_msg_id is None:
# Timestamp-second collision (same text+channel within the same second).
# The radio already transmitted, so log and return the original ID rather
# than surfacing a 500 for a message that was successfully sent over the air.
logger.warning(
"Duplicate timestamp collision resending message %d — radio sent but DB row not created",
message_id,
)
return {"status": "ok", "message_id": message_id}
broadcast_event(
"message",
Message(
id=new_msg_id,
type="CHAN",
conversation_key=msg.conversation_key,
text=msg.text,
sender_timestamp=now,
received_at=now,
outgoing=True,
acked=0,
).model_dump(),
)
logger.info(
"Resent channel message %d as new message %d to %s",
message_id,
new_msg_id,
db_channel.name,
)
return {"status": "ok", "message_id": new_msg_id}
logger.info("Resent channel message %d to %s", message_id, db_channel.name)
return {"status": "ok", "message_id": message_id}

View File

@@ -236,8 +236,12 @@ async def decrypt_historical_packets(
class MaintenanceRequest(BaseModel):
prune_undecrypted_days: int = Field(
ge=1, description="Delete undecrypted packets older than this many days"
prune_undecrypted_days: int | None = Field(
default=None, ge=1, description="Delete undecrypted packets older than this many days"
)
purge_linked_raw_packets: bool = Field(
default=False,
description="Delete raw packets already linked to a stored message",
)
@@ -249,18 +253,30 @@ class MaintenanceResult(BaseModel):
@router.post("/maintenance", response_model=MaintenanceResult)
async def run_maintenance(request: MaintenanceRequest) -> MaintenanceResult:
"""
Clean up old undecrypted packets and reclaim disk space.
Run packet maintenance tasks and reclaim disk space.
- Deletes undecrypted packets older than the specified number of days
- Optionally deletes undecrypted packets older than the specified number of days
- Optionally deletes raw packets already linked to stored messages
- Runs VACUUM to reclaim disk space
"""
logger.info(
"Running maintenance: pruning packets older than %d days", request.prune_undecrypted_days
)
deleted = 0
# Prune old undecrypted packets
deleted = await RawPacketRepository.prune_old_undecrypted(request.prune_undecrypted_days)
logger.info("Deleted %d old undecrypted packets", deleted)
if request.prune_undecrypted_days is not None:
logger.info(
"Running maintenance: pruning undecrypted packets older than %d days",
request.prune_undecrypted_days,
)
pruned_undecrypted = await RawPacketRepository.prune_old_undecrypted(
request.prune_undecrypted_days
)
deleted += pruned_undecrypted
logger.info("Deleted %d old undecrypted packets", pruned_undecrypted)
if request.purge_linked_raw_packets:
logger.info("Running maintenance: purging raw packets linked to stored messages")
purged_linked = await RawPacketRepository.purge_linked_to_messages()
deleted += purged_linked
logger.info("Deleted %d linked raw packets", purged_linked)
# Run VACUUM to reclaim space on a dedicated connection
async with aiosqlite.connect(db.db_path) as vacuum_conn:

View File

@@ -1,21 +0,0 @@
{
"name": "RemoteTerm for MeshCore",
"short_name": "RemoteTerm",
"icons": [
{
"src": "/web-app-manifest-192x192.png",
"sizes": "192x192",
"type": "image/png",
"purpose": "maskable"
},
{
"src": "/web-app-manifest-512x512.png",
"sizes": "512x512",
"type": "image/png",
"purpose": "maskable"
}
],
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone"
}

View File

@@ -371,16 +371,21 @@ export function App() {
);
// Handle resend channel message
const handleResendChannelMessage = useCallback(async (messageId: number) => {
try {
await api.resendChannelMessage(messageId);
toast.success('Message resent');
} catch (err) {
toast.error('Failed to resend', {
description: err instanceof Error ? err.message : 'Unknown error',
});
}
}, []);
const handleResendChannelMessage = useCallback(
  async (messageId: number, newTimestamp?: boolean) => {
    try {
      // A new-timestamp resend creates a fresh message row server-side; the
      // backend broadcast_event will add it to the conversation via WebSocket.
      await api.resendChannelMessage(messageId, newTimestamp);
      const successText = newTimestamp ? 'Message resent with new timestamp' : 'Message resent';
      toast.success(successText);
    } catch (err) {
      const description = err instanceof Error ? err.message : 'Unknown error';
      toast.error('Failed to resend', { description });
    }
  },
  []
);
// Handle sender click to add mention
const handleSenderClick = useCallback((sender: string) => {

View File

@@ -167,10 +167,11 @@ export const api = {
method: 'POST',
body: JSON.stringify({ channel_key: channelKey, text }),
}),
resendChannelMessage: (messageId: number) =>
fetchJson<{ status: string; message_id: number }>(`/messages/channel/${messageId}/resend`, {
method: 'POST',
}),
resendChannelMessage: (messageId: number, newTimestamp?: boolean) => {
  // The query flag is only appended for new-timestamp resends so the
  // default (byte-perfect) path keeps its original URL shape.
  const query = newTimestamp ? '?new_timestamp=true' : '';
  return fetchJson<{ status: string; message_id: number }>(
    `/messages/channel/${messageId}/resend${query}`,
    { method: 'POST' }
  );
},
// Packets
getUndecryptedPacketCount: () => fetchJson<{ count: number }>('/packets/undecrypted/count'),
@@ -183,10 +184,17 @@ export const api = {
method: 'POST',
body: JSON.stringify(params),
}),
runMaintenance: (pruneUndecryptedDays: number) =>
runMaintenance: (options: { pruneUndecryptedDays?: number; purgeLinkedRawPackets?: boolean }) => {
  // Only send keys the caller actually provided so the backend's field
  // defaults apply to anything omitted.
  const payload: Record<string, unknown> = {};
  if (options.pruneUndecryptedDays !== undefined) {
    payload.prune_undecrypted_days = options.pruneUndecryptedDays;
  }
  if (options.purgeLinkedRawPackets !== undefined) {
    payload.purge_linked_raw_packets = options.purgeLinkedRawPackets;
  }
  return fetchJson<MaintenanceResult>('/packets/maintenance', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
},
// Read State

View File

@@ -166,19 +166,26 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
// For repeater mode, always allow submit (empty = guest login)
const canSubmit = isRepeaterMode ? true : text.trim().length > 0;
// Show character counter for messages (not repeater mode or raw)
// Show counter for messages (not repeater mode or raw).
// Desktop: always visible. Mobile: only show count after 100 characters.
const showCharCounter = !isRepeaterMode && limits !== null;
const showMobileCounterValue = text.length > 100;
return (
<form
className="px-4 py-2.5 border-t border-border flex flex-col gap-1"
onSubmit={handleSubmit}
autoComplete="off"
>
<div className="flex gap-2">
<Input
ref={inputRef}
type={isRepeaterMode ? 'password' : 'text'}
autoComplete={isRepeaterMode ? 'off' : undefined}
autoComplete="off"
name="chat-message-input"
data-lpignore="true"
data-1p-ignore="true"
data-bwignore="true"
value={text}
onChange={(e) => setText(e.target.value)}
onKeyDown={handleKeyDown}
@@ -206,25 +213,53 @@ export const MessageInput = forwardRef<MessageInputHandle, MessageInputProps>(fu
</Button>
</div>
{showCharCounter && (
<div className="flex items-center justify-end gap-2 text-xs">
<span
className={cn(
'tabular-nums',
limitState === 'error' || limitState === 'danger'
? 'text-red-500 font-medium'
: limitState === 'warning'
? 'text-yellow-500'
: 'text-muted-foreground'
)}
>
{textByteLen}/{limits!.hardLimit}b{remaining < 0 && ` (${remaining})`}
</span>
{warningMessage && (
<span className={cn(limitState === 'error' ? 'text-red-500' : 'text-yellow-500')}>
{warningMessage}
<>
<div className="hidden sm:flex items-center justify-end gap-2 text-xs">
<span
className={cn(
'tabular-nums',
limitState === 'error' || limitState === 'danger'
? 'text-red-500 font-medium'
: limitState === 'warning'
? 'text-yellow-500'
: 'text-muted-foreground'
)}
>
{textByteLen}/{limits!.hardLimit}
{remaining < 0 && ` (${remaining})`}
</span>
{warningMessage && (
<span className={cn(limitState === 'error' ? 'text-red-500' : 'text-yellow-500')}>
{warningMessage}
</span>
)}
</div>
{(showMobileCounterValue || warningMessage) && (
<div className="flex sm:hidden items-center justify-end gap-2 text-xs">
{showMobileCounterValue && (
<span
className={cn(
'tabular-nums',
limitState === 'error' || limitState === 'danger'
? 'text-red-500 font-medium'
: limitState === 'warning'
? 'text-yellow-500'
: 'text-muted-foreground'
)}
>
{textByteLen}/{limits!.hardLimit}
{remaining < 0 && ` (${remaining})`}
</span>
)}
{warningMessage && (
<span className={cn(limitState === 'error' ? 'text-red-500' : 'text-yellow-500')}>
{warningMessage}
</span>
)}
</div>
)}
</div>
</>
)}
</form>
);

View File

@@ -23,7 +23,7 @@ interface MessageListProps {
hasOlderMessages?: boolean;
onSenderClick?: (sender: string) => void;
onLoadOlder?: () => void;
onResendChannelMessage?: (messageId: number) => void;
onResendChannelMessage?: (messageId: number, newTimestamp?: boolean) => void;
radioName?: string;
config?: RadioConfig | null;
}
@@ -156,12 +156,11 @@ export function MessageList({
const [selectedPath, setSelectedPath] = useState<{
paths: MessagePath[];
senderInfo: SenderInfo;
messageId?: number;
isOutgoingChan?: boolean;
} | null>(null);
const [resendableIds, setResendableIds] = useState<Set<number>>(new Set());
const resendTimersRef = useRef<Map<number, ReturnType<typeof setTimeout>>>(new Map());
const activeBurstsRef = useRef<Map<number, ReturnType<typeof setTimeout>[]>>(new Map());
const onResendRef = useRef(onResendChannelMessage);
onResendRef.current = onResendChannelMessage;
// Capture scroll state in the scroll handler BEFORE any state updates
const scrollStateRef = useRef({
@@ -262,17 +261,6 @@ export function MessageList({
};
}, [messages, onResendChannelMessage]);
// Clean up burst timers on unmount
useEffect(() => {
const bursts = activeBurstsRef.current;
return () => {
for (const timers of bursts.values()) {
for (const t of timers) clearTimeout(t);
}
bursts.clear();
};
}, []);
// Handle scroll - capture state and detect when user is near top/bottom
const handleScroll = useCallback(() => {
if (!listRef.current) return;
@@ -315,6 +303,21 @@ export function MessageList({
[messages]
);
// Sender info for outgoing messages (used by path modal on own messages)
const selfSenderInfo = useMemo<SenderInfo>(
() => ({
name: config?.name || 'Unknown',
publicKeyOrPrefix: config?.public_key || '',
lat: config?.lat ?? null,
lon: config?.lon ?? null,
}),
[config?.name, config?.public_key, config?.lat, config?.lon]
);
// Derive live so the byte-perfect button disables if the 30s window expires while modal is open
const isSelectedMessageResendable =
selectedPath?.messageId !== undefined && resendableIds.has(selectedPath.messageId);
// Look up contact by public key
const getContact = (conversationKey: string | null): Contact | null => {
if (!conversationKey) return null;
@@ -520,34 +523,6 @@ export function MessageList({
)}
</>
)}
{msg.outgoing && onResendChannelMessage && resendableIds.has(msg.id) && (
<button
className="text-muted-foreground hover:text-primary ml-1 text-xs cursor-pointer"
onClick={(e) => {
e.stopPropagation();
if (e.altKey) {
// Burst resend: 5 times, 2 seconds apart
if (activeBurstsRef.current.has(msg.id)) return;
onResendChannelMessage(msg.id); // first send (immediate)
const msgId = msg.id;
const timers: ReturnType<typeof setTimeout>[] = [];
for (let i = 1; i <= 4; i++) {
const timer = setTimeout(() => {
onResendRef.current?.(msgId);
if (i === 4) activeBurstsRef.current.delete(msgId);
}, i * 3000);
timers.push(timer);
}
activeBurstsRef.current.set(msgId, timers);
} else {
onResendChannelMessage(msg.id);
}
}}
title="Resend message"
>
</button>
)}
{msg.outgoing &&
(msg.acked > 0 ? (
msg.paths && msg.paths.length > 0 ? (
@@ -557,12 +532,9 @@ export function MessageList({
e.stopPropagation();
setSelectedPath({
paths: msg.paths!,
senderInfo: {
name: config?.name || 'Unknown',
publicKeyOrPrefix: config?.public_key || '',
lat: config?.lat ?? null,
lon: config?.lon ?? null,
},
senderInfo: selfSenderInfo,
messageId: msg.id,
isOutgoingChan: msg.type === 'CHAN' && !!onResendChannelMessage,
});
}}
title="View echo paths"
@@ -570,6 +542,23 @@ export function MessageList({
) : (
<span className="text-muted-foreground">{`${msg.acked > 1 ? msg.acked : ''}`}</span>
)
) : onResendChannelMessage && msg.type === 'CHAN' ? (
<span
className="text-muted-foreground cursor-pointer hover:text-primary"
onClick={(e) => {
e.stopPropagation();
setSelectedPath({
paths: [],
senderInfo: selfSenderInfo,
messageId: msg.id,
isOutgoingChan: true,
});
}}
title="Message status"
>
{' '}
?
</span>
) : (
<span className="text-muted-foreground" title="No repeats heard yet">
{' '}
@@ -616,6 +605,10 @@ export function MessageList({
senderInfo={selectedPath.senderInfo}
contacts={contacts}
config={config ?? null}
messageId={selectedPath.messageId}
isOutgoingChan={selectedPath.isOutgoingChan}
isResendable={isSelectedMessageResendable}
onResend={onResendChannelMessage}
/>
)}
</div>

View File

@@ -1,12 +1,5 @@
import type { Contact, RadioConfig, MessagePath } from '../types';
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
DialogDescription,
DialogFooter,
} from './ui/dialog';
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogDescription } from './ui/dialog';
import { Button } from './ui/button';
import {
resolvePath,
@@ -28,14 +21,34 @@ interface PathModalProps {
senderInfo: SenderInfo;
contacts: Contact[];
config: RadioConfig | null;
messageId?: number;
isOutgoingChan?: boolean;
isResendable?: boolean;
onResend?: (messageId: number, newTimestamp?: boolean) => void;
}
export function PathModal({ open, onClose, paths, senderInfo, contacts, config }: PathModalProps) {
export function PathModal({
open,
onClose,
paths,
senderInfo,
contacts,
config,
messageId,
isOutgoingChan,
isResendable,
onResend,
}: PathModalProps) {
const hasResendActions = isOutgoingChan && messageId !== undefined && onResend;
const hasPaths = paths.length > 0;
// Resolve all paths
const resolvedPaths = paths.map((p) => ({
...p,
resolved: resolvePath(p.path, senderInfo, contacts, config),
}));
const resolvedPaths = hasPaths
? paths.map((p) => ({
...p,
resolved: resolvePath(p.path, senderInfo, contacts, config),
}))
: [];
const hasSinglePath = paths.length === 1;
@@ -43,9 +56,15 @@ export function PathModal({ open, onClose, paths, senderInfo, contacts, config }
<Dialog open={open} onOpenChange={(isOpen) => !isOpen && onClose()}>
<DialogContent className="max-w-md max-h-[80vh] flex flex-col">
<DialogHeader>
<DialogTitle>Message Path{!hasSinglePath && `s (${paths.length})`}</DialogTitle>
<DialogTitle>
{hasPaths
? `Message Path${!hasSinglePath ? `s (${paths.length})` : ''}`
: 'Message Status'}
</DialogTitle>
<DialogDescription>
{hasSinglePath ? (
{!hasPaths ? (
<>No echoes heard yet. Echoes appear when repeaters re-broadcast your message.</>
) : hasSinglePath ? (
<>
This shows <em>one route</em> that this message traveled through the mesh network.
Routers may be incorrectly identified due to prefix collisions between heard and
@@ -60,64 +79,104 @@ export function PathModal({ open, onClose, paths, senderInfo, contacts, config }
</DialogDescription>
</DialogHeader>
<div className="flex-1 overflow-y-auto py-2 space-y-4">
{/* Raw path summary */}
<div className="text-sm">
{paths.map((p, index) => {
const hops = parsePathHops(p.path);
const rawPath = hops.length > 0 ? hops.join('->') : 'direct';
return (
<div key={index}>
<span className="text-foreground/70 font-semibold">Path {index + 1}:</span>{' '}
<span className="font-mono text-muted-foreground">{rawPath}</span>
</div>
);
})}
</div>
{hasPaths && (
<div className="flex-1 overflow-y-auto py-2 space-y-4">
{/* Raw path summary */}
<div className="text-sm">
{paths.map((p, index) => {
const hops = parsePathHops(p.path);
const rawPath = hops.length > 0 ? hops.join('->') : 'direct';
return (
<div key={index}>
<span className="text-foreground/70 font-semibold">Path {index + 1}:</span>{' '}
<span className="font-mono text-muted-foreground">{rawPath}</span>
</div>
);
})}
</div>
{/* Straight-line distance (sender to receiver, same for all routes) */}
{resolvedPaths.length > 0 &&
isValidLocation(
resolvedPaths[0].resolved.sender.lat,
resolvedPaths[0].resolved.sender.lon
) &&
isValidLocation(
resolvedPaths[0].resolved.receiver.lat,
resolvedPaths[0].resolved.receiver.lon
) && (
<div className="text-sm pb-2 border-b border-border">
<span className="text-muted-foreground">Straight-line distance: </span>
<span className="font-medium">
{formatDistance(
calculateDistance(
resolvedPaths[0].resolved.sender.lat,
resolvedPaths[0].resolved.sender.lon,
resolvedPaths[0].resolved.receiver.lat,
resolvedPaths[0].resolved.receiver.lon
)!
)}
</span>
</div>
)}
{resolvedPaths.map((pathData, index) => (
<div key={index}>
{!hasSinglePath && (
<div className="text-sm text-foreground/70 font-semibold mb-2 pb-1 border-b border-border">
Path {index + 1}{' '}
<span className="font-normal text-muted-foreground">
received {formatTime(pathData.received_at)}
{/* Straight-line distance (sender to receiver, same for all routes) */}
{resolvedPaths.length > 0 &&
isValidLocation(
resolvedPaths[0].resolved.sender.lat,
resolvedPaths[0].resolved.sender.lon
) &&
isValidLocation(
resolvedPaths[0].resolved.receiver.lat,
resolvedPaths[0].resolved.receiver.lon
) && (
<div className="text-sm pb-2 border-b border-border">
<span className="text-muted-foreground">Straight-line distance: </span>
<span className="font-medium">
{formatDistance(
calculateDistance(
resolvedPaths[0].resolved.sender.lat,
resolvedPaths[0].resolved.sender.lon,
resolvedPaths[0].resolved.receiver.lat,
resolvedPaths[0].resolved.receiver.lon
)!
)}
</span>
</div>
)}
<PathVisualization resolved={pathData.resolved} senderInfo={senderInfo} />
</div>
))}
</div>
<DialogFooter>
<Button onClick={onClose}>Close</Button>
</DialogFooter>
{resolvedPaths.map((pathData, index) => (
<div key={index}>
{!hasSinglePath && (
<div className="text-sm text-foreground/70 font-semibold mb-2 pb-1 border-b border-border">
Path {index + 1}{' '}
<span className="font-normal text-muted-foreground">
received {formatTime(pathData.received_at)}
</span>
</div>
)}
<PathVisualization resolved={pathData.resolved} senderInfo={senderInfo} />
</div>
))}
</div>
)}
<div className="flex flex-col gap-2 pt-2">
{hasResendActions && (
<div className="flex gap-2">
{isResendable && (
<Button
variant="outline"
className="flex-1 min-w-0 h-auto py-2"
onClick={() => {
onResend(messageId);
onClose();
}}
>
<span className="flex flex-col items-center leading-tight">
<span> Resend</span>
<span className="text-[10px] font-normal opacity-80">
Only repeated by new repeaters
</span>
</span>
</Button>
)}
<Button
variant="destructive"
className="flex-1 min-w-0 h-auto py-2"
onClick={() => {
onResend(messageId, true);
onClose();
}}
>
<span className="flex flex-col items-center leading-tight">
<span> Resend as new</span>
<span className="text-[10px] font-normal opacity-80">
Will appear as duplicate to receivers
</span>
</span>
</Button>
</div>
)}
<Button variant="secondary" className="h-auto py-2" onClick={onClose}>
Close
</Button>
</div>
</DialogContent>
</Dialog>
);

View File

@@ -19,6 +19,11 @@ import { Separator } from './ui/separator';
import { toast } from './ui/sonner';
import { api } from '../api';
import { formatTime } from '../utils/messageParser';
import {
captureLastViewedConversationFromHash,
getReopenLastConversationEnabled,
setReopenLastConversationEnabled,
} from '../utils/lastViewedConversation';
// Radio presets for common configurations
interface RadioPreset {
@@ -140,7 +145,11 @@ export function SettingsModal(props: SettingsModalProps) {
// Database maintenance state
const [retentionDays, setRetentionDays] = useState('14');
const [cleaning, setCleaning] = useState(false);
const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false);
const [autoDecryptOnAdvert, setAutoDecryptOnAdvert] = useState(false);
const [reopenLastConversation, setReopenLastConversation] = useState(
getReopenLastConversationEnabled
);
// Advertisement interval state
const [advertInterval, setAdvertInterval] = useState('0');
@@ -222,6 +231,12 @@ export function SettingsModal(props: SettingsModalProps) {
}
}, [open, pageMode, onRefreshAppSettings]);
useEffect(() => {
  // Re-read the device-local "reopen last conversation" preference whenever the
  // settings UI becomes visible — another tab may have changed localStorage
  // since the component mounted.
  if (open || pageMode) {
    setReopenLastConversation(getReopenLastConversationEnabled());
  }
}, [open, pageMode]);
useEffect(() => {
if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') return;
@@ -495,7 +510,7 @@ export function SettingsModal(props: SettingsModalProps) {
setCleaning(true);
try {
const result = await api.runMaintenance(days);
const result = await api.runMaintenance({ pruneUndecryptedDays: days });
toast.success('Database cleanup complete', {
description: `Deleted ${result.packets_deleted} old packet${result.packets_deleted === 1 ? '' : 's'}`,
});
@@ -510,6 +525,25 @@ export function SettingsModal(props: SettingsModalProps) {
}
};
// Delete archival raw-packet bytes for messages that are already decrypted,
// then refresh health stats so the reported database size updates.
const handlePurgeDecryptedRawPackets = async () => {
  setPurgingDecryptedRaw(true);
  try {
    const result = await api.runMaintenance({ purgeLinkedRawPackets: true });
    const count = result.packets_deleted;
    toast.success('Decrypted raw packets purged', {
      description: `Deleted ${count} raw packet${count === 1 ? '' : 's'}`,
    });
    await onHealthRefresh();
  } catch (err) {
    console.error('Failed to purge decrypted raw packets:', err);
    toast.error('Failed to purge decrypted raw packets', {
      description: err instanceof Error ? err.message : 'Unknown error',
    });
  } finally {
    // Always clear the busy flag, whether the request succeeded or failed.
    setPurgingDecryptedRaw(false);
  }
};
const handleSaveDatabaseSettings = async () => {
setBusySection('database');
setSectionError(null);
@@ -529,6 +563,14 @@ export function SettingsModal(props: SettingsModalProps) {
}
};
// Mirror the toggle into component state and device-local storage; when
// enabling, immediately snapshot whatever conversation the URL hash encodes.
const handleToggleReopenLastConversation = (enabled: boolean) => {
  setReopenLastConversation(enabled);
  setReopenLastConversationEnabled(enabled);
  if (!enabled) return;
  captureLastViewedConversationFromHash();
};
const handleSaveBotSettings = async () => {
setBusySection('bot');
setSectionError(null);
@@ -612,14 +654,14 @@ export function SettingsModal(props: SettingsModalProps) {
const shouldRenderSection = (section: SettingsSection) =>
!externalDesktopSidebarMode || desktopSection === section;
const sectionWrapperClass = 'border border-input rounded-md overflow-hidden';
const sectionWrapperClass = 'overflow-hidden';
const sectionContentClass = externalDesktopSidebarMode
? 'space-y-4 p-4 h-full overflow-y-auto'
? 'space-y-4 p-4'
: 'space-y-4 p-4 border-t border-input';
const settingsContainerClass = externalDesktopSidebarMode
? 'w-full h-full'
? 'w-full h-full overflow-y-auto'
: 'w-full h-full overflow-y-auto space-y-3';
const sectionButtonClasses =
@@ -996,15 +1038,17 @@ export function SettingsModal(props: SettingsModalProps) {
<Separator />
<div className="space-y-3">
<Label>Cleanup Old Packets</Label>
<Label>Delete Undecrypted Packets</Label>
<p className="text-xs text-muted-foreground">
Delete undecrypted packets older than the specified days. This helps manage
storage for packets that couldn't be decrypted (unknown channel keys).
Permanently deletes stored raw packets containing DMs and channel messages that
have not yet been decrypted. These packets are retained in case you later obtain
the correct key — once deleted, these messages can never be recovered or
decrypted.
</p>
<div className="flex gap-2 items-end">
<div className="space-y-1">
<Label htmlFor="retention-days" className="text-xs">
Days to retain
Older than (days)
</Label>
<Input
id="retention-days"
@@ -1016,14 +1060,43 @@ export function SettingsModal(props: SettingsModalProps) {
className="w-24"
/>
</div>
<Button variant="outline" onClick={handleCleanup} disabled={cleaning}>
{cleaning ? 'Cleaning...' : 'Cleanup'}
<Button
variant="outline"
onClick={handleCleanup}
disabled={cleaning}
className="border-red-500/50 text-red-400 hover:bg-red-500/10"
>
{cleaning ? 'Deleting...' : 'Permanently Delete'}
</Button>
</div>
</div>
<Separator />
<div className="space-y-3">
<Label>Purge Archival Raw Packets</Label>
<p className="text-xs text-muted-foreground">
Deletes archival copies of raw packet bytes for messages that are already
decrypted and visible in your chat history.{' '}
<em className="text-muted-foreground/80">
This will not affect any displayed messages or app functionality.
</em>{' '}
The raw bytes are only useful for manual packet analysis.
</p>
<Button
variant="outline"
onClick={handlePurgeDecryptedRawPackets}
disabled={purgingDecryptedRaw}
className="w-full border-yellow-500/50 text-yellow-400 hover:bg-yellow-500/10"
>
{purgingDecryptedRaw
? 'Purging Archival Raw Packets...'
: 'Purge Archival Raw Packets'}
</Button>
</div>
<Separator />
<div className="space-y-3">
<Label>DM Decryption</Label>
<label className="flex items-center gap-3 cursor-pointer">
@@ -1044,6 +1117,24 @@ export function SettingsModal(props: SettingsModalProps) {
</p>
</div>
<Separator />
<div className="space-y-3">
<Label>Interface</Label>
<label className="flex items-center gap-3 cursor-pointer">
<input
type="checkbox"
checked={reopenLastConversation}
onChange={(e) => handleToggleReopenLastConversation(e.target.checked)}
className="w-4 h-4 rounded border-input accent-primary"
/>
<span className="text-sm">Reopen to last viewed channel/conversation</span>
</label>
<p className="text-xs text-muted-foreground">
This applies only to this device/browser. It does not sync to server settings.
</p>
</div>
{getSectionError('database') && (
<div className="text-sm text-destructive">{getSectionError('database')}</div>
)}

View File

@@ -19,7 +19,7 @@ export const SETTINGS_SECTION_LABELS: Record<SettingsSection, string> = {
radio: '📻 Radio',
identity: '🪪 Identity',
connectivity: '📡 Connectivity',
database: '🗄️ Database',
database: '🗄️ Database & Interface',
bot: '🤖 Bot',
statistics: '📊 Statistics',
};

View File

@@ -5,6 +5,11 @@ import {
resolveChannelFromHashToken,
resolveContactFromHashToken,
} from '../utils/urlHash';
import {
getLastViewedConversation,
getReopenLastConversationEnabled,
saveLastViewedConversation,
} from '../utils/lastViewedConversation';
import { getContactDisplayName } from '../utils/pubkey';
import type { Channel, Contact, Conversation } from '../types';
@@ -27,8 +32,26 @@ export function useConversationRouter({
pendingDeleteFallbackRef,
hasSetDefaultConversation,
}: UseConversationRouterArgs) {
const [activeConversation, setActiveConversation] = useState<Conversation | null>(null);
const [activeConversation, setActiveConversationState] = useState<Conversation | null>(null);
const activeConversationRef = useRef<Conversation | null>(null);
const hashSyncEnabledRef = useRef(
typeof window !== 'undefined' ? window.location.hash.length > 0 : false
);
const setActiveConversation = useCallback((conv: Conversation | null) => {
  // Any explicit selection re-enables URL-hash syncing, even when the app
  // originally launched without a hash (hashSyncEnabledRef started false).
  hashSyncEnabledRef.current = true;
  setActiveConversationState(conv);
}, []);
const getPublicChannelConversation = useCallback((): Conversation | null => {
  // Default landing conversation; returns null until the Public channel has
  // been loaded into `channels`.
  const publicChannel = channels.find((c) => c.name === 'Public');
  if (!publicChannel) return null;
  return {
    type: 'channel',
    id: publicChannel.key,
    name: publicChannel.name,
  };
}, [channels]);
// Phase 1: Set initial conversation from URL hash or default to Public channel
// Only needs channels (fast path) - doesn't wait for contacts
@@ -40,12 +63,12 @@ export function useConversationRouter({
// Handle non-data views immediately
if (hashConv?.type === 'raw') {
setActiveConversation({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' });
setActiveConversationState({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' });
hasSetDefaultConversation.current = true;
return;
}
if (hashConv?.type === 'map') {
setActiveConversation({
setActiveConversationState({
type: 'map',
id: 'map',
name: 'Node Map',
@@ -55,7 +78,7 @@ export function useConversationRouter({
return;
}
if (hashConv?.type === 'visualizer') {
setActiveConversation({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' });
setActiveConversationState({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' });
hasSetDefaultConversation.current = true;
return;
}
@@ -64,7 +87,7 @@ export function useConversationRouter({
if (hashConv?.type === 'channel') {
const channel = resolveChannelFromHashToken(hashConv.name, channels);
if (channel) {
setActiveConversation({ type: 'channel', id: channel.key, name: channel.name });
setActiveConversationState({ type: 'channel', id: channel.key, name: channel.name });
hasSetDefaultConversation.current = true;
return;
}
@@ -73,17 +96,42 @@ export function useConversationRouter({
// Contact hash — wait for phase 2
if (hashConv?.type === 'contact') return;
// No hash: optionally restore last-viewed conversation if enabled on this device.
if (!hashConv && getReopenLastConversationEnabled()) {
const lastViewed = getLastViewedConversation();
if (
lastViewed &&
(lastViewed.type === 'raw' || lastViewed.type === 'map' || lastViewed.type === 'visualizer')
) {
setActiveConversationState(lastViewed);
hasSetDefaultConversation.current = true;
return;
}
if (lastViewed?.type === 'channel') {
const channel =
channels.find((c) => c.key.toLowerCase() === lastViewed.id.toLowerCase()) ||
resolveChannelFromHashToken(lastViewed.id, channels);
if (channel) {
setActiveConversationState({
type: 'channel',
id: channel.key,
name: channel.name,
});
hasSetDefaultConversation.current = true;
return;
}
}
// Last-viewed contact resolution waits for contacts in phase 2.
if (lastViewed?.type === 'contact') return;
}
// No hash or unresolvable — default to Public
const publicChannel = channels.find((c) => c.name === 'Public');
if (publicChannel) {
setActiveConversation({
type: 'channel',
id: publicChannel.key,
name: publicChannel.name,
});
const publicConversation = getPublicChannelConversation();
if (publicConversation) {
setActiveConversationState(publicConversation);
hasSetDefaultConversation.current = true;
}
}, [channels, activeConversation]);
}, [channels, activeConversation, getPublicChannelConversation, hasSetDefaultConversation]);
// Phase 2: Resolve contact hash (only if phase 1 didn't set a conversation)
useEffect(() => {
@@ -95,7 +143,7 @@ export function useConversationRouter({
const contact = resolveContactFromHashToken(hashConv.name, contacts);
if (contact) {
setActiveConversation({
setActiveConversationState({
type: 'contact',
id: contact.public_key,
name: getContactDisplayName(contact.name, contact.public_key),
@@ -105,25 +153,58 @@ export function useConversationRouter({
}
// Contact hash didn't match — fall back to Public if channels loaded.
if (channels.length > 0) {
const publicChannel = channels.find((c) => c.name === 'Public');
if (publicChannel) {
setActiveConversation({
type: 'channel',
id: publicChannel.key,
name: publicChannel.name,
});
hasSetDefaultConversation.current = true;
}
const publicConversation = getPublicChannelConversation();
if (publicConversation) {
setActiveConversationState(publicConversation);
hasSetDefaultConversation.current = true;
}
return;
}
// No hash: optionally restore a last-viewed contact once contacts are loaded.
if (!hashConv && getReopenLastConversationEnabled()) {
const lastViewed = getLastViewedConversation();
if (lastViewed?.type !== 'contact') return;
if (!contactsLoaded) return;
const contact =
contacts.find((item) => item.public_key.toLowerCase() === lastViewed.id.toLowerCase()) ||
resolveContactFromHashToken(lastViewed.id, contacts);
if (contact) {
setActiveConversationState({
type: 'contact',
id: contact.public_key,
name: getContactDisplayName(contact.name, contact.public_key),
});
hasSetDefaultConversation.current = true;
return;
}
const publicConversation = getPublicChannelConversation();
if (publicConversation) {
setActiveConversationState(publicConversation);
hasSetDefaultConversation.current = true;
}
}
}, [contacts, channels, activeConversation, contactsLoaded]);
}, [
contacts,
channels,
activeConversation,
contactsLoaded,
getPublicChannelConversation,
hasSetDefaultConversation,
]);
// Keep ref in sync and update URL hash
useEffect(() => {
activeConversationRef.current = activeConversation;
if (activeConversation) {
updateUrlHash(activeConversation);
if (hashSyncEnabledRef.current) {
updateUrlHash(activeConversation);
}
if (getReopenLastConversationEnabled()) {
saveLastViewedConversation(activeConversation);
}
}
}, [activeConversation]);
@@ -142,12 +223,12 @@ export function useConversationRouter({
hasSetDefaultConversation.current = true;
pendingDeleteFallbackRef.current = false;
setActiveConversation({
setActiveConversationState({
type: 'channel',
id: publicChannel.key,
name: publicChannel.name,
});
}, [activeConversation, channels]);
}, [activeConversation, channels, hasSetDefaultConversation, pendingDeleteFallbackRef]);
// Handle conversation selection (closes sidebar on mobile)
const handleSelectConversation = useCallback(
@@ -155,7 +236,7 @@ export function useConversationRouter({
setActiveConversation(conv);
setSidebarOpen(false);
},
[setSidebarOpen]
[setActiveConversation, setSidebarOpen]
);
return {

View File

@@ -137,6 +137,10 @@ vi.mock('../components/ui/sonner', () => ({
}));
import { App } from '../App';
import {
LAST_VIEWED_CONVERSATION_KEY,
REOPEN_LAST_CONVERSATION_KEY,
} from '../utils/lastViewedConversation';
const publicChannel = {
key: '8B3387E9C5CDEA6AC9E5EDBAA115CD72',
@@ -149,6 +153,7 @@ const publicChannel = {
describe('App startup hash resolution', () => {
beforeEach(() => {
vi.clearAllMocks();
localStorage.clear();
window.location.hash = `#contact/${'a'.repeat(64)}/Alice`;
mocks.api.getRadioConfig.mockResolvedValue({
@@ -178,6 +183,7 @@ describe('App startup hash resolution', () => {
afterEach(() => {
window.location.hash = '';
localStorage.clear();
});
it('falls back to Public when contact hash is unresolvable and contacts are empty', async () => {
@@ -189,4 +195,104 @@ describe('App startup hash resolution', () => {
}
});
});
it('restores last viewed channel when hash is empty and reopen preference is enabled', async () => {
  const chatChannel = {
    key: '11111111111111111111111111111111',
    name: 'Ops',
    is_hashtag: false,
    on_radio: false,
    last_read_at: null,
  };
  // Simulate a fresh launch (no hash) with the preference and a saved channel.
  window.location.hash = '';
  localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1');
  localStorage.setItem(
    LAST_VIEWED_CONVERSATION_KEY,
    JSON.stringify({
      type: 'channel',
      id: chatChannel.key,
      name: chatChannel.name,
    })
  );
  mocks.api.getChannels.mockResolvedValue([publicChannel, chatChannel]);

  render(<App />);

  await waitFor(() => {
    for (const node of screen.getAllByTestId('active-conversation')) {
      expect(node).toHaveTextContent(`channel:${chatChannel.key}:${chatChannel.name}`);
    }
  });
  // Restoration is device-local; it must not write a URL hash.
  expect(window.location.hash).toBe('');
});

it('uses Public channel when hash is empty and reopen preference is disabled', async () => {
  const chatChannel = {
    key: '11111111111111111111111111111111',
    name: 'Ops',
    is_hashtag: false,
    on_radio: false,
    last_read_at: null,
  };
  // Saved conversation exists, but the enabling flag is absent — must be ignored.
  window.location.hash = '';
  localStorage.setItem(
    LAST_VIEWED_CONVERSATION_KEY,
    JSON.stringify({
      type: 'channel',
      id: chatChannel.key,
      name: chatChannel.name,
    })
  );
  mocks.api.getChannels.mockResolvedValue([publicChannel, chatChannel]);

  render(<App />);

  await waitFor(() => {
    for (const node of screen.getAllByTestId('active-conversation')) {
      expect(node).toHaveTextContent(`channel:${publicChannel.key}:Public`);
    }
  });
  expect(window.location.hash).toBe('');
});

it('restores last viewed contact from legacy name token when hash is empty and reopen is enabled', async () => {
  const aliceContact = {
    public_key: 'b'.repeat(64),
    name: 'Alice',
    type: 1,
    flags: 0,
    last_path: null,
    last_path_len: -1,
    last_advert: null,
    lat: null,
    lon: null,
    last_seen: null,
    on_radio: false,
    last_contacted: null,
    last_read_at: null,
  };
  // The saved entry stores the contact's display name ("legacy token") rather
  // than a public key; it should still resolve once contacts load.
  window.location.hash = '';
  localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1');
  localStorage.setItem(
    LAST_VIEWED_CONVERSATION_KEY,
    JSON.stringify({
      type: 'contact',
      id: 'Alice',
      name: 'Alice',
    })
  );
  mocks.api.getContacts.mockResolvedValue([aliceContact]);

  render(<App />);

  await waitFor(() => {
    for (const node of screen.getAllByTestId('active-conversation')) {
      expect(node).toHaveTextContent(`contact:${aliceContact.public_key}:Alice`);
    }
  });
  expect(window.location.hash).toBe('');
});
});

View File

@@ -11,6 +11,11 @@ import type {
StatisticsResponse,
} from '../types';
import type { SettingsSection } from '../components/SettingsModal';
import {
LAST_VIEWED_CONVERSATION_KEY,
REOPEN_LAST_CONVERSATION_KEY,
} from '../utils/lastViewedConversation';
import { api } from '../api';
const baseConfig: RadioConfig = {
public_key: 'aa'.repeat(32),
@@ -128,9 +133,16 @@ function openConnectivitySection() {
fireEvent.click(connectivityToggle);
}
// Expand the "Database & Interface" settings section via its toggle button.
function openDatabaseSection() {
  fireEvent.click(screen.getByRole('button', { name: /Database/i }));
}
describe('SettingsModal', () => {
afterEach(() => {
vi.restoreAllMocks();
localStorage.clear();
window.location.hash = '';
});
it('refreshes app settings when opened', async () => {
@@ -291,6 +303,41 @@ describe('SettingsModal', () => {
expect(onClose).not.toHaveBeenCalled();
});
it('stores and clears reopen-last-conversation preference locally', () => {
  // Start on a non-default view so enabling captures it from the hash.
  window.location.hash = '#raw';
  renderModal();
  openDatabaseSection();

  const checkbox = screen.getByLabelText('Reopen to last viewed channel/conversation');
  expect(checkbox).not.toBeChecked();

  // Enabling persists the flag and snapshots the current (raw) view.
  fireEvent.click(checkbox);
  expect(localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY)).toBe('1');
  expect(localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY)).toContain('"type":"raw"');

  // Disabling removes both keys.
  fireEvent.click(checkbox);
  expect(localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY)).toBeNull();
  expect(localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY)).toBeNull();
});

it('purges decrypted raw packets via maintenance endpoint action', async () => {
  const runMaintenanceSpy = vi.spyOn(api, 'runMaintenance').mockResolvedValue({
    packets_deleted: 12,
    vacuumed: true,
  });
  renderModal();
  openDatabaseSection();

  fireEvent.click(screen.getByRole('button', { name: 'Purge Archival Raw Packets' }));
  // The purge button must call the maintenance endpoint with the purge flag.
  await waitFor(() => {
    expect(runMaintenanceSpy).toHaveBeenCalledWith({ purgeLinkedRawPackets: true });
  });
});
it('renders statistics section with fetched data', async () => {
const mockStats: StatisticsResponse = {
busiest_channels_24h: [

View File

@@ -0,0 +1,103 @@
import type { Conversation } from '../types';
import { parseHashConversation } from './urlHash';
export const REOPEN_LAST_CONVERSATION_KEY = 'remoteterm-reopen-last-conversation';
export const LAST_VIEWED_CONVERSATION_KEY = 'remoteterm-last-viewed-conversation';
// Conversation types that may be persisted and restored across sessions.
const SUPPORTED_TYPES: Conversation['type'][] = ['contact', 'channel', 'raw', 'map', 'visualizer'];

/** Type guard narrowing an unknown value to a persistable Conversation['type']. */
function isSupportedType(value: unknown): value is Conversation['type'] {
  if (typeof value !== 'string') return false;
  return SUPPORTED_TYPES.some((t) => t === value);
}
/**
 * Whether the device-local "reopen last conversation" preference is enabled.
 * localStorage access can throw (privacy mode, unavailable storage); that is
 * treated as "disabled".
 */
export function getReopenLastConversationEnabled(): boolean {
  try {
    const stored = localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY);
    return stored === '1';
  } catch {
    return false;
  }
}
/**
 * Persist the device-local reopen preference. Disabling also discards the
 * saved conversation so no stale state lingers on this browser.
 */
export function setReopenLastConversationEnabled(enabled: boolean): void {
  try {
    if (enabled) {
      localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1');
    } else {
      localStorage.removeItem(REOPEN_LAST_CONVERSATION_KEY);
      localStorage.removeItem(LAST_VIEWED_CONVERSATION_KEY);
    }
  } catch {
    // localStorage may be unavailable
  }
}
/**
 * Best-effort persistence of the active conversation. Failures (quota,
 * privacy mode) are silently ignored — this is a convenience feature only.
 */
export function saveLastViewedConversation(conversation: Conversation): void {
  try {
    const serialized = JSON.stringify(conversation);
    localStorage.setItem(LAST_VIEWED_CONVERSATION_KEY, serialized);
  } catch {
    // localStorage may be unavailable
  }
}
/**
 * Read and validate the conversation persisted on this device.
 * Returns null for missing, unparseable, or structurally invalid entries.
 */
export function getLastViewedConversation(): Conversation | null {
  try {
    const stored = localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY);
    if (!stored) return null;
    const candidate = JSON.parse(stored) as Partial<Conversation>;
    if (
      !isSupportedType(candidate.type) ||
      typeof candidate.id !== 'string' ||
      typeof candidate.name !== 'string'
    ) {
      return null;
    }
    // Map entries may carry an optional focus key; every other type is plain.
    if (candidate.type === 'map') {
      return {
        type: 'map',
        id: candidate.id,
        name: candidate.name,
        ...(typeof candidate.mapFocusKey === 'string' && { mapFocusKey: candidate.mapFocusKey }),
      };
    }
    return {
      type: candidate.type,
      id: candidate.id,
      name: candidate.name,
    };
  } catch {
    return null;
  }
}
export function captureLastViewedConversationFromHash(): void {
const hashConversation = parseHashConversation();
if (!hashConversation) return;
if (hashConversation.type === 'raw') {
saveLastViewedConversation({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' });
return;
}
if (hashConversation.type === 'map') {
saveLastViewedConversation({
type: 'map',
id: 'map',
name: 'Node Map',
...(hashConversation.mapFocusKey && { mapFocusKey: hashConversation.mapFocusKey }),
});
return;
}
if (hashConversation.type === 'visualizer') {
saveLastViewedConversation({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' });
return;
}
saveLastViewedConversation({
type: hashConversation.type,
id: hashConversation.name,
name: hashConversation.label || hashConversation.name,
});
}

View File

@@ -0,0 +1,83 @@
import { test, expect } from '@playwright/test';
import { createChannel, deleteChannel } from '../helpers/api';
const REOPEN_LAST_CONVERSATION_KEY = 'remoteterm-reopen-last-conversation';
const LAST_VIEWED_CONVERSATION_KEY = 'remoteterm-last-viewed-conversation';
// Backslash-escape every RegExp metacharacter so `value` matches literally.
function escapeRegex(value: string): string {
  return value.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
test.describe('Reopen last conversation (device-local)', () => {
  let channelName = '';
  let channelKey = '';

  test.beforeAll(async () => {
    // Unique channel name per run so repeated executions cannot collide.
    channelName = `#e2ereopen${Date.now().toString().slice(-6)}`;
    const channel = await createChannel(channelName);
    channelKey = channel.key;
  });

  test.afterAll(async () => {
    try {
      await deleteChannel(channelKey);
    } catch {
      // Best-effort cleanup
    }
  });

  test('reopens last viewed conversation on startup when enabled', async ({ page }) => {
    await page.goto('/');
    await expect(page.getByText('Connected')).toBeVisible();
    await expect.poll(() => new URL(page.url()).hash).toBe('');

    // View the test channel, then enable the reopen preference in Settings.
    await page.getByText(channelName, { exact: true }).first().click();
    await expect(
      page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i'))
    ).toBeVisible();
    await page.getByRole('button', { name: 'Settings' }).click();
    await page.getByRole('button', { name: /Database & Interface/i }).click();
    await page.getByLabel('Reopen to last viewed channel/conversation').check();
    await page.getByRole('button', { name: 'Back to Chat' }).click();

    // Fresh launch path without hash should restore the saved conversation.
    await page.goto('/');
    await expect(
      page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i'))
    ).toBeVisible();
    // Restoration is device-local: the URL hash must remain empty.
    await expect.poll(() => new URL(page.url()).hash).toBe('');
  });

  test('clears local storage and falls back to default when disabled', async ({ page }) => {
    await page.goto('/');
    await expect(page.getByText('Connected')).toBeVisible();
    await page.getByText(channelName, { exact: true }).first().click();
    await expect(
      page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i'))
    ).toBeVisible();
    await page.getByRole('button', { name: 'Settings' }).click();
    await page.getByRole('button', { name: /Database & Interface/i }).click();

    // Toggling on then off must remove both localStorage keys.
    const reopenToggle = page.getByLabel('Reopen to last viewed channel/conversation');
    await reopenToggle.check();
    await reopenToggle.uncheck();
    const localState = await page.evaluate(
      ([enabledKey, lastViewedKey]) => ({
        enabled: localStorage.getItem(enabledKey),
        lastViewed: localStorage.getItem(lastViewedKey),
      }),
      [REOPEN_LAST_CONVERSATION_KEY, LAST_VIEWED_CONVERSATION_KEY]
    );
    expect(localState.enabled).toBeNull();
    expect(localState.lastViewed).toBeNull();

    await page.getByRole('button', { name: 'Back to Chat' }).click();
    // With the preference off, a fresh launch lands on the Public channel.
    await page.goto('/');
    await expect(page.getByPlaceholder(/message\s+Public/i)).toBeVisible();
    await expect.poll(() => new URL(page.url()).hash).toBe('');
  });
});

View File

@@ -887,6 +887,23 @@ class TestRawPacketRepository:
deleted = await RawPacketRepository.prune_old_undecrypted(10)
assert deleted == 0
@pytest.mark.asyncio
async def test_purge_linked_to_messages_deletes_only_linked_packets(self, test_db):
    """Purge of linked raw packets removes only rows that carry a message_id."""
    ts = int(time.time())
    # Two packets become "linked" by marking them decrypted against message ids.
    linked_1, _ = await RawPacketRepository.create(b"\x01\x02\x03", ts)
    linked_2, _ = await RawPacketRepository.create(b"\x04\x05\x06", ts)
    await RawPacketRepository.mark_decrypted(linked_1, 101)
    await RawPacketRepository.mark_decrypted(linked_2, 102)
    await RawPacketRepository.create(b"\x07\x08\x09", ts)  # undecrypted, should remain

    deleted = await RawPacketRepository.purge_linked_to_messages()
    assert deleted == 2

    # Only the never-decrypted packet survives the purge.
    remaining = await RawPacketRepository.get_undecrypted_count()
    assert remaining == 1
class TestMaintenanceEndpoint:
"""Test database maintenance endpoint."""
@@ -909,6 +926,23 @@ class TestMaintenanceEndpoint:
assert result.packets_deleted == 2
assert result.vacuumed is True
@pytest.mark.asyncio
async def test_maintenance_can_purge_linked_raw_packets(self, test_db):
    """Maintenance endpoint can purge raw packets linked to messages."""
    from app.routers.packets import MaintenanceRequest, run_maintenance

    ts = int(time.time())
    # Link both packets to message ids so they qualify for the purge.
    linked_1, _ = await RawPacketRepository.create(b"\x0a\x0b\x0c", ts)
    linked_2, _ = await RawPacketRepository.create(b"\x0d\x0e\x0f", ts)
    await RawPacketRepository.mark_decrypted(linked_1, 201)
    await RawPacketRepository.mark_decrypted(linked_2, 202)

    request = MaintenanceRequest(purge_linked_raw_packets=True)
    result = await run_maintenance(request)
    # Both linked rows are reported deleted and the database is vacuumed.
    assert result.packets_deleted == 2
    assert result.vacuumed is True
class TestHealthEndpointDatabaseSize:
"""Test database size reporting in health endpoint."""

View File

@@ -53,6 +53,16 @@ def test_valid_dist_serves_static_and_spa_fallback(tmp_path):
assert root_response.status_code == 200
assert "index page" in root_response.text
manifest_response = client.get("/site.webmanifest")
assert manifest_response.status_code == 200
assert manifest_response.headers["content-type"].startswith("application/manifest+json")
manifest = manifest_response.json()
assert manifest["start_url"] == "http://testserver/"
assert manifest["scope"] == "http://testserver/"
assert manifest["id"] == "http://testserver/"
assert manifest["display"] == "standalone"
assert manifest["icons"][0]["src"] == "http://testserver/web-app-manifest-192x192.png"
file_response = client.get("/robots.txt")
assert file_response.status_code == 200
assert file_response.text == "User-agent: *"
@@ -64,3 +74,28 @@ def test_valid_dist_serves_static_and_spa_fallback(tmp_path):
asset_response = client.get("/assets/app.js")
assert asset_response.status_code == 200
assert "console.log('ok');" in asset_response.text
def test_webmanifest_uses_forwarded_origin_headers(tmp_path):
    """Manifest URLs must honor X-Forwarded-Proto/Host (reverse-proxy case)."""
    app = FastAPI()
    dist_dir = tmp_path / "frontend" / "dist"
    dist_dir.mkdir(parents=True)
    (dist_dir / "index.html").write_text("<html><body>index page</body></html>")

    registered = register_frontend_static_routes(app, dist_dir)
    assert registered is True

    with TestClient(app) as client:
        response = client.get(
            "/site.webmanifest",
            headers={
                "x-forwarded-proto": "https",
                "x-forwarded-host": "mesh.example.com:8443",
            },
        )
        assert response.status_code == 200
        data = response.json()
        # start_url/scope/id must all reflect the forwarded origin, not the
        # direct testserver origin.
        assert data["start_url"] == "https://mesh.example.com:8443/"
        assert data["scope"] == "https://mesh.example.com:8443/"
        assert data["id"] == "https://mesh.example.com:8443/"

View File

@@ -100,8 +100,8 @@ class TestMigration001:
# Run migrations
applied = await run_migrations(conn)
assert applied == 17 # All 17 migrations run
assert await get_version(conn) == 17
assert applied == 20 # All 20 migrations run
assert await get_version(conn) == 20
# Verify columns exist by inserting and selecting
await conn.execute(
@@ -183,9 +183,9 @@ class TestMigration001:
applied1 = await run_migrations(conn)
applied2 = await run_migrations(conn)
assert applied1 == 17 # All 17 migrations run
assert applied1 == 20 # All 20 migrations run
assert applied2 == 0 # No migrations on second run
assert await get_version(conn) == 17
assert await get_version(conn) == 20
finally:
await conn.close()
@@ -246,8 +246,8 @@ class TestMigration001:
applied = await run_migrations(conn)
# All 17 migrations applied (version incremented) but no error
assert applied == 17
assert await get_version(conn) == 17
assert applied == 20
assert await get_version(conn) == 20
finally:
await conn.close()
@@ -374,10 +374,10 @@ class TestMigration013:
)
await conn.commit()
# Run migration 13 (plus 14+15+16+17 which also run)
# Run migration 13 (plus 14-20 which also run)
applied = await run_migrations(conn)
assert applied == 5
assert await get_version(conn) == 17
assert applied == 8
assert await get_version(conn) == 20
# Verify bots array was created with migrated data
cursor = await conn.execute("SELECT bots FROM app_settings WHERE id = 1")
@@ -431,3 +431,322 @@ class TestMigration013:
assert bots == []
finally:
await conn.close()
class TestMigration018:
"""Test migration 018: drop UNIQUE(data) from raw_packets."""
@pytest.mark.asyncio
async def test_migration_drops_data_unique_constraint(self):
"""Migration rebuilds raw_packets without UNIQUE(data), preserving data."""
conn = await aiosqlite.connect(":memory:")
conn.row_factory = aiosqlite.Row
try:
await set_version(conn, 17)
# Create raw_packets WITH UNIQUE(data) — simulates production schema
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
message_id INTEGER,
payload_hash TEXT
)
""")
await conn.execute(
"CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)"
)
await conn.execute("CREATE INDEX idx_raw_packets_message_id ON raw_packets(message_id)")
# Insert test data
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)",
(1000, b"\x01\x02\x03", "hash_a"),
)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, message_id, payload_hash) VALUES (?, ?, ?, ?)",
(2000, b"\x04\x05\x06", 42, "hash_b"),
)
# Create messages table stub (needed for migration 19)
await conn.execute("""
CREATE TABLE messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
type TEXT NOT NULL,
conversation_key TEXT NOT NULL,
text TEXT NOT NULL,
sender_timestamp INTEGER,
received_at INTEGER NOT NULL,
txt_type INTEGER DEFAULT 0,
signature TEXT,
outgoing INTEGER DEFAULT 0,
acked INTEGER DEFAULT 0,
paths TEXT
)
""")
await conn.execute(
"""CREATE UNIQUE INDEX idx_messages_dedup_null_safe
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))"""
)
await conn.commit()
# Verify autoindex exists before migration
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_raw_packets_1'"
)
assert await cursor.fetchone() is not None
await run_migrations(conn)
assert await get_version(conn) == 20
# Verify autoindex is gone
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_raw_packets_1'"
)
assert await cursor.fetchone() is None
# Verify data is preserved
cursor = await conn.execute("SELECT COUNT(*) FROM raw_packets")
assert (await cursor.fetchone())[0] == 2
cursor = await conn.execute(
"SELECT timestamp, data, message_id, payload_hash FROM raw_packets ORDER BY id"
)
rows = await cursor.fetchall()
assert rows[0]["timestamp"] == 1000
assert bytes(rows[0]["data"]) == b"\x01\x02\x03"
assert rows[0]["message_id"] is None
assert rows[0]["payload_hash"] == "hash_a"
assert rows[1]["message_id"] == 42
# Verify payload_hash unique index still works
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='idx_raw_packets_payload_hash'"
)
assert await cursor.fetchone() is not None
finally:
await conn.close()
@pytest.mark.asyncio
async def test_migration_skips_when_no_unique_constraint(self):
"""Migration is a no-op when UNIQUE(data) is already absent."""
conn = await aiosqlite.connect(":memory:")
conn.row_factory = aiosqlite.Row
try:
await set_version(conn, 17)
# Create raw_packets WITHOUT UNIQUE(data) — fresh install schema
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
message_id INTEGER,
payload_hash TEXT
)
""")
await conn.execute(
"CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)"
)
# Messages stub for migration 19
await conn.execute("""
CREATE TABLE messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
type TEXT NOT NULL,
conversation_key TEXT NOT NULL,
text TEXT NOT NULL,
sender_timestamp INTEGER,
received_at INTEGER NOT NULL,
txt_type INTEGER DEFAULT 0,
signature TEXT,
outgoing INTEGER DEFAULT 0,
acked INTEGER DEFAULT 0,
paths TEXT
)
""")
await conn.execute(
"""CREATE UNIQUE INDEX idx_messages_dedup_null_safe
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))"""
)
await conn.commit()
applied = await run_migrations(conn)
assert applied == 3 # Migrations 18+19+20 run (18+19 skip internally)
assert await get_version(conn) == 20
finally:
await conn.close()
class TestMigration019:
"""Test migration 019: drop UNIQUE constraint from messages."""
@pytest.mark.asyncio
async def test_migration_drops_messages_unique_constraint(self):
"""Migration rebuilds messages without UNIQUE, preserving data and dedup index."""
conn = await aiosqlite.connect(":memory:")
conn.row_factory = aiosqlite.Row
try:
await set_version(conn, 17)
# raw_packets stub (no UNIQUE on data, so migration 18 skips)
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
message_id INTEGER,
payload_hash TEXT
)
""")
# Create messages WITH UNIQUE constraint — simulates production schema
await conn.execute("""
CREATE TABLE messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
type TEXT NOT NULL,
conversation_key TEXT NOT NULL,
text TEXT NOT NULL,
sender_timestamp INTEGER,
received_at INTEGER NOT NULL,
txt_type INTEGER DEFAULT 0,
signature TEXT,
outgoing INTEGER DEFAULT 0,
acked INTEGER DEFAULT 0,
paths TEXT,
UNIQUE(type, conversation_key, text, sender_timestamp)
)
""")
await conn.execute(
"CREATE INDEX idx_messages_conversation ON messages(type, conversation_key)"
)
await conn.execute("CREATE INDEX idx_messages_received ON messages(received_at)")
await conn.execute(
"""CREATE UNIQUE INDEX idx_messages_dedup_null_safe
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))"""
)
# Insert test data
await conn.execute(
"INSERT INTO messages (type, conversation_key, text, sender_timestamp, received_at, paths) "
"VALUES (?, ?, ?, ?, ?, ?)",
("CHAN", "KEY1", "hello world", 1000, 1000, '[{"path":"ab","received_at":1000}]'),
)
await conn.execute(
"INSERT INTO messages (type, conversation_key, text, sender_timestamp, received_at, outgoing) "
"VALUES (?, ?, ?, ?, ?, ?)",
("PRIV", "abc123", "dm text", 2000, 2000, 1),
)
await conn.commit()
# Verify autoindex exists before migration
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_messages_1'"
)
assert await cursor.fetchone() is not None
await run_migrations(conn)
assert await get_version(conn) == 20
# Verify autoindex is gone
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_messages_1'"
)
assert await cursor.fetchone() is None
# Verify data is preserved
cursor = await conn.execute("SELECT COUNT(*) FROM messages")
assert (await cursor.fetchone())[0] == 2
cursor = await conn.execute(
"SELECT type, conversation_key, text, paths, outgoing FROM messages ORDER BY id"
)
rows = await cursor.fetchall()
assert rows[0]["type"] == "CHAN"
assert rows[0]["text"] == "hello world"
assert rows[0]["paths"] == '[{"path":"ab","received_at":1000}]'
assert rows[1]["type"] == "PRIV"
assert rows[1]["outgoing"] == 1
# Verify dedup index still works (INSERT OR IGNORE should ignore duplicates)
cursor = await conn.execute(
"INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp, received_at) "
"VALUES (?, ?, ?, ?, ?)",
("CHAN", "KEY1", "hello world", 1000, 9999),
)
assert cursor.rowcount == 0 # Duplicate ignored
# Verify dedup index exists
cursor = await conn.execute(
"SELECT name FROM sqlite_master WHERE name='idx_messages_dedup_null_safe'"
)
assert await cursor.fetchone() is not None
finally:
await conn.close()
class TestMigration020:
"""Test migration 020: enable WAL mode and incremental auto-vacuum."""
@pytest.mark.asyncio
async def test_migration_enables_wal_and_incremental_auto_vacuum(self, tmp_path):
"""Migration switches journal mode to WAL and auto_vacuum to INCREMENTAL."""
db_path = str(tmp_path / "test.db")
conn = await aiosqlite.connect(db_path)
conn.row_factory = aiosqlite.Row
try:
await set_version(conn, 19)
# Create minimal tables so migration 20 can run
await conn.execute(
"CREATE TABLE raw_packets (id INTEGER PRIMARY KEY, data BLOB NOT NULL)"
)
await conn.execute("CREATE TABLE messages (id INTEGER PRIMARY KEY, text TEXT NOT NULL)")
await conn.commit()
# Verify defaults before migration
cursor = await conn.execute("PRAGMA auto_vacuum")
assert (await cursor.fetchone())[0] == 0 # NONE
cursor = await conn.execute("PRAGMA journal_mode")
assert (await cursor.fetchone())[0] == "delete"
applied = await run_migrations(conn)
assert applied == 1
assert await get_version(conn) == 20
# Verify WAL mode
cursor = await conn.execute("PRAGMA journal_mode")
assert (await cursor.fetchone())[0] == "wal"
# Verify incremental auto-vacuum
cursor = await conn.execute("PRAGMA auto_vacuum")
assert (await cursor.fetchone())[0] == 2 # INCREMENTAL
finally:
await conn.close()
@pytest.mark.asyncio
async def test_migration_is_idempotent(self, tmp_path):
"""Running migration 20 twice doesn't error or re-VACUUM."""
db_path = str(tmp_path / "test.db")
conn = await aiosqlite.connect(db_path)
conn.row_factory = aiosqlite.Row
try:
# Set up as if already at version 20 with WAL + incremental
await conn.execute("PRAGMA auto_vacuum = INCREMENTAL")
await conn.execute("PRAGMA journal_mode = WAL")
await conn.execute(
"CREATE TABLE raw_packets (id INTEGER PRIMARY KEY, data BLOB NOT NULL)"
)
await conn.execute("CREATE TABLE messages (id INTEGER PRIMARY KEY, text TEXT NOT NULL)")
await conn.commit()
await set_version(conn, 20)
applied = await run_migrations(conn)
assert applied == 0 # Already at version 20
# Still WAL + INCREMENTAL
cursor = await conn.execute("PRAGMA journal_mode")
assert (await cursor.fetchone())[0] == "wal"
cursor = await conn.execute("PRAGMA auto_vacuum")
assert (await cursor.fetchone())[0] == 2
finally:
await conn.close()

View File

@@ -283,7 +283,7 @@ class TestResendChannelMessage:
assert msg_id is not None
with patch("app.routers.messages.require_connected", return_value=mc):
result = await resend_channel_message(msg_id)
result = await resend_channel_message(msg_id, new_timestamp=False)
assert result["status"] == "ok"
assert result["message_id"] == msg_id
@@ -316,11 +316,42 @@ class TestResendChannelMessage:
patch("app.routers.messages.require_connected", return_value=mc),
pytest.raises(HTTPException) as exc_info,
):
await resend_channel_message(msg_id)
await resend_channel_message(msg_id, new_timestamp=False)
assert exc_info.value.status_code == 400
assert "expired" in exc_info.value.detail.lower()
@pytest.mark.asyncio
async def test_resend_new_timestamp_collision_returns_original_id(self, test_db):
"""When new-timestamp resend collides (same second), return original ID gracefully."""
mc = _make_mc(name="MyNode")
chan_key = "dd" * 16
await ChannelRepository.upsert(key=chan_key, name="#collision")
now = int(time.time())
msg_id = await MessageRepository.create(
msg_type="CHAN",
text="MyNode: duplicate",
conversation_key=chan_key.upper(),
sender_timestamp=now,
received_at=now,
outgoing=True,
)
assert msg_id is not None
with (
patch("app.routers.messages.require_connected", return_value=mc),
patch("app.routers.messages.broadcast_event"),
patch("app.routers.messages.time") as mock_time,
):
# Force the same second so MessageRepository.create returns None (duplicate)
mock_time.time.return_value = float(now)
result = await resend_channel_message(msg_id, new_timestamp=True)
# Should succeed gracefully, returning the original message ID
assert result["status"] == "ok"
assert result["message_id"] == msg_id
@pytest.mark.asyncio
async def test_resend_non_outgoing_returns_400(self, test_db):
"""Resend of incoming message fails."""
@@ -343,7 +374,7 @@ class TestResendChannelMessage:
patch("app.routers.messages.require_connected", return_value=mc),
pytest.raises(HTTPException) as exc_info,
):
await resend_channel_message(msg_id)
await resend_channel_message(msg_id, new_timestamp=False)
assert exc_info.value.status_code == 400
assert "outgoing" in exc_info.value.detail.lower()
@@ -369,7 +400,7 @@ class TestResendChannelMessage:
patch("app.routers.messages.require_connected", return_value=mc),
pytest.raises(HTTPException) as exc_info,
):
await resend_channel_message(msg_id)
await resend_channel_message(msg_id, new_timestamp=False)
assert exc_info.value.status_code == 400
assert "channel" in exc_info.value.detail.lower()
@@ -383,7 +414,7 @@ class TestResendChannelMessage:
patch("app.routers.messages.require_connected", return_value=mc),
pytest.raises(HTTPException) as exc_info,
):
await resend_channel_message(999999)
await resend_channel_message(999999, new_timestamp=False)
assert exc_info.value.status_code == 404
@@ -406,7 +437,126 @@ class TestResendChannelMessage:
assert msg_id is not None
with patch("app.routers.messages.require_connected", return_value=mc):
await resend_channel_message(msg_id)
await resend_channel_message(msg_id, new_timestamp=False)
call_kwargs = mc.commands.send_chan_msg.await_args.kwargs
assert call_kwargs["msg"] == "hello world"
@pytest.mark.asyncio
async def test_resend_new_timestamp_skips_window(self, test_db):
"""new_timestamp=True succeeds even when the 30s window has expired."""
mc = _make_mc(name="MyNode")
chan_key = "dd" * 16
await ChannelRepository.upsert(key=chan_key, name="#old")
old_ts = int(time.time()) - 60 # 60 seconds ago — outside byte-perfect window
msg_id = await MessageRepository.create(
msg_type="CHAN",
text="MyNode: old message",
conversation_key=chan_key.upper(),
sender_timestamp=old_ts,
received_at=old_ts,
outgoing=True,
)
assert msg_id is not None
with (
patch("app.routers.messages.require_connected", return_value=mc),
patch("app.routers.messages.broadcast_event"),
):
result = await resend_channel_message(msg_id, new_timestamp=True)
assert result["status"] == "ok"
# Should return a NEW message id, not the original
assert result["message_id"] != msg_id
@pytest.mark.asyncio
async def test_resend_new_timestamp_creates_new_message(self, test_db):
"""new_timestamp=True creates a new DB row with a different sender_timestamp."""
mc = _make_mc(name="MyNode")
chan_key = "dd" * 16
await ChannelRepository.upsert(key=chan_key, name="#new")
old_ts = int(time.time()) - 10
msg_id = await MessageRepository.create(
msg_type="CHAN",
text="MyNode: test",
conversation_key=chan_key.upper(),
sender_timestamp=old_ts,
received_at=old_ts,
outgoing=True,
)
assert msg_id is not None
with (
patch("app.routers.messages.require_connected", return_value=mc),
patch("app.routers.messages.broadcast_event"),
):
result = await resend_channel_message(msg_id, new_timestamp=True)
new_msg_id = result["message_id"]
new_msg = await MessageRepository.get_by_id(new_msg_id)
original_msg = await MessageRepository.get_by_id(msg_id)
assert new_msg is not None
assert original_msg is not None
assert new_msg.sender_timestamp != original_msg.sender_timestamp
assert new_msg.text == original_msg.text
assert new_msg.outgoing is True
@pytest.mark.asyncio
async def test_resend_new_timestamp_broadcasts_message(self, test_db):
"""new_timestamp=True broadcasts the new message via WebSocket."""
mc = _make_mc(name="MyNode")
chan_key = "dd" * 16
await ChannelRepository.upsert(key=chan_key, name="#broadcast")
old_ts = int(time.time()) - 5
msg_id = await MessageRepository.create(
msg_type="CHAN",
text="MyNode: broadcast test",
conversation_key=chan_key.upper(),
sender_timestamp=old_ts,
received_at=old_ts,
outgoing=True,
)
assert msg_id is not None
with (
patch("app.routers.messages.require_connected", return_value=mc),
patch("app.routers.messages.broadcast_event") as mock_broadcast,
):
result = await resend_channel_message(msg_id, new_timestamp=True)
mock_broadcast.assert_called_once()
event_type, event_data = mock_broadcast.call_args.args
assert event_type == "message"
assert event_data["id"] == result["message_id"]
assert event_data["outgoing"] is True
@pytest.mark.asyncio
async def test_resend_byte_perfect_still_enforces_window(self, test_db):
"""Default (byte-perfect) resend still enforces the 30s window."""
mc = _make_mc(name="MyNode")
chan_key = "dd" * 16
await ChannelRepository.upsert(key=chan_key, name="#window")
old_ts = int(time.time()) - 60
msg_id = await MessageRepository.create(
msg_type="CHAN",
text="MyNode: expired",
conversation_key=chan_key.upper(),
sender_timestamp=old_ts,
received_at=old_ts,
outgoing=True,
)
assert msg_id is not None
with (
patch("app.routers.messages.require_connected", return_value=mc),
pytest.raises(HTTPException) as exc_info,
):
await resend_channel_message(msg_id, new_timestamp=False)
assert exc_info.value.status_code == 400
assert "expired" in exc_info.value.detail.lower()