diff --git a/README.md b/README.md index 042819f..b39a3f2 100644 --- a/README.md +++ b/README.md @@ -191,7 +191,6 @@ npm run build # build the frontend | `MESHCORE_BLE_PIN` | | BLE PIN (required when BLE address is set) | | `MESHCORE_LOG_LEVEL` | INFO | DEBUG, INFO, WARNING, ERROR | | `MESHCORE_DATABASE_PATH` | data/meshcore.db | SQLite database path | -| `MESHCORE_MAX_RADIO_CONTACTS` | 200 | Max recent contacts to keep on radio for DM ACKs | Only one transport may be active at a time. If multiple are set, the server will refuse to start. diff --git a/app/database.py b/app/database.py index 23a2e3d..6f886e1 100644 --- a/app/database.py +++ b/app/database.py @@ -41,12 +41,13 @@ CREATE TABLE IF NOT EXISTS messages ( txt_type INTEGER DEFAULT 0, signature TEXT, outgoing INTEGER DEFAULT 0, - acked INTEGER DEFAULT 0, + acked INTEGER DEFAULT 0 -- Deduplication: identical text + timestamp in the same conversation is treated as a -- mesh echo/repeat. Second-precision timestamps mean two intentional identical messages -- within the same second would collide, but this is not feasible in practice — LoRa -- transmission takes several seconds per message, and the UI clears the input on send. - UNIQUE(type, conversation_key, text, sender_timestamp) + -- Enforced via idx_messages_dedup_null_safe (unique index) rather than a table constraint + -- to avoid the storage overhead of SQLite's autoindex duplicating every message text. 
); CREATE TABLE IF NOT EXISTS raw_packets ( @@ -60,6 +61,8 @@ CREATE TABLE IF NOT EXISTS raw_packets ( CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(type, conversation_key); CREATE INDEX IF NOT EXISTS idx_messages_received ON messages(received_at); +CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe + ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0)); CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id); CREATE UNIQUE INDEX IF NOT EXISTS idx_raw_packets_payload_hash ON raw_packets(payload_hash); CREATE INDEX IF NOT EXISTS idx_contacts_on_radio ON contacts(on_radio); @@ -76,6 +79,17 @@ class Database: Path(self.db_path).parent.mkdir(parents=True, exist_ok=True) self._connection = await aiosqlite.connect(self.db_path) self._connection.row_factory = aiosqlite.Row + + # WAL mode: faster writes, concurrent readers during writes, no journal file churn. + # Persists in the DB file but we set it explicitly on every connection. + await self._connection.execute("PRAGMA journal_mode = WAL") + + # Incremental auto-vacuum: freed pages are reclaimable via + # PRAGMA incremental_vacuum without a full VACUUM. Must be set before + # the first table is created (for new databases); for existing databases + # migration 20 handles the one-time VACUUM to restructure the file. 
+ await self._connection.execute("PRAGMA auto_vacuum = INCREMENTAL") + await self._connection.executescript(SCHEMA) await self._connection.commit() logger.debug("Database schema initialized") diff --git a/app/frontend_static.py b/app/frontend_static.py index e7a735c..ab53446 100644 --- a/app/frontend_static.py +++ b/app/frontend_static.py @@ -1,13 +1,27 @@ import logging from pathlib import Path -from fastapi import FastAPI, HTTPException -from fastapi.responses import FileResponse +from fastapi import FastAPI, HTTPException, Request +from fastapi.responses import FileResponse, JSONResponse from fastapi.staticfiles import StaticFiles logger = logging.getLogger(__name__) +def _resolve_request_origin(request: Request) -> str: + """Resolve the external origin, honoring common reverse-proxy headers.""" + forwarded_proto = request.headers.get("x-forwarded-proto") + forwarded_host = request.headers.get("x-forwarded-host") + + if forwarded_proto and forwarded_host: + proto = forwarded_proto.split(",")[0].strip() + host = forwarded_host.split(",")[0].strip() + if proto and host: + return f"{proto}://{host}" + + return str(request.base_url).rstrip("/") + + def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool: """Register frontend static file routes if a built frontend is available. 
@@ -55,6 +69,41 @@ def register_frontend_static_routes(app: FastAPI, frontend_dir: Path) -> bool: """Serve the frontend index.html.""" return FileResponse(index_file) + @app.get("/site.webmanifest") + async def serve_webmanifest(request: Request): + """Serve a dynamic web manifest using the active request origin.""" + origin = _resolve_request_origin(request) + manifest = { + "name": "RemoteTerm for MeshCore", + "short_name": "RemoteTerm", + "id": f"{origin}/", + "start_url": f"{origin}/", + "scope": f"{origin}/", + "display": "standalone", + "display_override": ["window-controls-overlay", "standalone", "fullscreen"], + "theme_color": "#111419", + "background_color": "#111419", + "icons": [ + { + "src": f"{origin}/web-app-manifest-192x192.png", + "sizes": "192x192", + "type": "image/png", + "purpose": "maskable", + }, + { + "src": f"{origin}/web-app-manifest-512x512.png", + "sizes": "512x512", + "type": "image/png", + "purpose": "maskable", + }, + ], + } + return JSONResponse( + manifest, + media_type="application/manifest+json", + headers={"Cache-Control": "no-store"}, + ) + @app.get("/{path:path}") async def serve_frontend(path: str): """Serve frontend files, falling back to index.html for SPA routing.""" diff --git a/app/migrations.py b/app/migrations.py index 0782fd7..4588003 100644 --- a/app/migrations.py +++ b/app/migrations.py @@ -156,6 +156,27 @@ async def run_migrations(conn: aiosqlite.Connection) -> int: await set_version(conn, 17) applied += 1 + # Migration 18: Drop UNIQUE(data) constraint on raw_packets (redundant with payload_hash) + if version < 18: + logger.info("Applying migration 18: drop raw_packets UNIQUE(data) constraint") + await _migrate_018_drop_raw_packets_data_unique(conn) + await set_version(conn, 18) + applied += 1 + + # Migration 19: Drop UNIQUE constraint on messages (redundant with dedup_null_safe index) + if version < 19: + logger.info("Applying migration 19: drop messages UNIQUE constraint") + await 
_migrate_019_drop_messages_unique_constraint(conn) + await set_version(conn, 19) + applied += 1 + + # Migration 20: Enable WAL journal mode and incremental auto-vacuum + if version < 20: + logger.info("Applying migration 20: enable WAL mode and incremental auto-vacuum") + await _migrate_020_enable_wal_and_auto_vacuum(conn) + await set_version(conn, 20) + applied += 1 + if applied > 0: logger.info( "Applied %d migration(s), schema now at version %d", applied, await get_version(conn) @@ -1054,3 +1075,180 @@ async def _migrate_017_drop_experimental_channel_double_send(conn: aiosqlite.Con raise await conn.commit() + + +async def _migrate_018_drop_raw_packets_data_unique(conn: aiosqlite.Connection) -> None: + """ + Drop the UNIQUE constraint on raw_packets.data via table rebuild. + + This constraint creates a large autoindex (~30 MB on a 340K-row database) that + stores a complete copy of every raw packet BLOB in a B-tree. Deduplication is + already handled by the unique index on payload_hash, making the data UNIQUE + constraint pure storage overhead. + + Requires table recreation since SQLite doesn't support DROP CONSTRAINT. 
+ """ + # Check if the autoindex exists (indicates UNIQUE constraint on data) + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE type='index' " + "AND name='sqlite_autoindex_raw_packets_1'" + ) + if not await cursor.fetchone(): + logger.debug("raw_packets.data UNIQUE constraint already absent, skipping rebuild") + await conn.commit() + return + + logger.info("Rebuilding raw_packets table to remove UNIQUE(data) constraint...") + + # Get current columns from the existing table + cursor = await conn.execute("PRAGMA table_info(raw_packets)") + old_cols = {col[1] for col in await cursor.fetchall()} + + # Target schema without UNIQUE on data + await conn.execute(""" + CREATE TABLE raw_packets_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp INTEGER NOT NULL, + data BLOB NOT NULL, + message_id INTEGER, + payload_hash TEXT, + FOREIGN KEY (message_id) REFERENCES messages(id) + ) + """) + + # Copy only columns that exist in both old and new tables + new_cols = {"id", "timestamp", "data", "message_id", "payload_hash"} + copy_cols = ", ".join(sorted(c for c in new_cols if c in old_cols)) + + await conn.execute( + f"INSERT INTO raw_packets_new ({copy_cols}) SELECT {copy_cols} FROM raw_packets" + ) + await conn.execute("DROP TABLE raw_packets") + await conn.execute("ALTER TABLE raw_packets_new RENAME TO raw_packets") + + # Recreate indexes + await conn.execute( + "CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)" + ) + await conn.execute("CREATE INDEX idx_raw_packets_message_id ON raw_packets(message_id)") + + await conn.commit() + logger.info("raw_packets table rebuilt without UNIQUE(data) constraint") + + +async def _migrate_019_drop_messages_unique_constraint(conn: aiosqlite.Connection) -> None: + """ + Drop the UNIQUE(type, conversation_key, text, sender_timestamp) constraint on messages. + + This constraint creates a large autoindex (~13 MB on a 112K-row database) that + stores the full message text in a B-tree. 
The idx_messages_dedup_null_safe unique + index already provides identical dedup protection — no rows have NULL + sender_timestamp since migration 15 backfilled them all. + + INSERT OR IGNORE still works correctly because it checks all unique constraints, + including unique indexes like idx_messages_dedup_null_safe. + + Requires table recreation since SQLite doesn't support DROP CONSTRAINT. + """ + # Check if the autoindex exists (indicates UNIQUE constraint) + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE type='index' AND name='sqlite_autoindex_messages_1'" + ) + if not await cursor.fetchone(): + logger.debug("messages UNIQUE constraint already absent, skipping rebuild") + await conn.commit() + return + + logger.info("Rebuilding messages table to remove UNIQUE constraint...") + + # Get current columns from the existing table + cursor = await conn.execute("PRAGMA table_info(messages)") + old_cols = {col[1] for col in await cursor.fetchall()} + + # Target schema without the UNIQUE table constraint + await conn.execute(""" + CREATE TABLE messages_new ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + type TEXT NOT NULL, + conversation_key TEXT NOT NULL, + text TEXT NOT NULL, + sender_timestamp INTEGER, + received_at INTEGER NOT NULL, + txt_type INTEGER DEFAULT 0, + signature TEXT, + outgoing INTEGER DEFAULT 0, + acked INTEGER DEFAULT 0, + paths TEXT + ) + """) + + # Copy only columns that exist in both old and new tables + new_cols = { + "id", + "type", + "conversation_key", + "text", + "sender_timestamp", + "received_at", + "txt_type", + "signature", + "outgoing", + "acked", + "paths", + } + copy_cols = ", ".join(sorted(c for c in new_cols if c in old_cols)) + + await conn.execute(f"INSERT INTO messages_new ({copy_cols}) SELECT {copy_cols} FROM messages") + await conn.execute("DROP TABLE messages") + await conn.execute("ALTER TABLE messages_new RENAME TO messages") + + # Recreate indexes + await conn.execute("CREATE INDEX idx_messages_conversation ON 
messages(type, conversation_key)") + await conn.execute("CREATE INDEX idx_messages_received ON messages(received_at)") + await conn.execute( + """CREATE UNIQUE INDEX idx_messages_dedup_null_safe + ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))""" + ) + + await conn.commit() + logger.info("messages table rebuilt without UNIQUE constraint") + + +async def _migrate_020_enable_wal_and_auto_vacuum(conn: aiosqlite.Connection) -> None: + """ + Enable WAL journal mode and incremental auto-vacuum. + + WAL (Write-Ahead Logging): + - Faster writes: appends to a WAL file instead of rewriting the main DB + - Concurrent reads during writes (readers don't block writers) + - No journal file create/delete churn on every commit + + Incremental auto-vacuum: + - Pages freed by DELETE become reclaimable without a full VACUUM + - Call PRAGMA incremental_vacuum to reclaim on demand + - Less overhead than FULL auto-vacuum (which reorganizes on every commit) + + auto_vacuum mode change requires a VACUUM to restructure the file. + The VACUUM is performed before switching to WAL so it runs under the + current journal mode; WAL is then set as the final step. 
+ """ + # Check current auto_vacuum mode + cursor = await conn.execute("PRAGMA auto_vacuum") + row = await cursor.fetchone() + current_auto_vacuum = row[0] if row else 0 + + if current_auto_vacuum != 2: # 2 = INCREMENTAL + logger.info("Switching auto_vacuum to INCREMENTAL (requires VACUUM)...") + await conn.execute("PRAGMA auto_vacuum = INCREMENTAL") + await conn.execute("VACUUM") + logger.info("VACUUM complete, auto_vacuum set to INCREMENTAL") + else: + logger.debug("auto_vacuum already INCREMENTAL, skipping VACUUM") + + # Enable WAL mode (idempotent — returns current mode) + cursor = await conn.execute("PRAGMA journal_mode = WAL") + row = await cursor.fetchone() + mode = row[0] if row else "unknown" + logger.info("Journal mode set to %s", mode) + + await conn.commit() diff --git a/app/repository.py b/app/repository.py index b090b69..4b1b39d 100644 --- a/app/repository.py +++ b/app/repository.py @@ -801,6 +801,13 @@ class RawPacketRepository: await db.conn.commit() return cursor.rowcount + @staticmethod + async def purge_linked_to_messages() -> int: + """Delete raw packets that are already linked to a stored message.""" + cursor = await db.conn.execute("DELETE FROM raw_packets WHERE message_id IS NOT NULL") + await db.conn.commit() + return cursor.rowcount + @staticmethod async def get_undecrypted_text_messages() -> list[tuple[int, bytes, int]]: """Get all undecrypted TEXT_MESSAGE packets as (id, data, timestamp) tuples. diff --git a/app/routers/messages.py b/app/routers/messages.py index 5ce7f10..314ce22 100644 --- a/app/routers/messages.py +++ b/app/routers/messages.py @@ -306,10 +306,17 @@ RESEND_WINDOW_SECONDS = 30 @router.post("/channel/{message_id}/resend") -async def resend_channel_message(message_id: int) -> dict: - """Resend a channel message within 30 seconds of original send. +async def resend_channel_message( + message_id: int, + new_timestamp: bool = Query(default=False), +) -> dict: + """Resend a channel message. 
- Performs a byte-perfect resend using the same timestamp bytes as the original. + When new_timestamp=False (default): byte-perfect resend using the original timestamp. + Only allowed within 30 seconds of the original send. + + When new_timestamp=True: resend with a fresh timestamp so repeaters treat it as a + new packet. Creates a new message row in the database. No time window restriction. """ mc = require_connected() @@ -328,16 +335,22 @@ async def resend_channel_message(message_id: int) -> dict: if msg.sender_timestamp is None: raise HTTPException(status_code=400, detail="Message has no timestamp") - elapsed = int(time.time()) - msg.sender_timestamp - if elapsed > RESEND_WINDOW_SECONDS: - raise HTTPException(status_code=400, detail="Resend window has expired (30 seconds)") + # Byte-perfect resend enforces the 30s window; new-timestamp resend does not + if not new_timestamp: + elapsed = int(time.time()) - msg.sender_timestamp + if elapsed > RESEND_WINDOW_SECONDS: + raise HTTPException(status_code=400, detail="Resend window has expired (30 seconds)") db_channel = await ChannelRepository.get_by_key(msg.conversation_key) if not db_channel: raise HTTPException(status_code=404, detail=f"Channel {msg.conversation_key} not found") - # Reconstruct timestamp bytes - timestamp_bytes = msg.sender_timestamp.to_bytes(4, "little") + # Choose timestamp: original for byte-perfect, fresh for new-timestamp + if new_timestamp: + now = int(time.time()) + timestamp_bytes = now.to_bytes(4, "little") + else: + timestamp_bytes = msg.sender_timestamp.to_bytes(4, "little") # Strip sender prefix: DB stores "RadioName: message" but radio needs "message" radio_name = mc.self_info.get("name", "") if mc.self_info else "" @@ -374,5 +387,47 @@ async def resend_channel_message(message_id: int) -> dict: status_code=500, detail=f"Failed to resend message: {result.payload}" ) + # For new-timestamp resend, create a new message row and broadcast it + if new_timestamp: + new_msg_id = await 
MessageRepository.create( + msg_type="CHAN", + text=msg.text, + conversation_key=msg.conversation_key, + sender_timestamp=now, + received_at=now, + outgoing=True, + ) + if new_msg_id is None: + # Timestamp-second collision (same text+channel within the same second). + # The radio already transmitted, so log and return the original ID rather + # than surfacing a 500 for a message that was successfully sent over the air. + logger.warning( + "Duplicate timestamp collision resending message %d — radio sent but DB row not created", + message_id, + ) + return {"status": "ok", "message_id": message_id} + + broadcast_event( + "message", + Message( + id=new_msg_id, + type="CHAN", + conversation_key=msg.conversation_key, + text=msg.text, + sender_timestamp=now, + received_at=now, + outgoing=True, + acked=0, + ).model_dump(), + ) + + logger.info( + "Resent channel message %d as new message %d to %s", + message_id, + new_msg_id, + db_channel.name, + ) + return {"status": "ok", "message_id": new_msg_id} + logger.info("Resent channel message %d to %s", message_id, db_channel.name) return {"status": "ok", "message_id": message_id} diff --git a/app/routers/packets.py b/app/routers/packets.py index 554f5f3..049ebb1 100644 --- a/app/routers/packets.py +++ b/app/routers/packets.py @@ -236,8 +236,12 @@ async def decrypt_historical_packets( class MaintenanceRequest(BaseModel): - prune_undecrypted_days: int = Field( - ge=1, description="Delete undecrypted packets older than this many days" + prune_undecrypted_days: int | None = Field( + default=None, ge=1, description="Delete undecrypted packets older than this many days" + ) + purge_linked_raw_packets: bool = Field( + default=False, + description="Delete raw packets already linked to a stored message", ) @@ -249,18 +253,30 @@ class MaintenanceResult(BaseModel): @router.post("/maintenance", response_model=MaintenanceResult) async def run_maintenance(request: MaintenanceRequest) -> MaintenanceResult: """ - Clean up old undecrypted 
packets and reclaim disk space. + Run packet maintenance tasks and reclaim disk space. - - Deletes undecrypted packets older than the specified number of days + - Optionally deletes undecrypted packets older than the specified number of days + - Optionally deletes raw packets already linked to stored messages - Runs VACUUM to reclaim disk space """ - logger.info( - "Running maintenance: pruning packets older than %d days", request.prune_undecrypted_days - ) + deleted = 0 - # Prune old undecrypted packets - deleted = await RawPacketRepository.prune_old_undecrypted(request.prune_undecrypted_days) - logger.info("Deleted %d old undecrypted packets", deleted) + if request.prune_undecrypted_days is not None: + logger.info( + "Running maintenance: pruning undecrypted packets older than %d days", + request.prune_undecrypted_days, + ) + pruned_undecrypted = await RawPacketRepository.prune_old_undecrypted( + request.prune_undecrypted_days + ) + deleted += pruned_undecrypted + logger.info("Deleted %d old undecrypted packets", pruned_undecrypted) + + if request.purge_linked_raw_packets: + logger.info("Running maintenance: purging raw packets linked to stored messages") + purged_linked = await RawPacketRepository.purge_linked_to_messages() + deleted += purged_linked + logger.info("Deleted %d linked raw packets", purged_linked) # Run VACUUM to reclaim space on a dedicated connection async with aiosqlite.connect(db.db_path) as vacuum_conn: diff --git a/frontend/public/site.webmanifest b/frontend/public/site.webmanifest deleted file mode 100644 index 92caeb2..0000000 --- a/frontend/public/site.webmanifest +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "RemoteTerm for MeshCore", - "short_name": "RemoteTerm", - "icons": [ - { - "src": "/web-app-manifest-192x192.png", - "sizes": "192x192", - "type": "image/png", - "purpose": "maskable" - }, - { - "src": "/web-app-manifest-512x512.png", - "sizes": "512x512", - "type": "image/png", - "purpose": "maskable" - } - ], - "theme_color": 
"#ffffff", - "background_color": "#ffffff", - "display": "standalone" -} \ No newline at end of file diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index c4b023e..5257af0 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -371,16 +371,21 @@ export function App() { ); // Handle resend channel message - const handleResendChannelMessage = useCallback(async (messageId: number) => { - try { - await api.resendChannelMessage(messageId); - toast.success('Message resent'); - } catch (err) { - toast.error('Failed to resend', { - description: err instanceof Error ? err.message : 'Unknown error', - }); - } - }, []); + const handleResendChannelMessage = useCallback( + async (messageId: number, newTimestamp?: boolean) => { + try { + // New-timestamp resend creates a new message; the backend broadcast_event + // will add it to the conversation via WebSocket. + await api.resendChannelMessage(messageId, newTimestamp); + toast.success(newTimestamp ? 'Message resent with new timestamp' : 'Message resent'); + } catch (err) { + toast.error('Failed to resend', { + description: err instanceof Error ? err.message : 'Unknown error', + }); + } + }, + [] + ); // Handle sender click to add mention const handleSenderClick = useCallback((sender: string) => { diff --git a/frontend/src/api.ts b/frontend/src/api.ts index 2b94af7..9fdee96 100644 --- a/frontend/src/api.ts +++ b/frontend/src/api.ts @@ -167,10 +167,11 @@ export const api = { method: 'POST', body: JSON.stringify({ channel_key: channelKey, text }), }), - resendChannelMessage: (messageId: number) => - fetchJson<{ status: string; message_id: number }>(`/messages/channel/${messageId}/resend`, { - method: 'POST', - }), + resendChannelMessage: (messageId: number, newTimestamp?: boolean) => + fetchJson<{ status: string; message_id: number }>( + `/messages/channel/${messageId}/resend${newTimestamp ? 
'?new_timestamp=true' : ''}`, + { method: 'POST' } + ), // Packets getUndecryptedPacketCount: () => fetchJson<{ count: number }>('/packets/undecrypted/count'), @@ -183,10 +184,17 @@ export const api = { method: 'POST', body: JSON.stringify(params), }), - runMaintenance: (pruneUndecryptedDays: number) => + runMaintenance: (options: { pruneUndecryptedDays?: number; purgeLinkedRawPackets?: boolean }) => fetchJson('/packets/maintenance', { method: 'POST', - body: JSON.stringify({ prune_undecrypted_days: pruneUndecryptedDays }), + body: JSON.stringify({ + ...(options.pruneUndecryptedDays !== undefined && { + prune_undecrypted_days: options.pruneUndecryptedDays, + }), + ...(options.purgeLinkedRawPackets !== undefined && { + purge_linked_raw_packets: options.purgeLinkedRawPackets, + }), + }), }), // Read State diff --git a/frontend/src/components/MessageInput.tsx b/frontend/src/components/MessageInput.tsx index f8f628c..2d83b78 100644 --- a/frontend/src/components/MessageInput.tsx +++ b/frontend/src/components/MessageInput.tsx @@ -166,19 +166,26 @@ export const MessageInput = forwardRef(fu // For repeater mode, always allow submit (empty = guest login) const canSubmit = isRepeaterMode ? true : text.trim().length > 0; - // Show character counter for messages (not repeater mode or raw) + // Show counter for messages (not repeater mode or raw). + // Desktop: always visible. Mobile: only show count after 100 characters. const showCharCounter = !isRepeaterMode && limits !== null; + const showMobileCounterValue = text.length > 100; return (
setText(e.target.value)} onKeyDown={handleKeyDown} @@ -206,25 +213,53 @@ export const MessageInput = forwardRef(fu
{showCharCounter && ( -
- - {textByteLen}/{limits!.hardLimit}b{remaining < 0 && ` (${remaining})`} - - {warningMessage && ( - - — {warningMessage} + <> +
+ + {textByteLen}/{limits!.hardLimit} + {remaining < 0 && ` (${remaining})`} + {warningMessage && ( + + — {warningMessage} + + )} +
+ + {(showMobileCounterValue || warningMessage) && ( +
+ {showMobileCounterValue && ( + + {textByteLen}/{limits!.hardLimit} + {remaining < 0 && ` (${remaining})`} + + )} + {warningMessage && ( + + — {warningMessage} + + )} +
)} -
+ )}
); diff --git a/frontend/src/components/MessageList.tsx b/frontend/src/components/MessageList.tsx index 786a6a7..7555ae4 100644 --- a/frontend/src/components/MessageList.tsx +++ b/frontend/src/components/MessageList.tsx @@ -23,7 +23,7 @@ interface MessageListProps { hasOlderMessages?: boolean; onSenderClick?: (sender: string) => void; onLoadOlder?: () => void; - onResendChannelMessage?: (messageId: number) => void; + onResendChannelMessage?: (messageId: number, newTimestamp?: boolean) => void; radioName?: string; config?: RadioConfig | null; } @@ -156,12 +156,11 @@ export function MessageList({ const [selectedPath, setSelectedPath] = useState<{ paths: MessagePath[]; senderInfo: SenderInfo; + messageId?: number; + isOutgoingChan?: boolean; } | null>(null); const [resendableIds, setResendableIds] = useState>(new Set()); const resendTimersRef = useRef>>(new Map()); - const activeBurstsRef = useRef[]>>(new Map()); - const onResendRef = useRef(onResendChannelMessage); - onResendRef.current = onResendChannelMessage; // Capture scroll state in the scroll handler BEFORE any state updates const scrollStateRef = useRef({ @@ -262,17 +261,6 @@ export function MessageList({ }; }, [messages, onResendChannelMessage]); - // Clean up burst timers on unmount - useEffect(() => { - const bursts = activeBurstsRef.current; - return () => { - for (const timers of bursts.values()) { - for (const t of timers) clearTimeout(t); - } - bursts.clear(); - }; - }, []); - // Handle scroll - capture state and detect when user is near top/bottom const handleScroll = useCallback(() => { if (!listRef.current) return; @@ -315,6 +303,21 @@ export function MessageList({ [messages] ); + // Sender info for outgoing messages (used by path modal on own messages) + const selfSenderInfo = useMemo( + () => ({ + name: config?.name || 'Unknown', + publicKeyOrPrefix: config?.public_key || '', + lat: config?.lat ?? null, + lon: config?.lon ?? 
null, + }), + [config?.name, config?.public_key, config?.lat, config?.lon] + ); + + // Derive live so the byte-perfect button disables if the 30s window expires while modal is open + const isSelectedMessageResendable = + selectedPath?.messageId !== undefined && resendableIds.has(selectedPath.messageId); + // Look up contact by public key const getContact = (conversationKey: string | null): Contact | null => { if (!conversationKey) return null; @@ -520,34 +523,6 @@ export function MessageList({ )} )} - {msg.outgoing && onResendChannelMessage && resendableIds.has(msg.id) && ( - - )} {msg.outgoing && (msg.acked > 0 ? ( msg.paths && msg.paths.length > 0 ? ( @@ -557,12 +532,9 @@ export function MessageList({ e.stopPropagation(); setSelectedPath({ paths: msg.paths!, - senderInfo: { - name: config?.name || 'Unknown', - publicKeyOrPrefix: config?.public_key || '', - lat: config?.lat ?? null, - lon: config?.lon ?? null, - }, + senderInfo: selfSenderInfo, + messageId: msg.id, + isOutgoingChan: msg.type === 'CHAN' && !!onResendChannelMessage, }); }} title="View echo paths" @@ -570,6 +542,23 @@ export function MessageList({ ) : ( {` ✓${msg.acked > 1 ? msg.acked : ''}`} ) + ) : onResendChannelMessage && msg.type === 'CHAN' ? ( + { + e.stopPropagation(); + setSelectedPath({ + paths: [], + senderInfo: selfSenderInfo, + messageId: msg.id, + isOutgoingChan: true, + }); + }} + title="Message status" + > + {' '} + ? + ) : ( {' '} @@ -616,6 +605,10 @@ export function MessageList({ senderInfo={selectedPath.senderInfo} contacts={contacts} config={config ?? 
null} + messageId={selectedPath.messageId} + isOutgoingChan={selectedPath.isOutgoingChan} + isResendable={isSelectedMessageResendable} + onResend={onResendChannelMessage} /> )} diff --git a/frontend/src/components/PathModal.tsx b/frontend/src/components/PathModal.tsx index af79ea4..2cb5459 100644 --- a/frontend/src/components/PathModal.tsx +++ b/frontend/src/components/PathModal.tsx @@ -1,12 +1,5 @@ import type { Contact, RadioConfig, MessagePath } from '../types'; -import { - Dialog, - DialogContent, - DialogHeader, - DialogTitle, - DialogDescription, - DialogFooter, -} from './ui/dialog'; +import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogDescription } from './ui/dialog'; import { Button } from './ui/button'; import { resolvePath, @@ -28,14 +21,34 @@ interface PathModalProps { senderInfo: SenderInfo; contacts: Contact[]; config: RadioConfig | null; + messageId?: number; + isOutgoingChan?: boolean; + isResendable?: boolean; + onResend?: (messageId: number, newTimestamp?: boolean) => void; } -export function PathModal({ open, onClose, paths, senderInfo, contacts, config }: PathModalProps) { +export function PathModal({ + open, + onClose, + paths, + senderInfo, + contacts, + config, + messageId, + isOutgoingChan, + isResendable, + onResend, +}: PathModalProps) { + const hasResendActions = isOutgoingChan && messageId !== undefined && onResend; + const hasPaths = paths.length > 0; + // Resolve all paths - const resolvedPaths = paths.map((p) => ({ - ...p, - resolved: resolvePath(p.path, senderInfo, contacts, config), - })); + const resolvedPaths = hasPaths + ? paths.map((p) => ({ + ...p, + resolved: resolvePath(p.path, senderInfo, contacts, config), + })) + : []; const hasSinglePath = paths.length === 1; @@ -43,9 +56,15 @@ export function PathModal({ open, onClose, paths, senderInfo, contacts, config } !isOpen && onClose()}> - Message Path{!hasSinglePath && `s (${paths.length})`} + + {hasPaths + ? `Message Path${!hasSinglePath ? 
`s (${paths.length})` : ''}` + : 'Message Status'} + - {hasSinglePath ? ( + {!hasPaths ? ( + <>No echoes heard yet. Echoes appear when repeaters re-broadcast your message. + ) : hasSinglePath ? ( <> This shows one route that this message traveled through the mesh network. Routers may be incorrectly identified due to prefix collisions between heard and @@ -60,64 +79,104 @@ export function PathModal({ open, onClose, paths, senderInfo, contacts, config } -
- {/* Raw path summary */} -
- {paths.map((p, index) => { - const hops = parsePathHops(p.path); - const rawPath = hops.length > 0 ? hops.join('->') : 'direct'; - return ( -
- Path {index + 1}:{' '} - {rawPath} -
- ); - })} -
+ {hasPaths && ( +
+ {/* Raw path summary */} +
+ {paths.map((p, index) => { + const hops = parsePathHops(p.path); + const rawPath = hops.length > 0 ? hops.join('->') : 'direct'; + return ( +
+ Path {index + 1}:{' '} + {rawPath} +
+ ); + })} +
- {/* Straight-line distance (sender to receiver, same for all routes) */} - {resolvedPaths.length > 0 && - isValidLocation( - resolvedPaths[0].resolved.sender.lat, - resolvedPaths[0].resolved.sender.lon - ) && - isValidLocation( - resolvedPaths[0].resolved.receiver.lat, - resolvedPaths[0].resolved.receiver.lon - ) && ( -
- Straight-line distance: - - {formatDistance( - calculateDistance( - resolvedPaths[0].resolved.sender.lat, - resolvedPaths[0].resolved.sender.lon, - resolvedPaths[0].resolved.receiver.lat, - resolvedPaths[0].resolved.receiver.lon - )! - )} - -
- )} - - {resolvedPaths.map((pathData, index) => ( -
- {!hasSinglePath && ( -
- Path {index + 1}{' '} - - — received {formatTime(pathData.received_at)} + {/* Straight-line distance (sender to receiver, same for all routes) */} + {resolvedPaths.length > 0 && + isValidLocation( + resolvedPaths[0].resolved.sender.lat, + resolvedPaths[0].resolved.sender.lon + ) && + isValidLocation( + resolvedPaths[0].resolved.receiver.lat, + resolvedPaths[0].resolved.receiver.lon + ) && ( +
+ Straight-line distance: + + {formatDistance( + calculateDistance( + resolvedPaths[0].resolved.sender.lat, + resolvedPaths[0].resolved.sender.lon, + resolvedPaths[0].resolved.receiver.lat, + resolvedPaths[0].resolved.receiver.lon + )! + )}
)} - -
- ))} -
- - - + {resolvedPaths.map((pathData, index) => ( +
+ {!hasSinglePath && ( +
+ Path {index + 1}{' '} + + — received {formatTime(pathData.received_at)} + +
+ )} + +
+ ))} +
+ )} + +
+ {hasResendActions && ( +
+ {isResendable && ( + + )} + +
+ )} + +
); diff --git a/frontend/src/components/SettingsModal.tsx b/frontend/src/components/SettingsModal.tsx index e25f707..769a29f 100644 --- a/frontend/src/components/SettingsModal.tsx +++ b/frontend/src/components/SettingsModal.tsx @@ -19,6 +19,11 @@ import { Separator } from './ui/separator'; import { toast } from './ui/sonner'; import { api } from '../api'; import { formatTime } from '../utils/messageParser'; +import { + captureLastViewedConversationFromHash, + getReopenLastConversationEnabled, + setReopenLastConversationEnabled, +} from '../utils/lastViewedConversation'; // Radio presets for common configurations interface RadioPreset { @@ -140,7 +145,11 @@ export function SettingsModal(props: SettingsModalProps) { // Database maintenance state const [retentionDays, setRetentionDays] = useState('14'); const [cleaning, setCleaning] = useState(false); + const [purgingDecryptedRaw, setPurgingDecryptedRaw] = useState(false); const [autoDecryptOnAdvert, setAutoDecryptOnAdvert] = useState(false); + const [reopenLastConversation, setReopenLastConversation] = useState( + getReopenLastConversationEnabled + ); // Advertisement interval state const [advertInterval, setAdvertInterval] = useState('0'); @@ -222,6 +231,12 @@ export function SettingsModal(props: SettingsModalProps) { } }, [open, pageMode, onRefreshAppSettings]); + useEffect(() => { + if (open || pageMode) { + setReopenLastConversation(getReopenLastConversationEnabled()); + } + }, [open, pageMode]); + useEffect(() => { if (typeof window === 'undefined' || typeof window.matchMedia !== 'function') return; @@ -495,7 +510,7 @@ export function SettingsModal(props: SettingsModalProps) { setCleaning(true); try { - const result = await api.runMaintenance(days); + const result = await api.runMaintenance({ pruneUndecryptedDays: days }); toast.success('Database cleanup complete', { description: `Deleted ${result.packets_deleted} old packet${result.packets_deleted === 1 ? 
'' : 's'}`, }); @@ -510,6 +525,25 @@ export function SettingsModal(props: SettingsModalProps) { } }; + const handlePurgeDecryptedRawPackets = async () => { + setPurgingDecryptedRaw(true); + + try { + const result = await api.runMaintenance({ purgeLinkedRawPackets: true }); + toast.success('Decrypted raw packets purged', { + description: `Deleted ${result.packets_deleted} raw packet${result.packets_deleted === 1 ? '' : 's'}`, + }); + await onHealthRefresh(); + } catch (err) { + console.error('Failed to purge decrypted raw packets:', err); + toast.error('Failed to purge decrypted raw packets', { + description: err instanceof Error ? err.message : 'Unknown error', + }); + } finally { + setPurgingDecryptedRaw(false); + } + }; + const handleSaveDatabaseSettings = async () => { setBusySection('database'); setSectionError(null); @@ -529,6 +563,14 @@ export function SettingsModal(props: SettingsModalProps) { } }; + const handleToggleReopenLastConversation = (enabled: boolean) => { + setReopenLastConversation(enabled); + setReopenLastConversationEnabled(enabled); + if (enabled) { + captureLastViewedConversationFromHash(); + } + }; + const handleSaveBotSettings = async () => { setBusySection('bot'); setSectionError(null); @@ -612,14 +654,14 @@ export function SettingsModal(props: SettingsModalProps) { const shouldRenderSection = (section: SettingsSection) => !externalDesktopSidebarMode || desktopSection === section; - const sectionWrapperClass = 'border border-input rounded-md overflow-hidden'; + const sectionWrapperClass = 'overflow-hidden'; const sectionContentClass = externalDesktopSidebarMode - ? 'space-y-4 p-4 h-full overflow-y-auto' + ? 'space-y-4 p-4' : 'space-y-4 p-4 border-t border-input'; const settingsContainerClass = externalDesktopSidebarMode - ? 'w-full h-full' + ? 'w-full h-full overflow-y-auto' : 'w-full h-full overflow-y-auto space-y-3'; const sectionButtonClasses = @@ -996,15 +1038,17 @@ export function SettingsModal(props: SettingsModalProps) {
- +

- Delete undecrypted packets older than the specified days. This helps manage - storage for packets that couldn't be decrypted (unknown channel keys). + Permanently deletes stored raw packets containing DMs and channel messages that + have not yet been decrypted. These packets are retained in case you later obtain + the correct key — once deleted, these messages can never be recovered or + decrypted.

-
+
+ +

+ Deletes archival copies of raw packet bytes for messages that are already + decrypted and visible in your chat history.{' '} + + This will not affect any displayed messages or app functionality. + {' '} + The raw bytes are only useful for manual packet analysis. +

+ +
+ + +
+ + +
+ + +

+ This applies only to this device/browser. It does not sync to server settings. +

+
+ {getSectionError('database') && (
{getSectionError('database')}
)} diff --git a/frontend/src/components/settingsConstants.ts b/frontend/src/components/settingsConstants.ts index 22931b8..60fa3bc 100644 --- a/frontend/src/components/settingsConstants.ts +++ b/frontend/src/components/settingsConstants.ts @@ -19,7 +19,7 @@ export const SETTINGS_SECTION_LABELS: Record = { radio: '📻 Radio', identity: '🪪 Identity', connectivity: '📡 Connectivity', - database: '🗄️ Database', + database: '🗄️ Database & Interface', bot: '🤖 Bot', statistics: '📊 Statistics', }; diff --git a/frontend/src/hooks/useConversationRouter.ts b/frontend/src/hooks/useConversationRouter.ts index 0b67223..94d3c0e 100644 --- a/frontend/src/hooks/useConversationRouter.ts +++ b/frontend/src/hooks/useConversationRouter.ts @@ -5,6 +5,11 @@ import { resolveChannelFromHashToken, resolveContactFromHashToken, } from '../utils/urlHash'; +import { + getLastViewedConversation, + getReopenLastConversationEnabled, + saveLastViewedConversation, +} from '../utils/lastViewedConversation'; import { getContactDisplayName } from '../utils/pubkey'; import type { Channel, Contact, Conversation } from '../types'; @@ -27,8 +32,26 @@ export function useConversationRouter({ pendingDeleteFallbackRef, hasSetDefaultConversation, }: UseConversationRouterArgs) { - const [activeConversation, setActiveConversation] = useState(null); + const [activeConversation, setActiveConversationState] = useState(null); const activeConversationRef = useRef(null); + const hashSyncEnabledRef = useRef( + typeof window !== 'undefined' ? 
window.location.hash.length > 0 : false + ); + + const setActiveConversation = useCallback((conv: Conversation | null) => { + hashSyncEnabledRef.current = true; + setActiveConversationState(conv); + }, []); + + const getPublicChannelConversation = useCallback((): Conversation | null => { + const publicChannel = channels.find((c) => c.name === 'Public'); + if (!publicChannel) return null; + return { + type: 'channel', + id: publicChannel.key, + name: publicChannel.name, + }; + }, [channels]); // Phase 1: Set initial conversation from URL hash or default to Public channel // Only needs channels (fast path) - doesn't wait for contacts @@ -40,12 +63,12 @@ export function useConversationRouter({ // Handle non-data views immediately if (hashConv?.type === 'raw') { - setActiveConversation({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' }); + setActiveConversationState({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' }); hasSetDefaultConversation.current = true; return; } if (hashConv?.type === 'map') { - setActiveConversation({ + setActiveConversationState({ type: 'map', id: 'map', name: 'Node Map', @@ -55,7 +78,7 @@ export function useConversationRouter({ return; } if (hashConv?.type === 'visualizer') { - setActiveConversation({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' }); + setActiveConversationState({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' }); hasSetDefaultConversation.current = true; return; } @@ -64,7 +87,7 @@ export function useConversationRouter({ if (hashConv?.type === 'channel') { const channel = resolveChannelFromHashToken(hashConv.name, channels); if (channel) { - setActiveConversation({ type: 'channel', id: channel.key, name: channel.name }); + setActiveConversationState({ type: 'channel', id: channel.key, name: channel.name }); hasSetDefaultConversation.current = true; return; } @@ -73,17 +96,42 @@ export function useConversationRouter({ // Contact hash — wait for phase 2 if (hashConv?.type === 'contact') return; 
+ // No hash: optionally restore last-viewed conversation if enabled on this device. + if (!hashConv && getReopenLastConversationEnabled()) { + const lastViewed = getLastViewedConversation(); + if ( + lastViewed && + (lastViewed.type === 'raw' || lastViewed.type === 'map' || lastViewed.type === 'visualizer') + ) { + setActiveConversationState(lastViewed); + hasSetDefaultConversation.current = true; + return; + } + if (lastViewed?.type === 'channel') { + const channel = + channels.find((c) => c.key.toLowerCase() === lastViewed.id.toLowerCase()) || + resolveChannelFromHashToken(lastViewed.id, channels); + if (channel) { + setActiveConversationState({ + type: 'channel', + id: channel.key, + name: channel.name, + }); + hasSetDefaultConversation.current = true; + return; + } + } + // Last-viewed contact resolution waits for contacts in phase 2. + if (lastViewed?.type === 'contact') return; + } + // No hash or unresolvable — default to Public - const publicChannel = channels.find((c) => c.name === 'Public'); - if (publicChannel) { - setActiveConversation({ - type: 'channel', - id: publicChannel.key, - name: publicChannel.name, - }); + const publicConversation = getPublicChannelConversation(); + if (publicConversation) { + setActiveConversationState(publicConversation); hasSetDefaultConversation.current = true; } - }, [channels, activeConversation]); + }, [channels, activeConversation, getPublicChannelConversation, hasSetDefaultConversation]); // Phase 2: Resolve contact hash (only if phase 1 didn't set a conversation) useEffect(() => { @@ -95,7 +143,7 @@ export function useConversationRouter({ const contact = resolveContactFromHashToken(hashConv.name, contacts); if (contact) { - setActiveConversation({ + setActiveConversationState({ type: 'contact', id: contact.public_key, name: getContactDisplayName(contact.name, contact.public_key), @@ -105,25 +153,58 @@ export function useConversationRouter({ } // Contact hash didn't match — fall back to Public if channels loaded. 
- if (channels.length > 0) { - const publicChannel = channels.find((c) => c.name === 'Public'); - if (publicChannel) { - setActiveConversation({ - type: 'channel', - id: publicChannel.key, - name: publicChannel.name, - }); - hasSetDefaultConversation.current = true; - } + const publicConversation = getPublicChannelConversation(); + if (publicConversation) { + setActiveConversationState(publicConversation); + hasSetDefaultConversation.current = true; + } + return; + } + + // No hash: optionally restore a last-viewed contact once contacts are loaded. + if (!hashConv && getReopenLastConversationEnabled()) { + const lastViewed = getLastViewedConversation(); + if (lastViewed?.type !== 'contact') return; + if (!contactsLoaded) return; + + const contact = + contacts.find((item) => item.public_key.toLowerCase() === lastViewed.id.toLowerCase()) || + resolveContactFromHashToken(lastViewed.id, contacts); + if (contact) { + setActiveConversationState({ + type: 'contact', + id: contact.public_key, + name: getContactDisplayName(contact.name, contact.public_key), + }); + hasSetDefaultConversation.current = true; + return; + } + + const publicConversation = getPublicChannelConversation(); + if (publicConversation) { + setActiveConversationState(publicConversation); + hasSetDefaultConversation.current = true; } } - }, [contacts, channels, activeConversation, contactsLoaded]); + }, [ + contacts, + channels, + activeConversation, + contactsLoaded, + getPublicChannelConversation, + hasSetDefaultConversation, + ]); // Keep ref in sync and update URL hash useEffect(() => { activeConversationRef.current = activeConversation; if (activeConversation) { - updateUrlHash(activeConversation); + if (hashSyncEnabledRef.current) { + updateUrlHash(activeConversation); + } + if (getReopenLastConversationEnabled()) { + saveLastViewedConversation(activeConversation); + } } }, [activeConversation]); @@ -142,12 +223,12 @@ export function useConversationRouter({ hasSetDefaultConversation.current = true; 
pendingDeleteFallbackRef.current = false; - setActiveConversation({ + setActiveConversationState({ type: 'channel', id: publicChannel.key, name: publicChannel.name, }); - }, [activeConversation, channels]); + }, [activeConversation, channels, hasSetDefaultConversation, pendingDeleteFallbackRef]); // Handle conversation selection (closes sidebar on mobile) const handleSelectConversation = useCallback( @@ -155,7 +236,7 @@ export function useConversationRouter({ setActiveConversation(conv); setSidebarOpen(false); }, - [setSidebarOpen] + [setActiveConversation, setSidebarOpen] ); return { diff --git a/frontend/src/test/appStartupHash.test.tsx b/frontend/src/test/appStartupHash.test.tsx index 8752e54..31863c1 100644 --- a/frontend/src/test/appStartupHash.test.tsx +++ b/frontend/src/test/appStartupHash.test.tsx @@ -137,6 +137,10 @@ vi.mock('../components/ui/sonner', () => ({ })); import { App } from '../App'; +import { + LAST_VIEWED_CONVERSATION_KEY, + REOPEN_LAST_CONVERSATION_KEY, +} from '../utils/lastViewedConversation'; const publicChannel = { key: '8B3387E9C5CDEA6AC9E5EDBAA115CD72', @@ -149,6 +153,7 @@ const publicChannel = { describe('App startup hash resolution', () => { beforeEach(() => { vi.clearAllMocks(); + localStorage.clear(); window.location.hash = `#contact/${'a'.repeat(64)}/Alice`; mocks.api.getRadioConfig.mockResolvedValue({ @@ -178,6 +183,7 @@ describe('App startup hash resolution', () => { afterEach(() => { window.location.hash = ''; + localStorage.clear(); }); it('falls back to Public when contact hash is unresolvable and contacts are empty', async () => { @@ -189,4 +195,104 @@ describe('App startup hash resolution', () => { } }); }); + + it('restores last viewed channel when hash is empty and reopen preference is enabled', async () => { + const chatChannel = { + key: '11111111111111111111111111111111', + name: 'Ops', + is_hashtag: false, + on_radio: false, + last_read_at: null, + }; + + window.location.hash = ''; + 
localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1'); + localStorage.setItem( + LAST_VIEWED_CONVERSATION_KEY, + JSON.stringify({ + type: 'channel', + id: chatChannel.key, + name: chatChannel.name, + }) + ); + mocks.api.getChannels.mockResolvedValue([publicChannel, chatChannel]); + + render(); + + await waitFor(() => { + for (const node of screen.getAllByTestId('active-conversation')) { + expect(node).toHaveTextContent(`channel:${chatChannel.key}:${chatChannel.name}`); + } + }); + expect(window.location.hash).toBe(''); + }); + + it('uses Public channel when hash is empty and reopen preference is disabled', async () => { + const chatChannel = { + key: '11111111111111111111111111111111', + name: 'Ops', + is_hashtag: false, + on_radio: false, + last_read_at: null, + }; + + window.location.hash = ''; + localStorage.setItem( + LAST_VIEWED_CONVERSATION_KEY, + JSON.stringify({ + type: 'channel', + id: chatChannel.key, + name: chatChannel.name, + }) + ); + mocks.api.getChannels.mockResolvedValue([publicChannel, chatChannel]); + + render(); + + await waitFor(() => { + for (const node of screen.getAllByTestId('active-conversation')) { + expect(node).toHaveTextContent(`channel:${publicChannel.key}:Public`); + } + }); + expect(window.location.hash).toBe(''); + }); + + it('restores last viewed contact from legacy name token when hash is empty and reopen is enabled', async () => { + const aliceContact = { + public_key: 'b'.repeat(64), + name: 'Alice', + type: 1, + flags: 0, + last_path: null, + last_path_len: -1, + last_advert: null, + lat: null, + lon: null, + last_seen: null, + on_radio: false, + last_contacted: null, + last_read_at: null, + }; + + window.location.hash = ''; + localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1'); + localStorage.setItem( + LAST_VIEWED_CONVERSATION_KEY, + JSON.stringify({ + type: 'contact', + id: 'Alice', + name: 'Alice', + }) + ); + mocks.api.getContacts.mockResolvedValue([aliceContact]); + + render(); + + await waitFor(() => { + for 
(const node of screen.getAllByTestId('active-conversation')) { + expect(node).toHaveTextContent(`contact:${aliceContact.public_key}:Alice`); + } + }); + expect(window.location.hash).toBe(''); + }); }); diff --git a/frontend/src/test/settingsModal.test.tsx b/frontend/src/test/settingsModal.test.tsx index 2815201..22fd424 100644 --- a/frontend/src/test/settingsModal.test.tsx +++ b/frontend/src/test/settingsModal.test.tsx @@ -11,6 +11,11 @@ import type { StatisticsResponse, } from '../types'; import type { SettingsSection } from '../components/SettingsModal'; +import { + LAST_VIEWED_CONVERSATION_KEY, + REOPEN_LAST_CONVERSATION_KEY, +} from '../utils/lastViewedConversation'; +import { api } from '../api'; const baseConfig: RadioConfig = { public_key: 'aa'.repeat(32), @@ -128,9 +133,16 @@ function openConnectivitySection() { fireEvent.click(connectivityToggle); } +function openDatabaseSection() { + const databaseToggle = screen.getByRole('button', { name: /Database/i }); + fireEvent.click(databaseToggle); +} + describe('SettingsModal', () => { afterEach(() => { vi.restoreAllMocks(); + localStorage.clear(); + window.location.hash = ''; }); it('refreshes app settings when opened', async () => { @@ -291,6 +303,41 @@ describe('SettingsModal', () => { expect(onClose).not.toHaveBeenCalled(); }); + it('stores and clears reopen-last-conversation preference locally', () => { + window.location.hash = '#raw'; + renderModal(); + openDatabaseSection(); + + const checkbox = screen.getByLabelText('Reopen to last viewed channel/conversation'); + expect(checkbox).not.toBeChecked(); + + fireEvent.click(checkbox); + + expect(localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY)).toBe('1'); + expect(localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY)).toContain('"type":"raw"'); + + fireEvent.click(checkbox); + + expect(localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY)).toBeNull(); + expect(localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY)).toBeNull(); + }); + + it('purges decrypted raw 
packets via maintenance endpoint action', async () => { + const runMaintenanceSpy = vi.spyOn(api, 'runMaintenance').mockResolvedValue({ + packets_deleted: 12, + vacuumed: true, + }); + + renderModal(); + openDatabaseSection(); + + fireEvent.click(screen.getByRole('button', { name: 'Purge Archival Raw Packets' })); + + await waitFor(() => { + expect(runMaintenanceSpy).toHaveBeenCalledWith({ purgeLinkedRawPackets: true }); + }); + }); + it('renders statistics section with fetched data', async () => { const mockStats: StatisticsResponse = { busiest_channels_24h: [ diff --git a/frontend/src/utils/lastViewedConversation.ts b/frontend/src/utils/lastViewedConversation.ts new file mode 100644 index 0000000..1bd7967 --- /dev/null +++ b/frontend/src/utils/lastViewedConversation.ts @@ -0,0 +1,103 @@ +import type { Conversation } from '../types'; +import { parseHashConversation } from './urlHash'; + +export const REOPEN_LAST_CONVERSATION_KEY = 'remoteterm-reopen-last-conversation'; +export const LAST_VIEWED_CONVERSATION_KEY = 'remoteterm-last-viewed-conversation'; + +const SUPPORTED_TYPES: Conversation['type'][] = ['contact', 'channel', 'raw', 'map', 'visualizer']; + +function isSupportedType(value: unknown): value is Conversation['type'] { + return typeof value === 'string' && SUPPORTED_TYPES.includes(value as Conversation['type']); +} + +export function getReopenLastConversationEnabled(): boolean { + try { + return localStorage.getItem(REOPEN_LAST_CONVERSATION_KEY) === '1'; + } catch { + return false; + } +} + +export function setReopenLastConversationEnabled(enabled: boolean): void { + try { + if (enabled) { + localStorage.setItem(REOPEN_LAST_CONVERSATION_KEY, '1'); + return; + } + + localStorage.removeItem(REOPEN_LAST_CONVERSATION_KEY); + localStorage.removeItem(LAST_VIEWED_CONVERSATION_KEY); + } catch { + // localStorage may be unavailable + } +} + +export function saveLastViewedConversation(conversation: Conversation): void { + try { + 
localStorage.setItem(LAST_VIEWED_CONVERSATION_KEY, JSON.stringify(conversation)); + } catch { + // localStorage may be unavailable + } +} + +export function getLastViewedConversation(): Conversation | null { + try { + const raw = localStorage.getItem(LAST_VIEWED_CONVERSATION_KEY); + if (!raw) return null; + + const parsed = JSON.parse(raw) as Partial; + if ( + !isSupportedType(parsed.type) || + typeof parsed.id !== 'string' || + typeof parsed.name !== 'string' + ) { + return null; + } + + if (parsed.type !== 'map') { + return { + type: parsed.type, + id: parsed.id, + name: parsed.name, + }; + } + + return { + type: 'map', + id: parsed.id, + name: parsed.name, + ...(typeof parsed.mapFocusKey === 'string' && { mapFocusKey: parsed.mapFocusKey }), + }; + } catch { + return null; + } +} + +export function captureLastViewedConversationFromHash(): void { + const hashConversation = parseHashConversation(); + if (!hashConversation) return; + + if (hashConversation.type === 'raw') { + saveLastViewedConversation({ type: 'raw', id: 'raw', name: 'Raw Packet Feed' }); + return; + } + if (hashConversation.type === 'map') { + saveLastViewedConversation({ + type: 'map', + id: 'map', + name: 'Node Map', + ...(hashConversation.mapFocusKey && { mapFocusKey: hashConversation.mapFocusKey }), + }); + return; + } + if (hashConversation.type === 'visualizer') { + saveLastViewedConversation({ type: 'visualizer', id: 'visualizer', name: 'Mesh Visualizer' }); + return; + } + + saveLastViewedConversation({ + type: hashConversation.type, + id: hashConversation.name, + name: hashConversation.label || hashConversation.name, + }); +} diff --git a/tests/e2e/specs/reopen-last-conversation.spec.ts b/tests/e2e/specs/reopen-last-conversation.spec.ts new file mode 100644 index 0000000..7ca9f27 --- /dev/null +++ b/tests/e2e/specs/reopen-last-conversation.spec.ts @@ -0,0 +1,83 @@ +import { test, expect } from '@playwright/test'; +import { createChannel, deleteChannel } from '../helpers/api'; + +const 
REOPEN_LAST_CONVERSATION_KEY = 'remoteterm-reopen-last-conversation'; +const LAST_VIEWED_CONVERSATION_KEY = 'remoteterm-last-viewed-conversation'; + +function escapeRegex(value: string): string { + return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +test.describe('Reopen last conversation (device-local)', () => { + let channelName = ''; + let channelKey = ''; + + test.beforeAll(async () => { + channelName = `#e2ereopen${Date.now().toString().slice(-6)}`; + const channel = await createChannel(channelName); + channelKey = channel.key; + }); + + test.afterAll(async () => { + try { + await deleteChannel(channelKey); + } catch { + // Best-effort cleanup + } + }); + + test('reopens last viewed conversation on startup when enabled', async ({ page }) => { + await page.goto('/'); + await expect(page.getByText('Connected')).toBeVisible(); + await expect.poll(() => new URL(page.url()).hash).toBe(''); + + await page.getByText(channelName, { exact: true }).first().click(); + await expect( + page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i')) + ).toBeVisible(); + + await page.getByRole('button', { name: 'Settings' }).click(); + await page.getByRole('button', { name: /Database & Interface/i }).click(); + await page.getByLabel('Reopen to last viewed channel/conversation').check(); + await page.getByRole('button', { name: 'Back to Chat' }).click(); + + // Fresh launch path without hash should restore the saved conversation. 
+ await page.goto('/'); + await expect( + page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i')) + ).toBeVisible(); + await expect.poll(() => new URL(page.url()).hash).toBe(''); + }); + + test('clears local storage and falls back to default when disabled', async ({ page }) => { + await page.goto('/'); + await expect(page.getByText('Connected')).toBeVisible(); + + await page.getByText(channelName, { exact: true }).first().click(); + await expect( + page.getByPlaceholder(new RegExp(`message\\s+${escapeRegex(channelName)}`, 'i')) + ).toBeVisible(); + + await page.getByRole('button', { name: 'Settings' }).click(); + await page.getByRole('button', { name: /Database & Interface/i }).click(); + + const reopenToggle = page.getByLabel('Reopen to last viewed channel/conversation'); + await reopenToggle.check(); + await reopenToggle.uncheck(); + + const localState = await page.evaluate( + ([enabledKey, lastViewedKey]) => ({ + enabled: localStorage.getItem(enabledKey), + lastViewed: localStorage.getItem(lastViewedKey), + }), + [REOPEN_LAST_CONVERSATION_KEY, LAST_VIEWED_CONVERSATION_KEY] + ); + expect(localState.enabled).toBeNull(); + expect(localState.lastViewed).toBeNull(); + + await page.getByRole('button', { name: 'Back to Chat' }).click(); + await page.goto('/'); + await expect(page.getByPlaceholder(/message\s+Public/i)).toBeVisible(); + await expect.poll(() => new URL(page.url()).hash).toBe(''); + }); +}); diff --git a/tests/test_api.py b/tests/test_api.py index ba8388f..9cb92cd 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -887,6 +887,23 @@ class TestRawPacketRepository: deleted = await RawPacketRepository.prune_old_undecrypted(10) assert deleted == 0 + @pytest.mark.asyncio + async def test_purge_linked_to_messages_deletes_only_linked_packets(self, test_db): + """Purge linked raw packets removes only rows with a message_id.""" + ts = int(time.time()) + linked_1, _ = await RawPacketRepository.create(b"\x01\x02\x03", ts) + linked_2, _ 
= await RawPacketRepository.create(b"\x04\x05\x06", ts) + await RawPacketRepository.mark_decrypted(linked_1, 101) + await RawPacketRepository.mark_decrypted(linked_2, 102) + + await RawPacketRepository.create(b"\x07\x08\x09", ts) # undecrypted, should remain + + deleted = await RawPacketRepository.purge_linked_to_messages() + assert deleted == 2 + + remaining = await RawPacketRepository.get_undecrypted_count() + assert remaining == 1 + class TestMaintenanceEndpoint: """Test database maintenance endpoint.""" @@ -909,6 +926,23 @@ class TestMaintenanceEndpoint: assert result.packets_deleted == 2 assert result.vacuumed is True + @pytest.mark.asyncio + async def test_maintenance_can_purge_linked_raw_packets(self, test_db): + """Maintenance endpoint can purge raw packets linked to messages.""" + from app.routers.packets import MaintenanceRequest, run_maintenance + + ts = int(time.time()) + linked_1, _ = await RawPacketRepository.create(b"\x0a\x0b\x0c", ts) + linked_2, _ = await RawPacketRepository.create(b"\x0d\x0e\x0f", ts) + await RawPacketRepository.mark_decrypted(linked_1, 201) + await RawPacketRepository.mark_decrypted(linked_2, 202) + + request = MaintenanceRequest(purge_linked_raw_packets=True) + result = await run_maintenance(request) + + assert result.packets_deleted == 2 + assert result.vacuumed is True + class TestHealthEndpointDatabaseSize: """Test database size reporting in health endpoint.""" diff --git a/tests/test_frontend_static.py b/tests/test_frontend_static.py index be3faf3..3043f34 100644 --- a/tests/test_frontend_static.py +++ b/tests/test_frontend_static.py @@ -53,6 +53,16 @@ def test_valid_dist_serves_static_and_spa_fallback(tmp_path): assert root_response.status_code == 200 assert "index page" in root_response.text + manifest_response = client.get("/site.webmanifest") + assert manifest_response.status_code == 200 + assert manifest_response.headers["content-type"].startswith("application/manifest+json") + manifest = manifest_response.json() + 
assert manifest["start_url"] == "http://testserver/" + assert manifest["scope"] == "http://testserver/" + assert manifest["id"] == "http://testserver/" + assert manifest["display"] == "standalone" + assert manifest["icons"][0]["src"] == "http://testserver/web-app-manifest-192x192.png" + file_response = client.get("/robots.txt") assert file_response.status_code == 200 assert file_response.text == "User-agent: *" @@ -64,3 +74,28 @@ asset_response = client.get("/assets/app.js") assert asset_response.status_code == 200 assert "console.log('ok');" in asset_response.text + + +def test_webmanifest_uses_forwarded_origin_headers(tmp_path): + app = FastAPI() + dist_dir = tmp_path / "frontend" / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("index page") + + registered = register_frontend_static_routes(app, dist_dir) + assert registered is True + + with TestClient(app) as client: + response = client.get( + "/site.webmanifest", + headers={ + "x-forwarded-proto": "https", + "x-forwarded-host": "mesh.example.com:8443", + }, + ) + + assert response.status_code == 200 + data = response.json() + assert data["start_url"] == "https://mesh.example.com:8443/" + assert data["scope"] == "https://mesh.example.com:8443/" + assert data["id"] == "https://mesh.example.com:8443/" diff --git a/tests/test_migrations.py b/tests/test_migrations.py index 927641c..cb324f6 100644 --- a/tests/test_migrations.py +++ b/tests/test_migrations.py @@ -100,8 +100,8 @@ class TestMigration001: # Run migrations applied = await run_migrations(conn) - assert applied == 17 # All 17 migrations run - assert await get_version(conn) == 17 + assert applied == 20 # All 20 migrations run + assert await get_version(conn) == 20 # Verify columns exist by inserting and selecting await conn.execute( @@ -183,9 +183,9 @@ class TestMigration001: applied1 = await run_migrations(conn) applied2 = await run_migrations(conn) - assert applied1
== 17 # All 17 migrations run + assert applied1 == 20 # All 20 migrations run assert applied2 == 0 # No migrations on second run - assert await get_version(conn) == 17 + assert await get_version(conn) == 20 finally: await conn.close() @@ -246,8 +246,8 @@ class TestMigration001: applied = await run_migrations(conn) # All 17 migrations applied (version incremented) but no error - assert applied == 17 - assert await get_version(conn) == 17 + assert applied == 20 + assert await get_version(conn) == 20 finally: await conn.close() @@ -374,10 +374,10 @@ class TestMigration013: ) await conn.commit() - # Run migration 13 (plus 14+15+16+17 which also run) + # Run migration 13 (plus 14-20 which also run) applied = await run_migrations(conn) - assert applied == 5 - assert await get_version(conn) == 17 + assert applied == 8 + assert await get_version(conn) == 20 # Verify bots array was created with migrated data cursor = await conn.execute("SELECT bots FROM app_settings WHERE id = 1") @@ -431,3 +431,322 @@ class TestMigration013: assert bots == [] finally: await conn.close() + + +class TestMigration018: + """Test migration 018: drop UNIQUE(data) from raw_packets.""" + + @pytest.mark.asyncio + async def test_migration_drops_data_unique_constraint(self): + """Migration rebuilds raw_packets without UNIQUE(data), preserving data.""" + conn = await aiosqlite.connect(":memory:") + conn.row_factory = aiosqlite.Row + try: + await set_version(conn, 17) + + # Create raw_packets WITH UNIQUE(data) — simulates production schema + await conn.execute(""" + CREATE TABLE raw_packets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp INTEGER NOT NULL, + data BLOB NOT NULL UNIQUE, + message_id INTEGER, + payload_hash TEXT + ) + """) + await conn.execute( + "CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)" + ) + await conn.execute("CREATE INDEX idx_raw_packets_message_id ON raw_packets(message_id)") + + # Insert test data + await conn.execute( + "INSERT INTO 
raw_packets (timestamp, data, payload_hash) VALUES (?, ?, ?)", + (1000, b"\x01\x02\x03", "hash_a"), + ) + await conn.execute( + "INSERT INTO raw_packets (timestamp, data, message_id, payload_hash) VALUES (?, ?, ?, ?)", + (2000, b"\x04\x05\x06", 42, "hash_b"), + ) + # Create messages table stub (needed for migration 19) + await conn.execute(""" + CREATE TABLE messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + type TEXT NOT NULL, + conversation_key TEXT NOT NULL, + text TEXT NOT NULL, + sender_timestamp INTEGER, + received_at INTEGER NOT NULL, + txt_type INTEGER DEFAULT 0, + signature TEXT, + outgoing INTEGER DEFAULT 0, + acked INTEGER DEFAULT 0, + paths TEXT + ) + """) + await conn.execute( + """CREATE UNIQUE INDEX idx_messages_dedup_null_safe + ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))""" + ) + await conn.commit() + + # Verify autoindex exists before migration + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_raw_packets_1'" + ) + assert await cursor.fetchone() is not None + + await run_migrations(conn) + assert await get_version(conn) == 20 + + # Verify autoindex is gone + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_raw_packets_1'" + ) + assert await cursor.fetchone() is None + + # Verify data is preserved + cursor = await conn.execute("SELECT COUNT(*) FROM raw_packets") + assert (await cursor.fetchone())[0] == 2 + + cursor = await conn.execute( + "SELECT timestamp, data, message_id, payload_hash FROM raw_packets ORDER BY id" + ) + rows = await cursor.fetchall() + assert rows[0]["timestamp"] == 1000 + assert bytes(rows[0]["data"]) == b"\x01\x02\x03" + assert rows[0]["message_id"] is None + assert rows[0]["payload_hash"] == "hash_a" + assert rows[1]["message_id"] == 42 + + # Verify payload_hash unique index still works + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='idx_raw_packets_payload_hash'" + ) + assert 
await cursor.fetchone() is not None + finally: + await conn.close() + + @pytest.mark.asyncio + async def test_migration_skips_when_no_unique_constraint(self): + """Migration is a no-op when UNIQUE(data) is already absent.""" + conn = await aiosqlite.connect(":memory:") + conn.row_factory = aiosqlite.Row + try: + await set_version(conn, 17) + + # Create raw_packets WITHOUT UNIQUE(data) — fresh install schema + await conn.execute(""" + CREATE TABLE raw_packets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp INTEGER NOT NULL, + data BLOB NOT NULL, + message_id INTEGER, + payload_hash TEXT + ) + """) + await conn.execute( + "CREATE UNIQUE INDEX idx_raw_packets_payload_hash ON raw_packets(payload_hash)" + ) + # Messages stub for migration 19 + await conn.execute(""" + CREATE TABLE messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + type TEXT NOT NULL, + conversation_key TEXT NOT NULL, + text TEXT NOT NULL, + sender_timestamp INTEGER, + received_at INTEGER NOT NULL, + txt_type INTEGER DEFAULT 0, + signature TEXT, + outgoing INTEGER DEFAULT 0, + acked INTEGER DEFAULT 0, + paths TEXT + ) + """) + await conn.execute( + """CREATE UNIQUE INDEX idx_messages_dedup_null_safe + ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))""" + ) + await conn.commit() + + applied = await run_migrations(conn) + assert applied == 3 # Migrations 18+19+20 run (18+19 skip internally) + assert await get_version(conn) == 20 + finally: + await conn.close() + + +class TestMigration019: + """Test migration 019: drop UNIQUE constraint from messages.""" + + @pytest.mark.asyncio + async def test_migration_drops_messages_unique_constraint(self): + """Migration rebuilds messages without UNIQUE, preserving data and dedup index.""" + conn = await aiosqlite.connect(":memory:") + conn.row_factory = aiosqlite.Row + try: + await set_version(conn, 17) + + # raw_packets stub (no UNIQUE on data, so migration 18 skips) + await conn.execute(""" + CREATE TABLE raw_packets ( + id INTEGER 
PRIMARY KEY AUTOINCREMENT, + timestamp INTEGER NOT NULL, + data BLOB NOT NULL, + message_id INTEGER, + payload_hash TEXT + ) + """) + # Create messages WITH UNIQUE constraint — simulates production schema + await conn.execute(""" + CREATE TABLE messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + type TEXT NOT NULL, + conversation_key TEXT NOT NULL, + text TEXT NOT NULL, + sender_timestamp INTEGER, + received_at INTEGER NOT NULL, + txt_type INTEGER DEFAULT 0, + signature TEXT, + outgoing INTEGER DEFAULT 0, + acked INTEGER DEFAULT 0, + paths TEXT, + UNIQUE(type, conversation_key, text, sender_timestamp) + ) + """) + await conn.execute( + "CREATE INDEX idx_messages_conversation ON messages(type, conversation_key)" + ) + await conn.execute("CREATE INDEX idx_messages_received ON messages(received_at)") + await conn.execute( + """CREATE UNIQUE INDEX idx_messages_dedup_null_safe + ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))""" + ) + + # Insert test data + await conn.execute( + "INSERT INTO messages (type, conversation_key, text, sender_timestamp, received_at, paths) " + "VALUES (?, ?, ?, ?, ?, ?)", + ("CHAN", "KEY1", "hello world", 1000, 1000, '[{"path":"ab","received_at":1000}]'), + ) + await conn.execute( + "INSERT INTO messages (type, conversation_key, text, sender_timestamp, received_at, outgoing) " + "VALUES (?, ?, ?, ?, ?, ?)", + ("PRIV", "abc123", "dm text", 2000, 2000, 1), + ) + await conn.commit() + + # Verify autoindex exists before migration + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_messages_1'" + ) + assert await cursor.fetchone() is not None + + await run_migrations(conn) + assert await get_version(conn) == 20 + + # Verify autoindex is gone + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='sqlite_autoindex_messages_1'" + ) + assert await cursor.fetchone() is None + + # Verify data is preserved + cursor = await conn.execute("SELECT COUNT(*) FROM 
messages") + assert (await cursor.fetchone())[0] == 2 + + cursor = await conn.execute( + "SELECT type, conversation_key, text, paths, outgoing FROM messages ORDER BY id" + ) + rows = await cursor.fetchall() + assert rows[0]["type"] == "CHAN" + assert rows[0]["text"] == "hello world" + assert rows[0]["paths"] == '[{"path":"ab","received_at":1000}]' + assert rows[1]["type"] == "PRIV" + assert rows[1]["outgoing"] == 1 + + # Verify dedup index still works (INSERT OR IGNORE should ignore duplicates) + cursor = await conn.execute( + "INSERT OR IGNORE INTO messages (type, conversation_key, text, sender_timestamp, received_at) " + "VALUES (?, ?, ?, ?, ?)", + ("CHAN", "KEY1", "hello world", 1000, 9999), + ) + assert cursor.rowcount == 0 # Duplicate ignored + + # Verify dedup index exists + cursor = await conn.execute( + "SELECT name FROM sqlite_master WHERE name='idx_messages_dedup_null_safe'" + ) + assert await cursor.fetchone() is not None + finally: + await conn.close() + + +class TestMigration020: + """Test migration 020: enable WAL mode and incremental auto-vacuum.""" + + @pytest.mark.asyncio + async def test_migration_enables_wal_and_incremental_auto_vacuum(self, tmp_path): + """Migration switches journal mode to WAL and auto_vacuum to INCREMENTAL.""" + db_path = str(tmp_path / "test.db") + conn = await aiosqlite.connect(db_path) + conn.row_factory = aiosqlite.Row + try: + await set_version(conn, 19) + + # Create minimal tables so migration 20 can run + await conn.execute( + "CREATE TABLE raw_packets (id INTEGER PRIMARY KEY, data BLOB NOT NULL)" + ) + await conn.execute("CREATE TABLE messages (id INTEGER PRIMARY KEY, text TEXT NOT NULL)") + await conn.commit() + + # Verify defaults before migration + cursor = await conn.execute("PRAGMA auto_vacuum") + assert (await cursor.fetchone())[0] == 0 # NONE + + cursor = await conn.execute("PRAGMA journal_mode") + assert (await cursor.fetchone())[0] == "delete" + + applied = await run_migrations(conn) + assert applied == 1 + 
assert await get_version(conn) == 20 + + # Verify WAL mode + cursor = await conn.execute("PRAGMA journal_mode") + assert (await cursor.fetchone())[0] == "wal" + + # Verify incremental auto-vacuum + cursor = await conn.execute("PRAGMA auto_vacuum") + assert (await cursor.fetchone())[0] == 2 # INCREMENTAL + finally: + await conn.close() + + @pytest.mark.asyncio + async def test_migration_is_idempotent(self, tmp_path): + """Running migration 20 twice doesn't error or re-VACUUM.""" + db_path = str(tmp_path / "test.db") + conn = await aiosqlite.connect(db_path) + conn.row_factory = aiosqlite.Row + try: + # Set up as if already at version 20 with WAL + incremental + await conn.execute("PRAGMA auto_vacuum = INCREMENTAL") + await conn.execute("PRAGMA journal_mode = WAL") + await conn.execute( + "CREATE TABLE raw_packets (id INTEGER PRIMARY KEY, data BLOB NOT NULL)" + ) + await conn.execute("CREATE TABLE messages (id INTEGER PRIMARY KEY, text TEXT NOT NULL)") + await conn.commit() + await set_version(conn, 20) + + applied = await run_migrations(conn) + assert applied == 0 # Already at version 20 + + # Still WAL + INCREMENTAL + cursor = await conn.execute("PRAGMA journal_mode") + assert (await cursor.fetchone())[0] == "wal" + cursor = await conn.execute("PRAGMA auto_vacuum") + assert (await cursor.fetchone())[0] == 2 + finally: + await conn.close() diff --git a/tests/test_send_messages.py b/tests/test_send_messages.py index 23877df..44ed3b1 100644 --- a/tests/test_send_messages.py +++ b/tests/test_send_messages.py @@ -283,7 +283,7 @@ class TestResendChannelMessage: assert msg_id is not None with patch("app.routers.messages.require_connected", return_value=mc): - result = await resend_channel_message(msg_id) + result = await resend_channel_message(msg_id, new_timestamp=False) assert result["status"] == "ok" assert result["message_id"] == msg_id @@ -316,11 +316,42 @@ class TestResendChannelMessage: patch("app.routers.messages.require_connected", return_value=mc), 
pytest.raises(HTTPException) as exc_info, ): - await resend_channel_message(msg_id) + await resend_channel_message(msg_id, new_timestamp=False) assert exc_info.value.status_code == 400 assert "expired" in exc_info.value.detail.lower() + @pytest.mark.asyncio + async def test_resend_new_timestamp_collision_returns_original_id(self, test_db): + """When new-timestamp resend collides (same second), return original ID gracefully.""" + mc = _make_mc(name="MyNode") + chan_key = "dd" * 16 + await ChannelRepository.upsert(key=chan_key, name="#collision") + + now = int(time.time()) + msg_id = await MessageRepository.create( + msg_type="CHAN", + text="MyNode: duplicate", + conversation_key=chan_key.upper(), + sender_timestamp=now, + received_at=now, + outgoing=True, + ) + assert msg_id is not None + + with ( + patch("app.routers.messages.require_connected", return_value=mc), + patch("app.routers.messages.broadcast_event"), + patch("app.routers.messages.time") as mock_time, + ): + # Force the same second so MessageRepository.create returns None (duplicate) + mock_time.time.return_value = float(now) + result = await resend_channel_message(msg_id, new_timestamp=True) + + # Should succeed gracefully, returning the original message ID + assert result["status"] == "ok" + assert result["message_id"] == msg_id + @pytest.mark.asyncio async def test_resend_non_outgoing_returns_400(self, test_db): """Resend of incoming message fails.""" @@ -343,7 +374,7 @@ class TestResendChannelMessage: patch("app.routers.messages.require_connected", return_value=mc), pytest.raises(HTTPException) as exc_info, ): - await resend_channel_message(msg_id) + await resend_channel_message(msg_id, new_timestamp=False) assert exc_info.value.status_code == 400 assert "outgoing" in exc_info.value.detail.lower() @@ -369,7 +400,7 @@ class TestResendChannelMessage: patch("app.routers.messages.require_connected", return_value=mc), pytest.raises(HTTPException) as exc_info, ): - await resend_channel_message(msg_id) + 
await resend_channel_message(msg_id, new_timestamp=False) assert exc_info.value.status_code == 400 assert "channel" in exc_info.value.detail.lower() @@ -383,7 +414,7 @@ class TestResendChannelMessage: patch("app.routers.messages.require_connected", return_value=mc), pytest.raises(HTTPException) as exc_info, ): - await resend_channel_message(999999) + await resend_channel_message(999999, new_timestamp=False) assert exc_info.value.status_code == 404 @@ -406,7 +437,126 @@ class TestResendChannelMessage: assert msg_id is not None with patch("app.routers.messages.require_connected", return_value=mc): - await resend_channel_message(msg_id) + await resend_channel_message(msg_id, new_timestamp=False) call_kwargs = mc.commands.send_chan_msg.await_args.kwargs assert call_kwargs["msg"] == "hello world" + + @pytest.mark.asyncio + async def test_resend_new_timestamp_skips_window(self, test_db): + """new_timestamp=True succeeds even when the 30s window has expired.""" + mc = _make_mc(name="MyNode") + chan_key = "dd" * 16 + await ChannelRepository.upsert(key=chan_key, name="#old") + + old_ts = int(time.time()) - 60 # 60 seconds ago — outside byte-perfect window + msg_id = await MessageRepository.create( + msg_type="CHAN", + text="MyNode: old message", + conversation_key=chan_key.upper(), + sender_timestamp=old_ts, + received_at=old_ts, + outgoing=True, + ) + assert msg_id is not None + + with ( + patch("app.routers.messages.require_connected", return_value=mc), + patch("app.routers.messages.broadcast_event"), + ): + result = await resend_channel_message(msg_id, new_timestamp=True) + + assert result["status"] == "ok" + # Should return a NEW message id, not the original + assert result["message_id"] != msg_id + + @pytest.mark.asyncio + async def test_resend_new_timestamp_creates_new_message(self, test_db): + """new_timestamp=True creates a new DB row with a different sender_timestamp.""" + mc = _make_mc(name="MyNode") + chan_key = "dd" * 16 + await 
ChannelRepository.upsert(key=chan_key, name="#new") + + old_ts = int(time.time()) - 10 + msg_id = await MessageRepository.create( + msg_type="CHAN", + text="MyNode: test", + conversation_key=chan_key.upper(), + sender_timestamp=old_ts, + received_at=old_ts, + outgoing=True, + ) + assert msg_id is not None + + with ( + patch("app.routers.messages.require_connected", return_value=mc), + patch("app.routers.messages.broadcast_event"), + ): + result = await resend_channel_message(msg_id, new_timestamp=True) + + new_msg_id = result["message_id"] + new_msg = await MessageRepository.get_by_id(new_msg_id) + original_msg = await MessageRepository.get_by_id(msg_id) + + assert new_msg is not None + assert original_msg is not None + assert new_msg.sender_timestamp != original_msg.sender_timestamp + assert new_msg.text == original_msg.text + assert new_msg.outgoing is True + + @pytest.mark.asyncio + async def test_resend_new_timestamp_broadcasts_message(self, test_db): + """new_timestamp=True broadcasts the new message via WebSocket.""" + mc = _make_mc(name="MyNode") + chan_key = "dd" * 16 + await ChannelRepository.upsert(key=chan_key, name="#broadcast") + + old_ts = int(time.time()) - 5 + msg_id = await MessageRepository.create( + msg_type="CHAN", + text="MyNode: broadcast test", + conversation_key=chan_key.upper(), + sender_timestamp=old_ts, + received_at=old_ts, + outgoing=True, + ) + assert msg_id is not None + + with ( + patch("app.routers.messages.require_connected", return_value=mc), + patch("app.routers.messages.broadcast_event") as mock_broadcast, + ): + result = await resend_channel_message(msg_id, new_timestamp=True) + + mock_broadcast.assert_called_once() + event_type, event_data = mock_broadcast.call_args.args + assert event_type == "message" + assert event_data["id"] == result["message_id"] + assert event_data["outgoing"] is True + + @pytest.mark.asyncio + async def test_resend_byte_perfect_still_enforces_window(self, test_db): + """Default (byte-perfect) resend 
still enforces the 30s window.""" + mc = _make_mc(name="MyNode") + chan_key = "dd" * 16 + await ChannelRepository.upsert(key=chan_key, name="#window") + + old_ts = int(time.time()) - 60 + msg_id = await MessageRepository.create( + msg_type="CHAN", + text="MyNode: expired", + conversation_key=chan_key.upper(), + sender_timestamp=old_ts, + received_at=old_ts, + outgoing=True, + ) + assert msg_id is not None + + with ( + patch("app.routers.messages.require_connected", return_value=mc), + pytest.raises(HTTPException) as exc_info, + ): + await resend_channel_message(msg_id, new_timestamp=False) + + assert exc_info.value.status_code == 400 + assert "expired" in exc_info.value.detail.lower()