Drop unnecessary decryption columns and rely on the FK to the messages table as the indicator of decryption. Also, the reboot endpoint now retries the radio connection when disconnected

This commit is contained in:
Jack Kingsman
2026-01-17 16:33:42 -08:00
parent 9b59cccf69
commit 4219f96894
9 changed files with 222 additions and 79 deletions

View File

@@ -1,5 +1,11 @@
# RemoteTerm for MeshCore
## Important Rules
**NEVER make git commits.** A human must make all commits. You may stage files and prepare commit messages, but do not run `git commit`.
## Overview
A web interface for MeshCore mesh radio networks. The backend connects to a MeshCore-compatible radio over serial and exposes REST/WebSocket APIs. The React frontend provides real-time messaging and radio configuration.
**For detailed component documentation, see:**

View File

@@ -49,16 +49,13 @@ CREATE TABLE IF NOT EXISTS raw_packets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER,
FOREIGN KEY (message_id) REFERENCES messages(id)
);
CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(type, conversation_key);
CREATE INDEX IF NOT EXISTS idx_messages_received ON messages(received_at);
CREATE INDEX IF NOT EXISTS idx_raw_packets_decrypted ON raw_packets(decrypted);
CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id);
CREATE INDEX IF NOT EXISTS idx_contacts_on_radio ON contacts(on_radio);
"""

View File

@@ -43,11 +43,19 @@ async def run_migrations(conn: aiosqlite.Connection) -> int:
await set_version(conn, 1)
applied += 1
# Future migrations go here:
# if version < 2:
# await _migrate_002_something(conn)
# await set_version(conn, 2)
# applied += 1
# Migration 2: Drop unused decrypt_attempts and last_attempt columns
if version < 2:
logger.info("Applying migration 2: drop decrypt_attempts and last_attempt columns")
await _migrate_002_drop_decrypt_attempt_columns(conn)
await set_version(conn, 2)
applied += 1
# Migration 3: Drop decrypted column (redundant with message_id), update index
if version < 3:
logger.info("Applying migration 3: drop decrypted column, add message_id index")
await _migrate_003_drop_decrypted_column(conn)
await set_version(conn, 3)
applied += 1
if applied > 0:
logger.info(
@@ -90,3 +98,74 @@ async def _migrate_001_add_last_read_at(conn: aiosqlite.Connection) -> None:
raise
await conn.commit()
async def _migrate_002_drop_decrypt_attempt_columns(conn: aiosqlite.Connection) -> None:
    """
    Remove the unused decrypt_attempts and last_attempt columns from raw_packets.

    Both columns belonged to a retry-limiting feature that was never finished:
    they are written but never read, so dropping them cannot affect behavior.

    ALTER TABLE ... DROP COLUMN requires SQLite 3.35.0+. On older versions the
    drop is skipped silently and the harmless columns simply remain.
    """
    for col in ("decrypt_attempts", "last_attempt"):
        try:
            await conn.execute(f"ALTER TABLE raw_packets DROP COLUMN {col}")
            logger.debug("Dropped %s from raw_packets table", col)
        except aiosqlite.OperationalError as exc:
            msg = str(exc).lower()
            if "no such column" in msg:
                # A previous (partial) run already removed this column.
                logger.debug("raw_packets.%s already dropped, skipping", col)
                continue
            if "syntax error" in msg or "drop column" in msg:
                # Pre-3.35 SQLite without DROP COLUMN support; leave the column be.
                logger.debug("SQLite doesn't support DROP COLUMN, %s column will remain", col)
                continue
            raise
    await conn.commit()
async def _migrate_003_drop_decrypted_column(conn: aiosqlite.Connection) -> None:
    """
    Drop the decrypted column from raw_packets and update indexes.

    The decrypted column is redundant with message_id - a packet is decrypted
    iff message_id IS NOT NULL. We replace the decrypted index with a
    message_id index so undecrypted-packet queries stay efficient.

    SQLite 3.35.0+ supports ALTER TABLE DROP COLUMN. For older versions,
    we silently skip the column drop but still update the indexes.
    """
    # IF EXISTS / IF NOT EXISTS already make these statements idempotent, so
    # there is nothing to catch here: any OperationalError (e.g. a locked
    # database) is a real failure and must propagate rather than be swallowed.
    await conn.execute("DROP INDEX IF EXISTS idx_raw_packets_decrypted")
    logger.debug("Dropped idx_raw_packets_decrypted index")
    await conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id)"
    )
    logger.debug("Created idx_raw_packets_message_id index")
    # Best-effort drop of the redundant column; tolerate re-runs and old SQLite.
    try:
        await conn.execute("ALTER TABLE raw_packets DROP COLUMN decrypted")
        logger.debug("Dropped decrypted from raw_packets table")
    except aiosqlite.OperationalError as e:
        error_msg = str(e).lower()
        if "no such column" in error_msg:
            # A previous (partial) run already removed the column.
            logger.debug("raw_packets.decrypted already dropped, skipping")
        elif "syntax error" in error_msg or "drop column" in error_msg:
            # SQLite version doesn't support DROP COLUMN - harmless, column stays
            logger.debug("SQLite doesn't support DROP COLUMN, decrypted column will remain")
        else:
            raise
    await conn.commit()

View File

@@ -87,10 +87,12 @@ class RawPacket(BaseModel):
id: int
timestamp: int
data: str = Field(description="Hex-encoded packet data")
decrypted: bool = False
message_id: int | None = None
decrypt_attempts: int = 0
last_attempt: int | None = None
@property
def decrypted(self) -> bool:
"""A packet is decrypted iff it has a linked message_id."""
return self.message_id is not None
class RawPacketDecryptedInfo(BaseModel):

View File

@@ -468,9 +468,9 @@ class RawPacketRepository:
@staticmethod
async def get_undecrypted_count() -> int:
"""Get count of undecrypted packets."""
"""Get count of undecrypted packets (those without a linked message)."""
cursor = await db.conn.execute(
"SELECT COUNT(*) as count FROM raw_packets WHERE decrypted = 0"
"SELECT COUNT(*) as count FROM raw_packets WHERE message_id IS NULL"
)
row = await cursor.fetchone()
return row["count"] if row else 0
@@ -479,25 +479,27 @@ class RawPacketRepository:
async def get_all_undecrypted() -> list[tuple[int, bytes, int]]:
"""Get all undecrypted packets as (id, data, timestamp) tuples."""
cursor = await db.conn.execute(
"SELECT id, data, timestamp FROM raw_packets WHERE decrypted = 0 ORDER BY timestamp ASC"
"SELECT id, data, timestamp FROM raw_packets WHERE message_id IS NULL ORDER BY timestamp ASC"
)
rows = await cursor.fetchall()
return [(row["id"], bytes(row["data"]), row["timestamp"]) for row in rows]
@staticmethod
async def mark_decrypted(packet_id: int, message_id: int) -> None:
"""Link a raw packet to its decrypted message."""
await db.conn.execute(
"UPDATE raw_packets SET decrypted = 1, message_id = ? WHERE id = ?",
"UPDATE raw_packets SET message_id = ? WHERE id = ?",
(message_id, packet_id),
)
await db.conn.commit()
@staticmethod
async def get_undecrypted(limit: int = 100) -> list[RawPacket]:
"""Get undecrypted packets (those without a linked message)."""
cursor = await db.conn.execute(
"""
SELECT * FROM raw_packets
WHERE decrypted = 0
SELECT id, timestamp, data, message_id FROM raw_packets
WHERE message_id IS NULL
ORDER BY timestamp DESC
LIMIT ?
""",
@@ -509,32 +511,17 @@ class RawPacketRepository:
id=row["id"],
timestamp=row["timestamp"],
data=row["data"].hex(),
decrypted=bool(row["decrypted"]),
message_id=row["message_id"],
decrypt_attempts=row["decrypt_attempts"],
last_attempt=row["last_attempt"],
)
for row in rows
]
@staticmethod
async def increment_attempts(packet_id: int) -> None:
await db.conn.execute(
"""
UPDATE raw_packets
SET decrypt_attempts = decrypt_attempts + 1, last_attempt = ?
WHERE id = ?
""",
(int(time.time()), packet_id),
)
await db.conn.commit()
@staticmethod
async def prune_old_undecrypted(max_age_days: int) -> int:
"""Delete undecrypted packets older than max_age_days. Returns count deleted."""
cutoff = int(time.time()) - (max_age_days * 86400)
cursor = await db.conn.execute(
"DELETE FROM raw_packets WHERE decrypted = 0 AND timestamp < ?",
"DELETE FROM raw_packets WHERE message_id IS NULL AND timestamp < ?",
(cutoff,),
)
await db.conn.commit()

View File

@@ -145,13 +145,47 @@ async def send_advertisement(flood: bool = True) -> dict:
@router.post("/reboot")
async def reboot_radio() -> dict:
"""Reboot the radio. Connection will temporarily drop and auto-reconnect."""
mc = require_connected()
"""Reboot the radio, or reconnect if not currently connected.
logger.info("Rebooting radio")
await mc.commands.reboot()
If connected: sends reboot command, connection will temporarily drop and auto-reconnect.
If not connected: attempts to reconnect (same as /reconnect endpoint).
"""
from app.radio import radio_manager
return {"status": "ok", "message": "Reboot command sent. Radio will reconnect automatically."}
# If connected, send reboot command
if radio_manager.is_connected and radio_manager.meshcore:
logger.info("Rebooting radio")
await radio_manager.meshcore.commands.reboot()
return {
"status": "ok",
"message": "Reboot command sent. Radio will reconnect automatically.",
}
# Not connected - attempt to reconnect
if radio_manager.is_reconnecting:
return {
"status": "pending",
"message": "Reconnection already in progress",
"connected": False,
}
logger.info("Radio not connected, attempting reconnect")
success = await radio_manager.reconnect()
if success:
# Re-register event handlers after successful reconnect
from app.event_handlers import register_event_handlers
if radio_manager.meshcore:
register_event_handlers(radio_manager.meshcore)
await radio_manager.meshcore.start_auto_message_fetching()
logger.info("Event handlers re-registered and auto message fetching started")
return {"status": "ok", "message": "Reconnected successfully", "connected": True}
else:
raise HTTPException(
status_code=503, detail="Failed to reconnect. Check radio connection and power."
)
@router.post("/reconnect")

View File

@@ -483,10 +483,7 @@ class TestRawPacketRepository:
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
message_id INTEGER
)
""")
await conn.commit()
@@ -523,10 +520,7 @@ class TestRawPacketRepository:
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
message_id INTEGER
)
""")
await conn.commit()
@@ -567,10 +561,7 @@ class TestRawPacketRepository:
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
message_id INTEGER
)
""")
@@ -578,20 +569,21 @@ class TestRawPacketRepository:
old_timestamp = now - (15 * 86400) # 15 days ago
recent_timestamp = now - (5 * 86400) # 5 days ago
# Insert old undecrypted packet
# Insert old undecrypted packet (message_id NULL = undecrypted)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 0)",
"INSERT INTO raw_packets (timestamp, data) VALUES (?, ?)",
(old_timestamp, b"\x01\x02\x03"),
)
# Insert recent undecrypted packet
# Insert recent undecrypted packet (message_id NULL = undecrypted)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 0)",
"INSERT INTO raw_packets (timestamp, data) VALUES (?, ?)",
(recent_timestamp, b"\x04\x05\x06"),
)
# Insert old but decrypted packet (should NOT be deleted)
# message_id NOT NULL = decrypted
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 1)",
(old_timestamp, b"\x07\x08\x09"),
"INSERT INTO raw_packets (timestamp, data, message_id) VALUES (?, ?, ?)",
(old_timestamp, b"\x07\x08\x09", 1),
)
await conn.commit()
@@ -630,19 +622,16 @@ class TestRawPacketRepository:
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
message_id INTEGER
)
""")
now = int(time.time())
recent_timestamp = now - (5 * 86400) # 5 days ago
# Insert only recent packet
# Insert only recent packet (message_id NULL = undecrypted)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 0)",
"INSERT INTO raw_packets (timestamp, data) VALUES (?, ?)",
(recent_timestamp, b"\x01\x02\x03"),
)
await conn.commit()
@@ -680,23 +669,20 @@ class TestMaintenanceEndpoint:
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL UNIQUE,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
message_id INTEGER
)
""")
now = int(time.time())
old_timestamp = now - (20 * 86400) # 20 days ago
# Insert old undecrypted packets
# Insert old undecrypted packets (message_id NULL = undecrypted)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 0)",
"INSERT INTO raw_packets (timestamp, data) VALUES (?, ?)",
(old_timestamp, b"\x01\x02\x03"),
)
await conn.execute(
"INSERT INTO raw_packets (timestamp, data, decrypted) VALUES (?, ?, 0)",
"INSERT INTO raw_packets (timestamp, data) VALUES (?, ?)",
(old_timestamp, b"\x04\x05\x06"),
)
await conn.commit()

View File

@@ -65,13 +65,26 @@ class TestMigration001:
on_radio INTEGER DEFAULT 0
)
""")
# Raw packets table with old schema (for migrations 2 and 3)
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
)
""")
await conn.execute("CREATE INDEX idx_raw_packets_decrypted ON raw_packets(decrypted)")
await conn.commit()
# Run migrations
applied = await run_migrations(conn)
assert applied == 1
assert await get_version(conn) == 1
assert applied == 3 # All 3 migrations run
assert await get_version(conn) == 3
# Verify columns exist by inserting and selecting
await conn.execute(
@@ -117,15 +130,28 @@ class TestMigration001:
name TEXT NOT NULL
)
""")
# Raw packets table with old schema (for migrations 2 and 3)
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
)
""")
await conn.execute("CREATE INDEX idx_raw_packets_decrypted ON raw_packets(decrypted)")
await conn.commit()
# Run migrations twice
applied1 = await run_migrations(conn)
applied2 = await run_migrations(conn)
assert applied1 == 1
assert applied1 == 3 # All 3 migrations run
assert applied2 == 0 # No migrations on second run
assert await get_version(conn) == 1
assert await get_version(conn) == 3
finally:
await conn.close()
@@ -150,14 +176,27 @@ class TestMigration001:
last_read_at INTEGER
)
""")
# Raw packets table with old schema (for migrations 2 and 3)
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
)
""")
await conn.execute("CREATE INDEX idx_raw_packets_decrypted ON raw_packets(decrypted)")
await conn.commit()
# Run migrations - should not fail
applied = await run_migrations(conn)
# Still counts as applied (version incremented) but no error
assert applied == 1
assert await get_version(conn) == 1
# All 3 migrations applied (version incremented) but no error
assert applied == 3
assert await get_version(conn) == 3
finally:
await conn.close()
@@ -182,6 +221,19 @@ class TestMigration001:
is_hashtag INTEGER DEFAULT 0
)
""")
# Raw packets table with old schema (for migrations 2 and 3)
await conn.execute("""
CREATE TABLE raw_packets (
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
data BLOB NOT NULL,
decrypted INTEGER DEFAULT 0,
message_id INTEGER,
decrypt_attempts INTEGER DEFAULT 0,
last_attempt INTEGER
)
""")
await conn.execute("CREATE INDEX idx_raw_packets_decrypted ON raw_packets(decrypted)")
await conn.execute(
"INSERT INTO contacts (public_key, name, type) VALUES (?, ?, ?)",
("existingkey", "ExistingContact", 1),

View File

@@ -519,9 +519,9 @@ class TestCreateMessageFromDecrypted:
received_at=1700000001,
)
# Verify packet is marked decrypted
# Verify packet is marked decrypted (has message_id set)
undecrypted = await RawPacketRepository.get_undecrypted(limit=100)
packet_ids = [p[0] for p in undecrypted]
packet_ids = [p.id for p in undecrypted]
assert packet_id not in packet_ids # Should be marked as decrypted