mirror of
https://github.com/jkingsman/Remote-Terminal-for-MeshCore.git
synced 2026-05-01 02:53:00 +02:00
Docs, dead code, and schema updates
This commit is contained in:
@@ -327,6 +327,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
||||
| GET | `/api/contacts/analytics` | Unified keyed-or-name contact analytics payload |
|
||||
| GET | `/api/contacts/repeaters/advert-paths` | List recent unique advert paths for all contacts |
|
||||
| POST | `/api/contacts` | Create contact (optionally trigger historical DM decrypt) |
|
||||
| POST | `/api/contacts/bulk-delete` | Delete multiple contacts |
|
||||
| DELETE | `/api/contacts/{public_key}` | Delete contact |
|
||||
| POST | `/api/contacts/{public_key}/mark-read` | Mark contact conversation as read |
|
||||
| POST | `/api/contacts/{public_key}/command` | Send CLI command to repeater |
|
||||
@@ -350,6 +351,7 @@ All endpoints are prefixed with `/api` (e.g., `/api/health`).
|
||||
| GET | `/api/channels` | List channels |
|
||||
| GET | `/api/channels/{key}/detail` | Comprehensive channel profile (message stats, top senders) |
|
||||
| POST | `/api/channels` | Create channel |
|
||||
| POST | `/api/channels/bulk-hashtag` | Create multiple hashtag channels |
|
||||
| DELETE | `/api/channels/{key}` | Delete channel |
|
||||
| POST | `/api/channels/{key}/flood-scope-override` | Set or clear a per-channel regional flood-scope override |
|
||||
| POST | `/api/channels/{key}/mark-read` | Mark channel as read |
|
||||
@@ -475,7 +477,7 @@ mc.subscribe(EventType.ACK, handler)
|
||||
| `MESHCORE_ENABLE_MESSAGE_POLL_FALLBACK` | `false` | Switch the always-on radio audit task from hourly checks to aggressive 10-second polling; the audit checks both missed message drift and channel-slot cache drift |
|
||||
| `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE` | `false` | Disable channel-slot reuse and force `set_channel(...)` before every channel send, even on serial/BLE |
|
||||
|
||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `sidebar_sort_order`, `advert_interval`, `last_advert_time`, `favorites`, `last_message_times`, `flood_scope`, `blocked_keys`, and `blocked_names`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. The backend still carries `sidebar_sort_order` for compatibility and migration, but the current frontend sidebar stores sort order per section (`Channels`, `Contacts`, `Repeaters`) in localStorage rather than treating it as one shared server-backed preference. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||
**Note:** Runtime app settings are stored in the database (`app_settings` table), not environment variables. These include `max_radio_contacts`, `auto_decrypt_dm_on_advert`, `sidebar_sort_order`, `advert_interval`, `last_advert_time`, `favorites`, `last_message_times`, `flood_scope`, `blocked_keys`, `blocked_names`, and `discovery_blocked_types`. `max_radio_contacts` is the configured radio contact capacity baseline used by background maintenance: favorites reload first, non-favorite fill targets about 80% of that value, and full offload/reload triggers around 95% occupancy. They are configured via `GET/PATCH /api/settings`. The backend still carries `sidebar_sort_order` for compatibility and migration, but the current frontend sidebar stores sort order per section (`Channels`, `Contacts`, `Repeaters`) in localStorage rather than treating it as one shared server-backed preference. MQTT, bot, webhook, Apprise, and SQS configs are stored in the `fanout_configs` table, managed via `/api/fanout`. If the radio's channel slots appear unstable or another client is mutating them underneath this app, operators can force the old always-reconfigure send path with `MESHCORE_FORCE_CHANNEL_SLOT_RECONFIGURE=true`.
|
||||
|
||||
Byte-perfect channel retries are user-triggered via `POST /api/messages/channel/{message_id}/resend` and are allowed for 30 seconds after the original send.
|
||||
|
||||
|
||||
@@ -190,6 +190,7 @@ app/
|
||||
- `GET /contacts/analytics` — unified keyed-or-name analytics payload
|
||||
- `GET /contacts/repeaters/advert-paths` — recent advert paths for all contacts
|
||||
- `POST /contacts`
|
||||
- `POST /contacts/bulk-delete`
|
||||
- `DELETE /contacts/{public_key}`
|
||||
- `POST /contacts/{public_key}/mark-read`
|
||||
- `POST /contacts/{public_key}/command`
|
||||
@@ -214,6 +215,7 @@ app/
|
||||
- `GET /channels`
|
||||
- `GET /channels/{key}/detail`
|
||||
- `POST /channels`
|
||||
- `POST /channels/bulk-hashtag`
|
||||
- `DELETE /channels/{key}`
|
||||
- `POST /channels/{key}/flood-scope-override`
|
||||
- `POST /channels/{key}/mark-read`
|
||||
@@ -306,7 +308,7 @@ Repository writes should prefer typed models such as `ContactUpsert` over ad hoc
|
||||
- `advert_interval`
|
||||
- `last_advert_time`
|
||||
- `flood_scope`
|
||||
- `blocked_keys`, `blocked_names`
|
||||
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
||||
|
||||
Note: `sidebar_sort_order` remains in the backend model for compatibility and migration, but the current frontend sidebar uses per-section localStorage sort preferences instead of a single shared server-backed sort mode.
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ CREATE TABLE IF NOT EXISTS messages (
|
||||
text TEXT NOT NULL,
|
||||
sender_timestamp INTEGER,
|
||||
received_at INTEGER NOT NULL,
|
||||
path TEXT,
|
||||
paths TEXT,
|
||||
txt_type INTEGER DEFAULT 0,
|
||||
signature TEXT,
|
||||
outgoing INTEGER DEFAULT 0,
|
||||
@@ -91,23 +91,66 @@ CREATE TABLE IF NOT EXISTS contact_name_history (
|
||||
FOREIGN KEY (public_key) REFERENCES contacts(public_key) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS app_settings (
|
||||
id INTEGER PRIMARY KEY CHECK (id = 1),
|
||||
max_radio_contacts INTEGER DEFAULT 200,
|
||||
favorites TEXT DEFAULT '[]',
|
||||
auto_decrypt_dm_on_advert INTEGER DEFAULT 1,
|
||||
sidebar_sort_order TEXT DEFAULT 'recent',
|
||||
last_message_times TEXT DEFAULT '{}',
|
||||
preferences_migrated INTEGER DEFAULT 0,
|
||||
advert_interval INTEGER DEFAULT 0,
|
||||
last_advert_time INTEGER DEFAULT 0,
|
||||
flood_scope TEXT DEFAULT '',
|
||||
blocked_keys TEXT DEFAULT '[]',
|
||||
blocked_names TEXT DEFAULT '[]',
|
||||
discovery_blocked_types TEXT DEFAULT '[]'
|
||||
);
|
||||
INSERT OR IGNORE INTO app_settings (id) VALUES (1);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS fanout_configs (
|
||||
id TEXT PRIMARY KEY,
|
||||
type TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
enabled INTEGER DEFAULT 0,
|
||||
config TEXT NOT NULL DEFAULT '{}',
|
||||
scope TEXT NOT NULL DEFAULT '{}',
|
||||
sort_order INTEGER DEFAULT 0,
|
||||
created_at INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS repeater_telemetry_history (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
public_key TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
data TEXT NOT NULL,
|
||||
FOREIGN KEY (public_key) REFERENCES contacts(public_key) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_received ON messages(received_at);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_dedup_null_safe
|
||||
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
|
||||
WHERE type = 'CHAN';
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_incoming_priv_dedup
|
||||
ON messages(type, conversation_key, text, COALESCE(sender_timestamp, 0))
|
||||
WHERE type = 'PRIV' AND outgoing = 0;
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_sender_key ON messages(sender_key);
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_pagination
|
||||
ON messages(type, conversation_key, received_at DESC, id DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_unread_covering
|
||||
ON messages(type, conversation_key, outgoing, received_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_raw_packets_message_id ON raw_packets(message_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_raw_packets_timestamp ON raw_packets(timestamp);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_raw_packets_payload_hash ON raw_packets(payload_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_contacts_on_radio ON contacts(on_radio);
|
||||
CREATE INDEX IF NOT EXISTS idx_contacts_type_last_seen ON contacts(type, last_seen);
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_type_received_conversation
|
||||
ON messages(type, received_at, conversation_key);
|
||||
-- idx_messages_sender_key is created by migration 25 (after adding the sender_key column)
|
||||
-- idx_messages_incoming_priv_dedup is created by migration 44 after legacy rows are reconciled
|
||||
CREATE INDEX IF NOT EXISTS idx_contact_advert_paths_recent
|
||||
ON contact_advert_paths(public_key, last_seen DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_contact_name_history_key
|
||||
ON contact_name_history(public_key, last_seen DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_repeater_telemetry_pk_ts
|
||||
ON repeater_telemetry_history(public_key, timestamp);
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -202,7 +202,6 @@ async def on_path_update(event: "Event") -> None:
|
||||
# Legacy firmware/library payloads only support 1-byte hop hashes.
|
||||
normalized_path_hash_mode = -1 if normalized_path_len == -1 else 0
|
||||
else:
|
||||
normalized_path_hash_mode = None
|
||||
try:
|
||||
normalized_path_hash_mode = int(path_hash_mode)
|
||||
except (TypeError, ValueError):
|
||||
|
||||
@@ -80,14 +80,6 @@ _PAYLOAD_ADAPTERS: dict[WsEventType, TypeAdapter[Any]] = {
|
||||
}
|
||||
|
||||
|
||||
def validate_ws_event_payload(event_type: str, data: Any) -> WsEventPayload | Any:
    """Validate a WebSocket payload when a schema is registered for its event type.

    Event types without a registered adapter are returned untouched so that
    unknown events still flow through the WebSocket layer unchanged.
    """
    registered = _PAYLOAD_ADAPTERS.get(event_type)  # type: ignore[arg-type]
    if registered is not None:
        return registered.validate_python(data)
    return data
|
||||
|
||||
|
||||
def dump_ws_event(event_type: str, data: Any) -> str:
|
||||
"""Serialize a WebSocket event envelope with validation for known event types."""
|
||||
adapter = _PAYLOAD_ADAPTERS.get(event_type) # type: ignore[arg-type]
|
||||
@@ -104,13 +96,3 @@ def dump_ws_event(event_type: str, data: Any) -> str:
|
||||
event_type,
|
||||
)
|
||||
return json.dumps({"type": event_type, "data": data})
|
||||
|
||||
|
||||
def dump_ws_event_payload(event_type: str, data: Any) -> Any:
    """Return the JSON-serializable payload for a WebSocket event.

    Known event types are validated and dumped through their registered
    adapter; payloads for unknown event types are passed through as-is.
    """
    schema_adapter = _PAYLOAD_ADAPTERS.get(event_type)  # type: ignore[arg-type]
    if schema_adapter is None:
        return data

    checked = schema_adapter.validate_python(data)
    return schema_adapter.dump_python(checked, mode="json")
|
||||
|
||||
@@ -144,11 +144,8 @@ class MapUploadModule(FanoutModule):
|
||||
if advert is None:
|
||||
return
|
||||
|
||||
# TODO: advert Ed25519 signature verification is skipped here.
|
||||
# The radio has already validated the packet before passing it to RT,
|
||||
# so re-verification is redundant in practice. If added, verify that
|
||||
# nacl.bindings.crypto_sign_open(sig + (pubkey_bytes || timestamp_bytes),
|
||||
# advert.public_key_bytes) succeeds before proceeding.
|
||||
# Advert Ed25519 signature verification is intentionally skipped.
|
||||
# The radio validates packets before passing them to RT.
|
||||
|
||||
# Only process repeaters (2) and rooms (3) — any other role is rejected
|
||||
if advert.device_role not in _ALLOWED_DEVICE_ROLES:
|
||||
|
||||
@@ -283,30 +283,6 @@ class NearestRepeater(BaseModel):
|
||||
heard_count: int
|
||||
|
||||
|
||||
class ContactDetail(BaseModel):
    """Comprehensive contact profile data.

    Aggregates the contact record with message counts, advert history, and
    nearby-repeater observations into a single response payload.
    """

    contact: Contact  # the underlying contact record
    name_history: list[ContactNameHistory] = Field(default_factory=list)  # previously observed names
    dm_message_count: int = 0  # count of direct messages with this contact
    channel_message_count: int = 0  # count of channel messages attributed to this contact
    most_active_rooms: list[ContactActiveRoom] = Field(default_factory=list)
    advert_paths: list[ContactAdvertPath] = Field(default_factory=list)  # recent advert paths
    advert_frequency: float | None = Field(
        default=None,
        description="Advert observations per hour (includes multi-path arrivals of same advert)",
    )
    # Repeaters observed near this contact; presumably ranked by heard_count — TODO confirm.
    nearest_repeaters: list[NearestRepeater] = Field(default_factory=list)
|
||||
|
||||
|
||||
class NameOnlyContactDetail(BaseModel):
    """Channel activity summary for a sender name that is not tied to a known key."""

    name: str  # the raw sender display name
    channel_message_count: int = 0  # channel messages seen under this name
    most_active_rooms: list[ContactActiveRoom] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ContactAnalyticsHourlyBucket(BaseModel):
|
||||
"""A single hourly activity bucket for contact analytics."""
|
||||
|
||||
|
||||
@@ -253,70 +253,6 @@ async def should_run_full_periodic_sync(mc: MeshCore) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
async def sync_and_offload_contacts(mc: MeshCore) -> dict:
    """
    Sync contacts from radio to database, then remove them from radio.
    Returns counts of synced and removed contacts.

    Returns a dict with ``synced`` and ``removed`` counts; an ``error`` key is
    added when ``get_contacts`` fails outright or an unexpected exception
    interrupts the sync. Per-contact removal failures are logged and skipped
    rather than aborting the loop.
    """
    synced = 0
    removed = 0

    try:
        # Get all contacts from radio
        result = await mc.commands.get_contacts()

        if result is None or result.type == EventType.ERROR:
            logger.error(
                "Failed to get contacts from radio: %s. "
                "If you see this repeatedly, the radio may be visible on the "
                "serial/TCP/BLE port but not responding to commands. Check for "
                "another process with the serial port open (other RemoteTerm "
                "instances, serial monitors, etc.), verify the firmware is "
                "up-to-date and in client mode (not repeater), or try a "
                "power cycle.",
                result,
            )
            return {"synced": 0, "removed": 0, "error": str(result)}

        contacts = result.payload or {}
        logger.info("Found %d contacts on radio", len(contacts))

        # Sync each contact to database, then remove from radio
        for public_key, contact_data in contacts.items():
            # Save to database; on_radio=False because the contact is about to
            # be removed from the radio below.
            await ContactRepository.upsert(
                ContactUpsert.from_radio_dict(public_key, contact_data, on_radio=False)
            )
            # Fire-and-forget: message reconciliation is scheduled in the
            # background so slow DB work does not block contact removal.
            asyncio.create_task(
                _reconcile_contact_messages_background(
                    public_key,
                    contact_data.get("adv_name"),
                )
            )
            synced += 1

            # Remove from radio
            try:
                remove_result = await mc.commands.remove_contact(contact_data)
                if remove_result.type == EventType.OK:
                    removed += 1
                    # remove_contact() does not update the library's in-memory
                    # contact cache, so evict manually on successful removal.
                    _evict_removed_contact_from_library_cache(mc, public_key)
                else:
                    logger.warning(
                        "Failed to remove contact %s: %s", public_key[:12], remove_result.payload
                    )
            except Exception as e:
                # Best-effort: a failed removal for one contact must not stop
                # processing of the remaining contacts.
                logger.warning("Error removing contact %s: %s", public_key[:12], e)

        logger.info("Synced %d contacts, removed %d from radio", synced, removed)

    except Exception as e:
        logger.error("Error during contact sync: %s", e)
        return {"synced": synced, "removed": removed, "error": str(e)}

    return {"synced": synced, "removed": removed}
|
||||
|
||||
|
||||
async def sync_and_offload_channels(mc: MeshCore, max_channels: int | None = None) -> dict:
|
||||
"""
|
||||
Sync channels from radio to database, then clear them from radio.
|
||||
|
||||
@@ -356,7 +356,7 @@ LocalStorage migration helpers for favorites; canonical favorites are server-sid
|
||||
- `advert_interval`
|
||||
- `last_advert_time`
|
||||
- `flood_scope`
|
||||
- `blocked_keys`, `blocked_names`
|
||||
- `blocked_keys`, `blocked_names`, `discovery_blocked_types`
|
||||
|
||||
The backend still carries `sidebar_sort_order` for compatibility and old preference migration, but the current sidebar UI stores sort order per section (`Channels`, `Contacts`, `Repeaters`) in frontend localStorage rather than treating it as one global server-backed setting.
|
||||
|
||||
|
||||
@@ -166,23 +166,6 @@ export interface NearestRepeater {
|
||||
heard_count: number;
|
||||
}
|
||||
|
||||
/** Comprehensive contact profile payload (mirrors the backend `ContactDetail` model). */
export interface ContactDetail {
  contact: Contact;
  /** Previously observed advertised names for this contact. */
  name_history: ContactNameHistory[];
  /** Count of direct messages with this contact. */
  dm_message_count: number;
  /** Count of channel messages attributed to this contact. */
  channel_message_count: number;
  most_active_rooms: ContactActiveRoom[];
  /** Recent advert paths observed for this contact. */
  advert_paths: ContactAdvertPath[];
  /** Advert observations per hour (includes multi-path arrivals of the same advert), or null. */
  advert_frequency: number | null;
  nearest_repeaters: NearestRepeater[];
}
|
||||
|
||||
/** Channel activity summary for a sender name not tied to a known key (mirrors the backend model). */
export interface NameOnlyContactDetail {
  /** The raw sender display name. */
  name: string;
  channel_message_count: number;
  most_active_rooms: ContactActiveRoom[];
}
|
||||
|
||||
export interface ContactAnalyticsHourlyBucket {
|
||||
bucket_start: number;
|
||||
last_24h_count: number;
|
||||
|
||||
@@ -768,310 +768,6 @@ class TestSyncAndOffloadAll:
|
||||
assert payload["public_key"] == KEY_A
|
||||
|
||||
|
||||
class TestSyncAndOffloadContacts:
    """Test sync_and_offload_contacts: pull contacts from radio, save to DB, remove from radio.

    Each test builds a MagicMock MeshCore whose commands return canned Event-like
    results; the real ContactRepository/MessageRepository back onto the test_db
    fixture so DB effects are verified against actual rows.
    """

    @pytest.mark.asyncio
    async def test_syncs_and_removes_contacts(self, test_db):
        """Contacts are upserted to DB and removed from radio."""
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {
            KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0},
            KEY_B: {"adv_name": "Bob", "type": 1, "flags": 0},
        }

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT  # Not ERROR
        mock_get_result.payload = contact_payload

        mock_remove_result = MagicMock()
        mock_remove_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_remove_result)

        result = await sync_and_offload_contacts(mock_mc)

        assert result["synced"] == 2
        assert result["removed"] == 2

        # Verify contacts are in real DB
        alice = await ContactRepository.get_by_key(KEY_A)
        bob = await ContactRepository.get_by_key(KEY_B)
        assert alice is not None
        assert alice.name == "Alice"
        assert bob is not None
        assert bob.name == "Bob"

    @pytest.mark.asyncio
    async def test_claims_prefix_messages_for_each_contact(self, test_db):
        """Prefix message claims still complete via scheduled reconciliation tasks."""
        from app.radio_sync import sync_and_offload_contacts

        # Pre-insert a message with a prefix key that matches KEY_A
        await MessageRepository.create(
            msg_type="PRIV",
            text="Hello from prefix",
            received_at=1700000000,
            conversation_key=KEY_A[:12],
            sender_timestamp=1700000000,
        )

        contact_payload = {KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0}}

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_remove_result = MagicMock()
        mock_remove_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_remove_result)

        # Capture the background tasks the sync schedules so we can await them
        # deterministically instead of racing the event loop.
        created_tasks: list[asyncio.Task] = []
        real_create_task = asyncio.create_task

        def _capture_task(coro):
            task = real_create_task(coro)
            created_tasks.append(task)
            return task

        with patch("app.radio_sync.asyncio.create_task", side_effect=_capture_task):
            await sync_and_offload_contacts(mock_mc)

        await asyncio.gather(*created_tasks)

        # Verify the prefix message was claimed (promoted to full key)
        messages = await MessageRepository.get_all(conversation_key=KEY_A)
        assert len(messages) == 1
        assert messages[0].conversation_key == KEY_A.lower()

    @pytest.mark.asyncio
    async def test_reconciliation_does_not_block_contact_removal(self, test_db):
        """Slow reconciliation work is scheduled in background, not awaited inline."""
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0}}

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_remove_result = MagicMock()
        mock_remove_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_remove_result)

        # Gate the fake reconcile on an event so the task stays pending until
        # the test explicitly releases it.
        reconcile_started = asyncio.Event()
        reconcile_release = asyncio.Event()
        created_tasks: list[asyncio.Task] = []
        real_create_task = asyncio.create_task

        async def _slow_reconcile(*, public_key: str, contact_name: str | None, log):
            del public_key, contact_name, log
            reconcile_started.set()
            await reconcile_release.wait()

        def _capture_task(coro):
            task = real_create_task(coro)
            created_tasks.append(task)
            return task

        with (
            patch(
                "app.radio_sync.promote_prefix_contacts_for_contact",
                new_callable=AsyncMock,
                return_value=[],
            ),
            patch("app.radio_sync.reconcile_contact_messages", side_effect=_slow_reconcile),
            patch("app.radio_sync.asyncio.create_task", side_effect=_capture_task),
        ):
            result = await sync_and_offload_contacts(mock_mc)
            # One tick lets the scheduled task start and hit the gate.
            await asyncio.sleep(0)

        assert result["synced"] == 1
        assert result["removed"] == 1
        assert reconcile_started.is_set() is True
        # The reconcile task is still pending — proof it was not awaited inline.
        assert created_tasks and created_tasks[0].done() is False
        mock_mc.commands.remove_contact.assert_awaited_once()

        reconcile_release.set()
        await asyncio.gather(*created_tasks)

    @pytest.mark.asyncio
    async def test_handles_remove_failure_gracefully(self, test_db):
        """Failed remove_contact logs warning but continues to next contact."""
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {
            KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0},
            KEY_B: {"adv_name": "Bob", "type": 1, "flags": 0},
        }

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_fail_result = MagicMock()
        mock_fail_result.type = EventType.ERROR
        mock_fail_result.payload = {"error": "busy"}

        mock_ok_result = MagicMock()
        mock_ok_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        # First remove fails, second succeeds
        mock_mc.commands.remove_contact = AsyncMock(side_effect=[mock_fail_result, mock_ok_result])

        result = await sync_and_offload_contacts(mock_mc)

        # Both contacts synced, but only one removed successfully
        assert result["synced"] == 2
        assert result["removed"] == 1

    @pytest.mark.asyncio
    async def test_handles_remove_exception_gracefully(self, test_db):
        """Exception during remove_contact is caught and processing continues."""
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0}}

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(side_effect=Exception("Timeout"))

        result = await sync_and_offload_contacts(mock_mc)

        assert result["synced"] == 1
        assert result["removed"] == 0

    @pytest.mark.asyncio
    async def test_returns_error_when_get_contacts_fails(self):
        """Error result from get_contacts returns error dict."""
        from app.radio_sync import sync_and_offload_contacts

        mock_error_result = MagicMock()
        mock_error_result.type = EventType.ERROR
        mock_error_result.payload = {"error": "radio busy"}

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_error_result)

        result = await sync_and_offload_contacts(mock_mc)

        assert result["synced"] == 0
        assert result["removed"] == 0
        assert "error" in result

    @pytest.mark.asyncio
    async def test_upserts_with_on_radio_false(self, test_db):
        """Contacts are upserted with on_radio=False (being removed from radio)."""
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0}}

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_remove_result = MagicMock()
        mock_remove_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_remove_result)

        await sync_and_offload_contacts(mock_mc)

        contact = await ContactRepository.get_by_key(KEY_A)
        assert contact is not None
        assert contact.on_radio is False

    @pytest.mark.asyncio
    async def test_evicts_removed_contacts_from_library_cache(self, test_db):
        """Successfully removed contacts are evicted from mc._contacts.

        The MeshCore library's remove_contact() command does not update the
        library's in-memory _contacts cache. If we don't evict manually,
        sync_recent_contacts_to_radio() will find stale entries via
        get_contact_by_key_prefix() and skip re-adding contacts to the radio.
        """
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {
            KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0},
            KEY_B: {"adv_name": "Bob", "type": 1, "flags": 0},
        }

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_remove_result = MagicMock()
        mock_remove_result.type = EventType.OK

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_remove_result)
        # Seed the library's in-memory cache with the same contacts —
        # simulating what happens after get_contacts() populates it.
        mock_mc._contacts = {
            KEY_A: {"public_key": KEY_A, "adv_name": "Alice"},
            KEY_B: {"public_key": KEY_B, "adv_name": "Bob"},
        }

        await sync_and_offload_contacts(mock_mc)

        # Both contacts should have been evicted from the library cache
        assert KEY_A not in mock_mc._contacts
        assert KEY_B not in mock_mc._contacts
        assert mock_mc._contacts == {}

    @pytest.mark.asyncio
    async def test_failed_remove_does_not_evict_from_library_cache(self, test_db):
        """Contacts that fail to remove from radio stay in mc._contacts.

        We only evict from the cache on successful removal — if the radio
        still has the contact, the cache should reflect that.
        """
        from app.radio_sync import sync_and_offload_contacts

        contact_payload = {
            KEY_A: {"adv_name": "Alice", "type": 1, "flags": 0},
        }

        mock_get_result = MagicMock()
        mock_get_result.type = EventType.NEW_CONTACT
        mock_get_result.payload = contact_payload

        mock_fail_result = MagicMock()
        mock_fail_result.type = EventType.ERROR
        mock_fail_result.payload = {"error": "busy"}

        mock_mc = MagicMock()
        mock_mc.commands.get_contacts = AsyncMock(return_value=mock_get_result)
        mock_mc.commands.remove_contact = AsyncMock(return_value=mock_fail_result)
        mock_mc._contacts = {
            KEY_A: {"public_key": KEY_A, "adv_name": "Alice"},
        }

        await sync_and_offload_contacts(mock_mc)

        # Contact should still be in the cache since removal failed
        assert KEY_A in mock_mc._contacts
|
||||
|
||||
|
||||
class TestBackgroundContactReconcile:
|
||||
"""Test the yielding background contact reconcile loop."""
|
||||
|
||||
|
||||
Reference in New Issue
Block a user