forked from iarv/mc-webui
feat: Enhance echo tracking with paths, incoming routes, and persistence

- Show repeater path codes in sent message echo badge (e.g., "2 (5e, d1)")
- Capture and display route path for incoming messages in message meta
- Persist all echo data to .echoes.jsonl (survives container restarts)
- Load echo data from disk on startup with 7-day retention and compaction
- Combine sent echo and incoming path data in single /echo_counts response

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -22,7 +22,7 @@ A lightweight web interface for meshcore-cli, providing browser-based access to
|
||||
- **Message archives** - Automatic daily archiving with browse-by-date selector
|
||||
- **Interactive Console** - Direct meshcli command execution via WebSocket
|
||||
- **@Mentions autocomplete** - Type @ to see contact suggestions with fuzzy search
|
||||
- **Echo tracking** - "Heard X repeats" badge shows how many repeaters forwarded your message
|
||||
- **Echo tracking** - "Heard X repeats" with repeater IDs for sent messages, route path for incoming messages (persisted across restarts)
|
||||
- **PWA support** - Browser notifications and installable app (experimental)
|
||||
- **Full offline support** - Works without internet (local Bootstrap, icons, emoji picker)
|
||||
|
||||
|
||||
@@ -299,29 +299,38 @@ def get_messages():
|
||||
channel_idx=channel_idx
|
||||
)
|
||||
|
||||
# Fetch echo counts from bridge (for "Heard X repeats" feature)
|
||||
# Fetch echo data from bridge (for "Heard X repeats" + path display)
|
||||
if not archive_date: # Only for live messages, not archives
|
||||
try:
|
||||
bridge_url = config.MC_BRIDGE_URL.replace('/cli', '/echo_counts')
|
||||
response = requests.get(bridge_url, timeout=2)
|
||||
if response.ok:
|
||||
echo_counts = response.json().get('echo_counts', [])
|
||||
resp_data = response.json()
|
||||
echo_counts = resp_data.get('echo_counts', [])
|
||||
incoming_paths = resp_data.get('incoming_paths', [])
|
||||
|
||||
# Create lookup by timestamp + channel
|
||||
echo_lookup = {(ec['timestamp'], ec['channel_idx']): ec['count']
|
||||
for ec in echo_counts}
|
||||
|
||||
# Merge into messages
|
||||
# Merge sent echo counts + paths into own messages
|
||||
for msg in messages:
|
||||
if msg.get('is_own'):
|
||||
# Find matching echo count (within 5 second window)
|
||||
msg['echo_count'] = 0
|
||||
for (ts, ch), count in echo_lookup.items():
|
||||
if msg.get('channel_idx') == ch and abs(msg['timestamp'] - ts) < 5:
|
||||
msg['echo_count'] = count
|
||||
msg['echo_paths'] = []
|
||||
for ec in echo_counts:
|
||||
if (msg.get('channel_idx') == ec.get('channel_idx') and
|
||||
abs(msg['timestamp'] - ec['timestamp']) < 5):
|
||||
msg['echo_count'] = ec['count']
|
||||
msg['echo_paths'] = ec.get('paths', [])
|
||||
break
|
||||
|
||||
# Merge incoming paths into received messages
|
||||
for msg in messages:
|
||||
if not msg.get('is_own'):
|
||||
for ip in incoming_paths:
|
||||
if (abs(msg['timestamp'] - ip['timestamp']) < 5 and
|
||||
msg.get('path_len') == ip.get('path_len')):
|
||||
msg['path'] = ip['path']
|
||||
break
|
||||
except Exception as e:
|
||||
logger.debug(f"Echo counts fetch failed (non-critical): {e}")
|
||||
logger.debug(f"Echo data fetch failed (non-critical): {e}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
|
||||
@@ -1121,6 +1121,12 @@ main {
|
||||
background-color: rgba(117, 183, 152, 0.15);
|
||||
}
|
||||
|
||||
/* Path info in message meta (incoming messages).
   Rendered by the frontend as an inline <span> carrying the full route in
   its title attribute, so the rule only needs to signal "hover me". */
.path-info {
    cursor: help;                            /* hint that a tooltip is available */
    border-bottom: 1px dotted currentColor;  /* conventional tooltip underline */
}
|
||||
|
||||
/* =============================================================================
|
||||
Chat Filter
|
||||
============================================================================= */
|
||||
|
||||
@@ -715,13 +715,23 @@ function createMessageElement(msg) {
|
||||
if (msg.path_len !== undefined && msg.path_len !== null) {
|
||||
metaInfo += ` | Hops: ${msg.path_len}`;
|
||||
}
|
||||
if (msg.path) {
|
||||
const segments = msg.path.match(/.{1,2}/g) || [];
|
||||
const fullPath = segments.join(' \u2192 ');
|
||||
const shortPath = segments.length > 4
|
||||
? `${segments[0]}\u2192...\u2192${segments[segments.length - 1]}`
|
||||
: segments.join('\u2192');
|
||||
metaInfo += ` | <span class="path-info" title="Path: ${fullPath}">Route: ${shortPath}</span>`;
|
||||
}
|
||||
|
||||
if (msg.is_own) {
|
||||
// Own messages: right-aligned, no avatar
|
||||
// Echo badge shows how many repeaters heard the message
|
||||
// Echo badge shows how many repeaters heard the message + their path codes
|
||||
const echoPaths = (msg.echo_paths || []).map(p => p.substring(0, 2));
|
||||
const pathDisplay = echoPaths.length > 0 ? ` (${echoPaths.join(', ')})` : '';
|
||||
const echoDisplay = msg.echo_count > 0
|
||||
? `<span class="echo-badge" title="Heard by ${msg.echo_count} repeater(s)">
|
||||
<i class="bi bi-broadcast"></i> ${msg.echo_count}
|
||||
? `<span class="echo-badge" title="Heard by ${msg.echo_count} repeater(s): ${echoPaths.join(', ')}">
|
||||
<i class="bi bi-broadcast"></i> ${msg.echo_count}${pathDisplay}
|
||||
</span>`
|
||||
: '';
|
||||
|
||||
|
||||
@@ -149,7 +149,12 @@ class MeshCLISession:
|
||||
# Echo tracking for "Heard X repeats" feature
|
||||
self.pending_echo = None # {timestamp, channel_idx, pkt_payload}
|
||||
self.echo_counts = {} # pkt_payload -> {paths: set(), timestamp: float, channel_idx: int}
|
||||
self.incoming_paths = {} # pkt_payload -> {path, snr, path_len, timestamp}
|
||||
self.echo_lock = threading.Lock()
|
||||
self.echo_log_path = self.config_dir / f"{device_name}.echoes.jsonl"
|
||||
|
||||
# Load persisted echo data from disk
|
||||
self._load_echoes()
|
||||
|
||||
# Start session
|
||||
self._start_session()
|
||||
@@ -316,7 +321,7 @@ class MeshCLISession:
|
||||
# Try to parse as GRP_TXT echo (for "Heard X repeats" feature)
|
||||
echo_data = self._parse_grp_txt_echo(line)
|
||||
if echo_data:
|
||||
self._process_echo(echo_data[0], echo_data[1])
|
||||
self._process_echo(echo_data)
|
||||
continue
|
||||
|
||||
# Otherwise, append to current CLI response
|
||||
@@ -483,30 +488,43 @@ class MeshCLISession:
|
||||
return False
|
||||
|
||||
def _parse_grp_txt_echo(self, line):
|
||||
"""Parse GRP_TXT JSON echo, return (pkt_payload, path) or None."""
|
||||
"""Parse GRP_TXT JSON echo, return data dict or None."""
|
||||
try:
|
||||
data = json.loads(line)
|
||||
if isinstance(data, dict) and data.get("payload_typename") == "GRP_TXT":
|
||||
return (data.get('pkt_payload'), data.get('path', ''))
|
||||
return {
|
||||
'pkt_payload': data.get('pkt_payload'),
|
||||
'path': data.get('path', ''),
|
||||
'snr': data.get('snr'),
|
||||
'path_len': data.get('path_len'),
|
||||
}
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
pass
|
||||
return None
|
||||
|
||||
def _process_echo(self, pkt_payload, path):
|
||||
"""Process a GRP_TXT echo and track unique paths."""
|
||||
def _process_echo(self, echo_data):
|
||||
"""Process a GRP_TXT echo: track as sent echo or incoming path."""
|
||||
pkt_payload = echo_data.get('pkt_payload')
|
||||
path = echo_data.get('path', '')
|
||||
if not pkt_payload:
|
||||
return
|
||||
|
||||
with self.echo_lock:
|
||||
current_time = time.time()
|
||||
|
||||
# If this pkt_payload is already tracked, add path
|
||||
# If this pkt_payload is already tracked as sent echo, add path
|
||||
if pkt_payload in self.echo_counts:
|
||||
self.echo_counts[pkt_payload]['paths'].add(path)
|
||||
if path not in self.echo_counts[pkt_payload]['paths']:
|
||||
self.echo_counts[pkt_payload]['paths'].add(path)
|
||||
self._save_echo({
|
||||
'type': 'sent_echo', 'pkt_payload': pkt_payload,
|
||||
'path': path, 'msg_ts': self.echo_counts[pkt_payload]['timestamp'],
|
||||
'channel_idx': self.echo_counts[pkt_payload]['channel_idx']
|
||||
})
|
||||
logger.debug(f"Echo: added path {path} to existing payload, total: {len(self.echo_counts[pkt_payload]['paths'])}")
|
||||
return
|
||||
|
||||
# If we have a pending message waiting for correlation
|
||||
# If we have a pending sent message waiting for correlation
|
||||
if self.pending_echo and self.pending_echo.get('pkt_payload') is None:
|
||||
# Check time window (60 seconds)
|
||||
if current_time - self.pending_echo['timestamp'] < 60:
|
||||
@@ -517,7 +535,32 @@ class MeshCLISession:
|
||||
'timestamp': self.pending_echo['timestamp'],
|
||||
'channel_idx': self.pending_echo['channel_idx']
|
||||
}
|
||||
self._save_echo({
|
||||
'type': 'sent_echo', 'pkt_payload': pkt_payload,
|
||||
'path': path, 'msg_ts': self.pending_echo['timestamp'],
|
||||
'channel_idx': self.pending_echo['channel_idx']
|
||||
})
|
||||
logger.info(f"Echo: correlated pkt_payload with sent message, first path: {path}")
|
||||
return
|
||||
|
||||
# Not a sent echo -> store as incoming message path
|
||||
self.incoming_paths[pkt_payload] = {
|
||||
'path': path,
|
||||
'snr': echo_data.get('snr'),
|
||||
'path_len': echo_data.get('path_len'),
|
||||
'timestamp': current_time,
|
||||
}
|
||||
self._save_echo({
|
||||
'type': 'rx_echo', 'pkt_payload': pkt_payload,
|
||||
'path': path, 'snr': echo_data.get('snr'),
|
||||
'path_len': echo_data.get('path_len')
|
||||
})
|
||||
logger.debug(f"Echo: stored incoming path {path} (path_len={echo_data.get('path_len')})")
|
||||
|
||||
# Cleanup old incoming paths (> 1 hour)
|
||||
cutoff = current_time - 3600
|
||||
self.incoming_paths = {k: v for k, v in self.incoming_paths.items()
|
||||
if v['timestamp'] > cutoff}
|
||||
|
||||
def register_pending_echo(self, channel_idx, timestamp):
|
||||
"""Register a sent message for echo tracking."""
|
||||
@@ -543,6 +586,80 @@ class MeshCLISession:
|
||||
return len(data['paths'])
|
||||
return 0
|
||||
|
||||
def _save_echo(self, record):
|
||||
"""Append echo record to .echoes.jsonl file."""
|
||||
try:
|
||||
record['ts'] = time.time()
|
||||
with open(self.echo_log_path, 'a', encoding='utf-8') as f:
|
||||
f.write(json.dumps(record, ensure_ascii=False) + '\n')
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to save echo: {e}")
|
||||
|
||||
def _load_echoes(self):
    """Load persisted echo data from .echoes.jsonl on startup.

    Replays the append-only log written by _save_echo() into the
    in-memory structures:
      - 'sent_echo' records rebuild self.echo_counts
        (pkt_payload -> {paths: set, timestamp, channel_idx})
      - 'rx_echo' records rebuild self.incoming_paths
        (pkt_payload -> {path, snr, path_len, timestamp})
    Records older than 7 days are dropped, and the file is rewritten
    with only the surviving lines (compaction).  All failures are
    logged and swallowed: losing echo history is non-critical.
    """
    if not self.echo_log_path.exists():
        return

    cutoff = time.time() - (7 * 24 * 3600)  # 7-day retention window
    kept_lines = []        # raw lines that survive retention, rewritten at the end
    loaded_sent = 0
    loaded_incoming = 0

    try:
        with open(self.echo_log_path, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                try:
                    record = json.loads(line)
                except json.JSONDecodeError:
                    # Corrupt/partial line (e.g. interrupted write): drop it.
                    continue

                ts = record.get('ts', 0)
                if ts < cutoff:
                    continue  # Skip old records

                kept_lines.append(line)
                pkt_payload = record.get('pkt_payload')
                if not pkt_payload:
                    # Kept on disk but unusable in memory without a key.
                    continue

                echo_type = record.get('type')

                if echo_type == 'sent_echo':
                    if pkt_payload in self.echo_counts:
                        # Later record for a known payload: add path to existing entry.
                        path = record.get('path', '')
                        if path:
                            self.echo_counts[pkt_payload]['paths'].add(path)
                    else:
                        # First record for this payload: seed the entry.
                        # NOTE(review): paths is seeded even when 'path' is '',
                        # mirroring how the live correlation path behaves.
                        self.echo_counts[pkt_payload] = {
                            'paths': {record.get('path', '')},
                            'timestamp': record.get('msg_ts', ts),
                            'channel_idx': record.get('channel_idx', 0)
                        }
                    # Counts records, not unique payloads — log-only statistic.
                    loaded_sent += 1

                elif echo_type == 'rx_echo':
                    # Last record per payload wins (dict overwrite).
                    self.incoming_paths[pkt_payload] = {
                        'path': record.get('path', ''),
                        'snr': record.get('snr'),
                        'path_len': record.get('path_len'),
                        'timestamp': ts,
                    }
                    loaded_incoming += 1

        # Rewrite file with only recent records (compact)
        with open(self.echo_log_path, 'w', encoding='utf-8') as f:
            for line in kept_lines:
                f.write(line + '\n')

        logger.info(f"Loaded echoes from disk: {loaded_sent} sent, {loaded_incoming} incoming (kept {len(kept_lines)} records)")

    except Exception as e:
        logger.error(f"Failed to load echoes: {e}")
|
||||
|
||||
def _log_advert(self, json_line):
|
||||
"""Log advert JSON to .jsonl file with timestamp"""
|
||||
try:
|
||||
@@ -1127,16 +1244,20 @@ def register_echo():
|
||||
@app.route('/echo_counts', methods=['GET'])
|
||||
def get_echo_counts():
|
||||
"""
|
||||
Get all echo counts for recent messages.
|
||||
Get echo data for sent and incoming messages.
|
||||
|
||||
Returns echo counts grouped by timestamp and channel, allowing
|
||||
the caller to match with their sent messages.
|
||||
Returns sent echo counts (with repeater paths) and incoming message
|
||||
path info, allowing the caller to match with displayed messages.
|
||||
|
||||
Response JSON:
|
||||
{
|
||||
"success": true,
|
||||
"echo_counts": [
|
||||
{"timestamp": 1706500000.123, "channel_idx": 0, "count": 3},
|
||||
{"timestamp": 1706500000.123, "channel_idx": 0, "count": 3, "paths": ["5e", "d1", "a3"]},
|
||||
...
|
||||
],
|
||||
"incoming_paths": [
|
||||
{"timestamp": 1706500000.456, "path": "8a40a605", "path_len": 4, "snr": 11.0},
|
||||
...
|
||||
]
|
||||
}
|
||||
@@ -1145,15 +1266,29 @@ def get_echo_counts():
|
||||
return jsonify({'success': False, 'error': 'Not initialized'}), 503
|
||||
|
||||
with meshcli_session.echo_lock:
|
||||
result = []
|
||||
sent = []
|
||||
for pkt_payload, data in meshcli_session.echo_counts.items():
|
||||
result.append({
|
||||
sent.append({
|
||||
'timestamp': data['timestamp'],
|
||||
'channel_idx': data['channel_idx'],
|
||||
'count': len(data['paths'])
|
||||
'count': len(data['paths']),
|
||||
'paths': list(data['paths'])
|
||||
})
|
||||
|
||||
return jsonify({'success': True, 'echo_counts': result}), 200
|
||||
incoming = []
|
||||
for pkt_payload, data in meshcli_session.incoming_paths.items():
|
||||
incoming.append({
|
||||
'timestamp': data['timestamp'],
|
||||
'path': data['path'],
|
||||
'path_len': data.get('path_len'),
|
||||
'snr': data.get('snr'),
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'echo_counts': sent,
|
||||
'incoming_paths': incoming
|
||||
}), 200
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
Reference in New Issue
Block a user